1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::helpers::stmt_create_table::{CreateTableBuilder, CreateTableConfiguration};
36use crate::ast::Statement::CreatePolicy;
37use crate::ast::*;
38use crate::dialect::*;
39use crate::keywords::{Keyword, ALL_KEYWORDS};
40use crate::tokenizer::*;
41
42mod alter;
43
/// Errors produced while parsing a SQL statement.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// The tokenizer failed before parsing could begin; carries the
    /// rendered tokenizer message.
    TokenizerError(String),
    /// The token stream did not match the expected grammar.
    ParserError(String),
    /// The nesting-depth budget was exhausted (see `RecursionCounter`).
    RecursionLimitExceeded,
}
50
// A parsed action keyword with an optional parenthesized identifier list.
// NOTE(review): presumably used by GRANT/REVOKE parsing — confirm at use site.
type ParsedAction = (Keyword, Option<Vec<Ident>>);
53
/// Builds an `Err(ParserError::ParserError)` whose message is `$MSG`
/// immediately followed by the source location `$loc`.
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
60
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks how many additional levels of nesting the parser may enter.
    ///
    /// The remaining budget lives in an `Rc<Cell<usize>>` shared with the
    /// guards returned by [`RecursionCounter::try_decrease`], so dropping a
    /// guard automatically restores one level.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter allowing `remaining_depth` nested calls.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Consumes one level of depth.
        ///
        /// Returns a guard that gives the level back when dropped, or
        /// `ParserError::RecursionLimitExceeded` once the budget hits zero.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            match self.remaining_depth.get().checked_sub(1) {
                None => Err(ParserError::RecursionLimitExceeded),
                Some(depth) => {
                    self.remaining_depth.set(depth);
                    Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
                }
            }
        }
    }

    /// Guard handed out by `try_decrease`; restores one level on drop.
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        fn drop(&mut self) {
            let restored = self.remaining_depth.get() + 1;
            self.remaining_depth.set(restored);
        }
    }
}
124
#[cfg(not(feature = "std"))]
mod recursion {
    /// No-op stand-in used without `std`: the real counter relies on
    /// `Rc`/`Cell`, so no recursion limit is enforced in `no_std` builds.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        /// The requested depth is ignored.
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        /// Always succeeds; no depth accounting is performed.
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    /// Zero-sized guard matching the `std` API.
    pub struct DepthGuard {}
}
145
/// Whether a syntactic element must be present or may be omitted.
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}
151
/// Whether a derived table was introduced with the `LATERAL` keyword.
pub enum IsLateral {
    Lateral,
    NotLateral,
}
156
/// Result of parsing a select-item-like position: an ordinary expression,
/// a qualified wildcard (`a.b.*`), or a bare `*`.
pub enum WildcardExpr {
    Expr(Expr),
    QualifiedWildcard(ObjectName),
    Wildcard,
}
162
163impl From<TokenizerError> for ParserError {
164 fn from(e: TokenizerError) -> Self {
165 ParserError::TokenizerError(e.to_string())
166 }
167}
168
169impl fmt::Display for ParserError {
170 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
171 write!(
172 f,
173 "sql parser error: {}",
174 match self {
175 ParserError::TokenizerError(s) => s,
176 ParserError::ParserError(s) => s,
177 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
178 }
179 )
180 }
181}
182
// `std::error::Error` requires `std`; the empty impl suffices because
// `Debug` and `Display` are already provided for `ParserError`.
#[cfg(feature = "std")]
impl std::error::Error for ParserError {}
185
/// Default nesting budget used by `Parser::new`; exceeding it yields
/// `ParserError::RecursionLimitExceeded`.
const DEFAULT_REMAINING_DEPTH: usize = 50;
188
/// Newtype flag recording whether a trailing closing bracket was matched.
struct MatchedTrailingBracket(bool);
202
203impl From<bool> for MatchedTrailingBracket {
204 fn from(value: bool) -> Self {
205 Self(value)
206 }
207}
208
/// Dialect-independent options that tweak parser behavior.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Accept a trailing comma in comma-separated lists.
    pub trailing_commas: bool,
    /// Unescape string literals during tokenization.
    pub unescape: bool,
}
217
218impl Default for ParserOptions {
219 fn default() -> Self {
220 Self {
221 trailing_commas: false,
222 unescape: true,
223 }
224 }
225}
226
227impl ParserOptions {
228 pub fn new() -> Self {
230 Default::default()
231 }
232
233 pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
245 self.trailing_commas = trailing_commas;
246 self
247 }
248
249 pub fn with_unescape(mut self, unescape: bool) -> Self {
252 self.unescape = unescape;
253 self
254 }
255}
256
/// Parser mode that alters how certain expressions are interpreted.
#[derive(Copy, Clone)]
enum ParserState {
    /// Ordinary parsing.
    Normal,
    /// Inside a `CONNECT BY` clause, where `PRIOR` acts as a unary
    /// operator (see `parse_expr_prefix_by_reserved_word`).
    ConnectBy,
}
266
/// SQL parser: consumes a token stream and builds AST statements and
/// expressions according to the configured dialect.
pub struct Parser<'a> {
    /// The tokens being parsed.
    tokens: Vec<TokenWithSpan>,
    /// Index into `tokens` of the next token to consume.
    index: usize,
    /// Current parsing mode (see `ParserState`).
    state: ParserState,
    /// Dialect driving keyword and precedence decisions.
    dialect: &'a dyn Dialect,
    /// Extra options (trailing commas, literal unescaping).
    options: ParserOptions,
    /// Guards against stack overflow on deeply nested input.
    recursion_counter: RecursionCounter,
}
282
283impl<'a> Parser<'a> {
    /// Creates a parser for `dialect` with no tokens loaded and the default
    /// recursion limit; trailing-comma acceptance follows the dialect.
    pub fn new(dialect: &'a dyn Dialect) -> Self {
        Self {
            tokens: vec![],
            index: 0,
            state: ParserState::Normal,
            dialect,
            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
        }
    }
309
    /// Overrides the maximum nesting depth accepted while parsing.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }
333
    /// Replaces the parser options wholesale (builder style).
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }
360
    /// Replaces the token stream and rewinds the cursor to the start.
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }
367
368 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
370 let tokens_with_locations: Vec<TokenWithSpan> = tokens
372 .into_iter()
373 .map(|token| TokenWithSpan {
374 token,
375 span: Span::empty(),
376 })
377 .collect();
378 self.with_tokens_with_locations(tokens_with_locations)
379 }
380
    /// Tokenizes `sql` with this parser's dialect and unescape option and
    /// loads the resulting tokens. Tokenizer failures are converted into
    /// `ParserError::TokenizerError` via the `From` impl.
    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
        debug!("Parsing sql '{}'...", sql);
        let tokens = Tokenizer::new(self.dialect, sql)
            .with_unescape(self.options.unescape)
            .tokenize_with_location()?;
        Ok(self.with_tokens_with_locations(tokens))
    }
394
    /// Parses zero or more semicolon-separated statements until EOF.
    ///
    /// Runs of semicolons are skipped; after each statement a delimiter is
    /// required, except that EOF or the keyword `END` also terminates the
    /// list.
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // Swallow any number of delimiters between statements.
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            match self.peek_token().token {
                Token::EOF => break,

                // `END` may stand in for the delimiter at the end of the list.
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected("end of statement", self.peek_token());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
441
    /// Convenience entry point: tokenizes and parses `sql` in one call.
    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
    }
460
    /// Parses a single top-level statement (SELECT, INSERT, CREATE, ...),
    /// stopping before the statement separator, if any.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        // Count one nesting level per statement; the guard restores the
        // budget when this call returns.
        let _guard = self.recursion_counter.try_decrease()?;

        // Let the dialect fully take over statement parsing if it wants to.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        // Dispatch on the first token/keyword of the statement.
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze(),
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES => {
                    // Queries are parsed from their first token; back up.
                    self.prev_token();
                    self.parse_query().map(Statement::Query)
                }
                Keyword::TRUNCATE => self.parse_truncate(),
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck(),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                Keyword::DELETE => self.parse_delete(),
                Keyword::INSERT => self.parse_insert(),
                Keyword::REPLACE => self.parse_replace(),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant(),
                Keyword::REVOKE => self.parse_revoke(),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(),
                // LISTEN/UNLISTEN/NOTIFY are gated on dialect support.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => self.parse_unload(),
                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_install()
                }
                Keyword::LOAD => self.parse_load(),
                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                _ => self.expected("an SQL statement", next_token),
            },
            // A statement may begin with a parenthesized query.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Statement::Query)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
564
565 pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
566 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
567
568 self.expect_keyword(Keyword::ON)?;
569 let token = self.next_token();
570
571 let (object_type, object_name) = match token.token {
572 Token::Word(w) if w.keyword == Keyword::COLUMN => {
573 (CommentObject::Column, self.parse_object_name(false)?)
574 }
575 Token::Word(w) if w.keyword == Keyword::TABLE => {
576 (CommentObject::Table, self.parse_object_name(false)?)
577 }
578 Token::Word(w) if w.keyword == Keyword::EXTENSION => {
579 (CommentObject::Extension, self.parse_object_name(false)?)
580 }
581 Token::Word(w) if w.keyword == Keyword::SCHEMA => {
582 (CommentObject::Schema, self.parse_object_name(false)?)
583 }
584 Token::Word(w) if w.keyword == Keyword::DATABASE => {
585 (CommentObject::Database, self.parse_object_name(false)?)
586 }
587 Token::Word(w) if w.keyword == Keyword::USER => {
588 (CommentObject::User, self.parse_object_name(false)?)
589 }
590 Token::Word(w) if w.keyword == Keyword::ROLE => {
591 (CommentObject::Role, self.parse_object_name(false)?)
592 }
593 _ => self.expected("comment object_type", token)?,
594 };
595
596 self.expect_keyword(Keyword::IS)?;
597 let comment = if self.parse_keyword(Keyword::NULL) {
598 None
599 } else {
600 Some(self.parse_literal_string()?)
601 };
602 Ok(Statement::Comment {
603 object_type,
604 object_name,
605 comment,
606 if_exists,
607 })
608 }
609
610 pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
611 let mut channel = None;
612 let mut tables: Vec<ObjectName> = vec![];
613 let mut read_lock = false;
614 let mut export = false;
615
616 if !dialect_of!(self is MySqlDialect | GenericDialect) {
617 return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
618 }
619
620 let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
621 Some(FlushLocation::NoWriteToBinlog)
622 } else if self.parse_keyword(Keyword::LOCAL) {
623 Some(FlushLocation::Local)
624 } else {
625 None
626 };
627
628 let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
629 FlushType::BinaryLogs
630 } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
631 FlushType::EngineLogs
632 } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
633 FlushType::ErrorLogs
634 } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
635 FlushType::GeneralLogs
636 } else if self.parse_keywords(&[Keyword::HOSTS]) {
637 FlushType::Hosts
638 } else if self.parse_keyword(Keyword::PRIVILEGES) {
639 FlushType::Privileges
640 } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
641 FlushType::OptimizerCosts
642 } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
643 if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
644 channel = Some(self.parse_object_name(false).unwrap().to_string());
645 }
646 FlushType::RelayLogs
647 } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
648 FlushType::SlowLogs
649 } else if self.parse_keyword(Keyword::STATUS) {
650 FlushType::Status
651 } else if self.parse_keyword(Keyword::USER_RESOURCES) {
652 FlushType::UserResources
653 } else if self.parse_keywords(&[Keyword::LOGS]) {
654 FlushType::Logs
655 } else if self.parse_keywords(&[Keyword::TABLES]) {
656 loop {
657 let next_token = self.next_token();
658 match &next_token.token {
659 Token::Word(w) => match w.keyword {
660 Keyword::WITH => {
661 read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
662 }
663 Keyword::FOR => {
664 export = self.parse_keyword(Keyword::EXPORT);
665 }
666 Keyword::NoKeyword => {
667 self.prev_token();
668 tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
669 }
670 _ => {}
671 },
672 _ => {
673 break;
674 }
675 }
676 }
677
678 FlushType::Tables
679 } else {
680 return self.expected(
681 "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
682 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
683 self.peek_token(),
684 );
685 };
686
687 Ok(Statement::Flush {
688 object_type,
689 location,
690 channel,
691 read_lock,
692 export,
693 tables,
694 })
695 }
696
    /// Parses `MSCK [REPAIR] TABLE <name> [{ADD|DROP|SYNC} PARTITIONS]`.
    pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        // The partition-action clause is parsed speculatively; if
        // `PARTITIONS` is absent the attempt yields None and the default
        // action is used.
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Statement::Msck {
            repair,
            table_name,
            partition_action,
        })
    }
723
    /// Parses `TRUNCATE [TABLE] [ONLY] <names> [PARTITION (..)]` with the
    /// Postgres-only `{RESTART|CONTINUE} IDENTITY` and `CASCADE|RESTRICT`
    /// clauses, plus an optional `ON CLUSTER` clause.
    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);
        let only = self.parse_keyword(Keyword::ONLY);

        let table_names = self
            .parse_comma_separated(|p| p.parse_object_name(false))?
            .into_iter()
            .map(|n| TruncateTableTarget { name: n })
            .collect();

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        // Identity/cascade options are only recognized for Postgres-like
        // dialects; elsewhere both stay None.
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = if self.parse_keyword(Keyword::CASCADE) {
                Some(TruncateCascadeOption::Cascade)
            } else if self.parse_keyword(Keyword::RESTRICT) {
                Some(TruncateCascadeOption::Restrict)
            } else {
                None
            };
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Statement::Truncate {
            table_names,
            partitions,
            table,
            only,
            identity,
            cascade,
            on_cluster,
        })
    }
774
    /// Parses the parenthesized option list of a DuckDB `ATTACH`:
    /// `(READ_ONLY [TRUE|FALSE] | TYPE <ident>, ...)`. Returns an empty
    /// list when no `(` follows.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // The boolean is optional; a bare READ_ONLY is stored as None.
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier(false)?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
            };

            // Each option is followed by `,` (continue) or `)` (done).
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected("expected one of: ')', ','", self.peek_token());
            }
        }
    }
809
810 pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
811 let database = self.parse_keyword(Keyword::DATABASE);
812 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
813 let database_path = self.parse_identifier(false)?;
814 let database_alias = if self.parse_keyword(Keyword::AS) {
815 Some(self.parse_identifier(false)?)
816 } else {
817 None
818 };
819
820 let attach_options = self.parse_attach_duckdb_database_options()?;
821 Ok(Statement::AttachDuckDBDatabase {
822 if_not_exists,
823 database,
824 database_path,
825 database_alias,
826 attach_options,
827 })
828 }
829
    /// Parses DuckDB `DETACH [DATABASE] [IF EXISTS] <alias>`.
    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let database_alias = self.parse_identifier(false)?;
        Ok(Statement::DetachDuckDBDatabase {
            if_exists,
            database,
            database_alias,
        })
    }
840
    /// Parses `ATTACH [DATABASE] <expr> AS <schema>`.
    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword(Keyword::AS)?;
        let schema_name = self.parse_identifier(false)?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }
852
    /// Parses `ANALYZE TABLE <name>` followed by any combination (in any
    /// order) of `PARTITION (..)`, `FOR COLUMNS [c1, ..]`, `CACHE METADATA`,
    /// `NOSCAN`, and `COMPUTE STATISTICS`.
    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword(Keyword::COLUMNS)?;

                    // The column list itself is optional after FOR COLUMNS.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier(false))
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                // No recognized clause follows: the statement is complete.
                _ => break,
            }
        }

        Ok(Statement::Analyze {
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }
908
    /// Parses a bare wildcard (`*`), a qualified wildcard (`a.b.*`), or —
    /// failing both — rewinds and parses an ordinary expression.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember the cursor so we can rewind on the fallback path.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token().token == Token::Period {
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.to_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        _ => unreachable!(), // excluded by the outer pattern
                    }];

                    // Consume `.part` segments until a `*` (wildcard) or an
                    // unexpected token.
                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.to_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Mul => {
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            _ => (),
        };

        // Not a wildcard form: rewind and parse as a regular expression.
        self.index = index;
        self.parse_expr()
    }
954
    /// Parses a full expression starting from the lowest precedence level.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }
959
    /// Parses an expression whose operators all bind tighter than
    /// `precedence` (precedence-climbing).
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        // Each subexpression consumes one level of the recursion budget.
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;
        debug!("prefix: {:?}", expr);
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {:?}", next_precedence);

            // Stop when the next operator binds no tighter than our
            // caller's context; the caller will consume it instead.
            if precedence >= next_precedence {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
978
979 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
980 let condition = self.parse_expr()?;
981 let message = if self.parse_keyword(Keyword::AS) {
982 Some(self.parse_expr()?)
983 } else {
984 None
985 };
986
987 Ok(Statement::Assert { condition, message })
988 }
989
    /// Parses `SAVEPOINT <name>`.
    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier(false)?;
        Ok(Statement::Savepoint { name })
    }
994
    /// Parses `RELEASE [SAVEPOINT] <name>`; the SAVEPOINT keyword is optional.
    pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
        let _ = self.parse_keyword(Keyword::SAVEPOINT);
        let name = self.parse_identifier(false)?;

        Ok(Statement::ReleaseSavepoint { name })
    }
1001
    /// Parses `LISTEN <channel>`.
    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
        let channel = self.parse_identifier(false)?;
        Ok(Statement::LISTEN { channel })
    }
1006
    /// Parses `UNLISTEN { <channel> | * }`.
    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
        let channel = if self.consume_token(&Token::Mul) {
            // `*` targets every channel; stored as a wildcard identifier.
            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
        } else {
            match self.parse_identifier(false) {
                Ok(expr) => expr,
                _ => {
                    // Put the offending token back so the error points at it.
                    self.prev_token();
                    return self.expected("wildcard or identifier", self.peek_token());
                }
            }
        };
        Ok(Statement::UNLISTEN { channel })
    }
1021
1022 pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1023 let channel = self.parse_identifier(false)?;
1024 let payload = if self.consume_token(&Token::Comma) {
1025 Some(self.parse_literal_string()?)
1026 } else {
1027 None
1028 };
1029 Ok(Statement::NOTIFY { channel, payload })
1030 }
1031
    /// Attempts to parse an expression prefix introduced by a reserved
    /// keyword (literals, CAST/CASE/EXISTS, time functions, ...).
    ///
    /// Returns `Ok(None)` when `w` does not start a reserved-word
    /// expression, so the caller can retry it as an ordinary identifier.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                // Re-read the keyword as a value token.
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                // Parenthesis-less builtins; modeled as zero-argument functions.
                Ok(Some(Expr::Function(Function {
                    name: ObjectName(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName(vec![w.to_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // NOTE(review): for Databricks, EXISTS is only treated as a
            // subquery test when SELECT/WITH follows — presumably because it
            // can also name a function there; confirm against the dialect.
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            Keyword::POSITION if self.peek_token().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.to_ident(w_span))?))
            }
            Keyword::SUBSTRING => Ok(Some(self.parse_substring_expr()?)),
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // `ARRAY[...]` literal syntax.
            Keyword::ARRAY if self.peek_token() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // `ARRAY(<subquery>)` — an ARRAY function over a subquery.
            Keyword::ARRAY
                if self.peek_token() == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if dialect_of!(self is MySqlDialect | GenericDialect) => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                Ok(Some(self.parse_struct_literal()?))
            }
            // `PRIOR` is a unary operator only inside CONNECT BY.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            Keyword::MAP if self.peek_token() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            // Not a reserved-word prefix: let the caller handle `w`.
            _ => Ok(None)
        }
    }
1141
    /// Parses an expression prefix starting from a non-reserved word:
    /// compound identifiers (`a.b.c`), qualified wildcards (`a.*`, Postgres
    /// only), function calls, introduced strings, single-parameter lambdas,
    /// or a plain identifier.
    fn parse_expr_prefix_by_unreserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Expr, ParserError> {
        match self.peek_token().token {
            Token::LParen | Token::Period => {
                let mut id_parts: Vec<Ident> = vec![w.to_ident(w_span)];
                let mut ending_wildcard: Option<TokenWithSpan> = None;
                // Collect the dotted chain: ident (. ident)* [. *]
                while self.consume_token(&Token::Period) {
                    let next_token = self.next_token();
                    match next_token.token {
                        Token::Word(w) => id_parts.push(w.to_ident(next_token.span)),
                        Token::Mul => {
                            // Only Postgres accepts a trailing `.*` here.
                            if dialect_of!(self is PostgreSqlDialect) {
                                ending_wildcard = Some(next_token);
                                break;
                            } else {
                                return self.expected("an identifier after '.'", next_token);
                            }
                        }
                        Token::SingleQuotedString(s) => id_parts.push(Ident::with_quote('\'', s)),
                        _ => {
                            return self.expected("an identifier or a '*' after '.'", next_token);
                        }
                    }
                }

                if let Some(wildcard_token) = ending_wildcard {
                    Ok(Expr::QualifiedWildcard(
                        ObjectName(id_parts),
                        AttachedToken(wildcard_token),
                    ))
                } else if self.consume_token(&Token::LParen) {
                    // Snowflake/MsSql `ident(+)` is outer-join syntax, not a call.
                    if dialect_of!(self is SnowflakeDialect | MsSqlDialect)
                        && self.consume_tokens(&[Token::Plus, Token::RParen])
                    {
                        Ok(Expr::OuterJoin(Box::new(
                            match <[Ident; 1]>::try_from(id_parts) {
                                Ok([ident]) => Expr::Identifier(ident),
                                Err(parts) => Expr::CompoundIdentifier(parts),
                            },
                        )))
                    } else {
                        // Re-read the `(` as part of the function call.
                        self.prev_token();
                        self.parse_function(ObjectName(id_parts))
                    }
                } else {
                    Ok(Expr::CompoundIdentifier(id_parts))
                }
            }
            // `_introducer'string'` — string literal with a leading introducer.
            Token::SingleQuotedString(_)
            | Token::DoubleQuotedString(_)
            | Token::HexStringLiteral(_)
                if w.value.starts_with('_') =>
            {
                Ok(Expr::IntroducedString {
                    introducer: w.value.clone(),
                    value: self.parse_introduced_string_value()?,
                })
            }
            // `x -> expr` single-parameter lambda.
            Token::Arrow if self.dialect.supports_lambda_functions() => {
                self.expect_token(&Token::Arrow)?;
                Ok(Expr::Lambda(LambdaFunction {
                    params: OneOrManyWithParens::One(w.to_ident(w_span)),
                    body: Box::new(self.parse_expr()?),
                }))
            }
            _ => Ok(Expr::Identifier(w.to_ident(w_span))),
        }
    }
1217
    /// Parse an expression prefix (the part of an expression before any
    /// infix operator): literals, unary operators, parenthesized
    /// expressions/tuples/subqueries, typed strings, etc.
    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
        // Allow the dialect to completely override prefix parsing.
        if let Some(prefix) = self.dialect.parse_prefix(self) {
            return prefix;
        }

        let loc = self.peek_token().span.start;
        // First try `<data_type> '<literal>'` (e.g. `DATE '2020-01-01'`).
        // `maybe_parse` rewinds the token stream if the closure fails.
        let opt_expr = self.maybe_parse(|parser| {
            match parser.parse_data_type()? {
                DataType::Interval => parser.parse_interval(),
                // A "custom" data type here is really just an identifier
                // (e.g. a function name), so abort this attempt; the
                // "dummy" message is never surfaced to the caller.
                DataType::Custom(..) => parser_err!("dummy", loc),
                data_type => Ok(Expr::TypedString {
                    data_type,
                    value: parser.parse_literal_string()?,
                }),
            }
        })?;

        if let Some(expr) = opt_expr {
            return Ok(expr);
        }

        let next_token = self.next_token();
        let expr = match next_token.token {
            Token::Word(w) => {
                // Try the word as a reserved keyword first; on error, and if
                // the dialect still allows it as an identifier, retry as an
                // unreserved word before surfacing the original error.
                match self.try_parse(|parser| {
                    parser.parse_expr_prefix_by_reserved_word(&w, next_token.span)
                }) {
                    Ok(Some(expr)) => Ok(expr),
                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, next_token.span)?),
                    Err(e) => {
                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
                                parser.parse_expr_prefix_by_unreserved_word(&w, next_token.span)
                            }) {
                                return Ok(expr);
                            }
                        }
                        return Err(e);
                    }
                }
            }
            Token::LBracket => self.parse_array_expr(false),
            tok @ Token::Minus | tok @ Token::Plus => {
                let op = if tok == Token::Plus {
                    UnaryOperator::Plus
                } else {
                    UnaryOperator::Minus
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
                    ),
                })
            }
            Token::ExclamationMark if self.dialect.supports_bang_not_operator() => {
                Ok(Expr::UnaryOp {
                    op: UnaryOperator::BangNot,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
                    ),
                })
            }
            // PostgreSQL-only prefix operators.
            tok @ Token::DoubleExclamationMark
            | tok @ Token::PGSquareRoot
            | tok @ Token::PGCubeRoot
            | tok @ Token::AtSign
            | tok @ Token::Tilde
                if dialect_of!(self is PostgreSqlDialect) =>
            {
                let op = match tok {
                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
                    Token::AtSign => UnaryOperator::PGAbs,
                    Token::Tilde => UnaryOperator::PGBitwiseNot,
                    _ => unreachable!(),
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
                    ),
                })
            }
            Token::EscapedStringLiteral(_) if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::UnicodeStringLiteral(_) => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            // All plain literal tokens: rewind and let parse_value handle them.
            Token::Number(_, _)
            | Token::SingleQuotedString(_)
            | Token::DoubleQuotedString(_)
            | Token::TripleSingleQuotedString(_)
            | Token::TripleDoubleQuotedString(_)
            | Token::DollarQuotedString(_)
            | Token::SingleQuotedByteStringLiteral(_)
            | Token::DoubleQuotedByteStringLiteral(_)
            | Token::TripleSingleQuotedByteStringLiteral(_)
            | Token::TripleDoubleQuotedByteStringLiteral(_)
            | Token::SingleQuotedRawStringLiteral(_)
            | Token::DoubleQuotedRawStringLiteral(_)
            | Token::TripleSingleQuotedRawStringLiteral(_)
            | Token::TripleDoubleQuotedRawStringLiteral(_)
            | Token::NationalStringLiteral(_)
            | Token::HexStringLiteral(_) => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::LParen => {
                // `(` may open a subquery, a lambda parameter list, a nested
                // expression, or a tuple.
                let expr = if let Some(expr) = self.try_parse_expr_sub_query()? {
                    expr
                } else if let Some(lambda) = self.try_parse_lambda()? {
                    return Ok(lambda);
                } else {
                    let exprs = self.parse_comma_separated(Parser::parse_expr)?;
                    match exprs.len() {
                        // parse_comma_separated never yields an empty list.
                        0 => unreachable!(),
                        1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
                        _ => Expr::Tuple(exprs),
                    }
                };
                self.expect_token(&Token::RParen)?;
                let expr = self.try_parse_method(expr)?;
                if !self.consume_token(&Token::Period) {
                    Ok(expr)
                } else {
                    // `(expr).field` — composite access.
                    let tok = self.next_token();
                    let key = match tok.token {
                        Token::Word(word) => word.to_ident(tok.span),
                        _ => {
                            return parser_err!(
                                format!("Expected identifier, found: {tok}"),
                                tok.span.start
                            )
                        }
                    };
                    Ok(Expr::CompositeAccess {
                        expr: Box::new(expr),
                        key,
                    })
                }
            }
            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::LBrace => {
                self.prev_token();
                self.parse_lbrace_expr()
            }
            _ => self.expected("an expression", next_token),
        }?;

        let expr = self.try_parse_method(expr)?;

        // An optional trailing `COLLATE <name>` applies to the whole prefix.
        if self.parse_keyword(Keyword::COLLATE) {
            Ok(Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            })
        } else {
            Ok(expr)
        }
    }
1431
1432 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
1433 self.expect_token(&Token::LParen)?;
1434 let options = self.parse_comma_separated(Self::parse_utility_option)?;
1435 self.expect_token(&Token::RParen)?;
1436
1437 Ok(options)
1438 }
1439
1440 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
1441 let name = self.parse_identifier(false)?;
1442
1443 let next_token = self.peek_token();
1444 if next_token == Token::Comma || next_token == Token::RParen {
1445 return Ok(UtilityOption { name, arg: None });
1446 }
1447 let arg = self.parse_expr()?;
1448
1449 Ok(UtilityOption {
1450 name,
1451 arg: Some(arg),
1452 })
1453 }
1454
1455 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
1456 if !self.peek_sub_query() {
1457 return Ok(None);
1458 }
1459
1460 Ok(Some(Expr::Subquery(self.parse_query()?)))
1461 }
1462
1463 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
1464 if !self.dialect.supports_lambda_functions() {
1465 return Ok(None);
1466 }
1467 self.maybe_parse(|p| {
1468 let params = p.parse_comma_separated(|p| p.parse_identifier(false))?;
1469 p.expect_token(&Token::RParen)?;
1470 p.expect_token(&Token::Arrow)?;
1471 let expr = p.parse_expr()?;
1472 Ok(Expr::Lambda(LambdaFunction {
1473 params: OneOrManyWithParens::Many(params),
1474 body: Box::new(expr),
1475 }))
1476 })
1477 }
1478
1479 fn try_parse_method(&mut self, expr: Expr) -> Result<Expr, ParserError> {
1481 if !self.dialect.supports_methods() {
1482 return Ok(expr);
1483 }
1484 let method_chain = self.maybe_parse(|p| {
1485 let mut method_chain = Vec::new();
1486 while p.consume_token(&Token::Period) {
1487 let tok = p.next_token();
1488 let name = match tok.token {
1489 Token::Word(word) => word.to_ident(tok.span),
1490 _ => return p.expected("identifier", tok),
1491 };
1492 let func = match p.parse_function(ObjectName(vec![name]))? {
1493 Expr::Function(func) => func,
1494 _ => return p.expected("function", p.peek_token()),
1495 };
1496 method_chain.push(func);
1497 }
1498 if !method_chain.is_empty() {
1499 Ok(method_chain)
1500 } else {
1501 p.expected("function", p.peek_token())
1502 }
1503 })?;
1504 if let Some(method_chain) = method_chain {
1505 Ok(Expr::Method(Method {
1506 expr: Box::new(expr),
1507 method_chain,
1508 }))
1509 } else {
1510 Ok(expr)
1511 }
1512 }
1513
1514 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
1523 self.maybe_parse(|p| {
1524 p.expect_keyword(Keyword::FN)?;
1525 let fn_name = p.parse_object_name(false)?;
1526 let mut fn_call = p.parse_function_call(fn_name)?;
1527 fn_call.uses_odbc_syntax = true;
1528 Ok(Expr::Function(fn_call))
1529 })
1530 }
1531
1532 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
1533 self.parse_function_call(name).map(Expr::Function)
1534 }
1535
    /// Parse a call to function `name`, starting at its opening `(`:
    /// the argument list plus any trailing WITHIN GROUP, FILTER,
    /// IGNORE/RESPECT NULLS, and OVER clauses, in that order.
    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Snowflake permits a bare subquery as the argument list,
        // e.g. `fn(SELECT ...)`.
        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Function {
                name,
                uses_odbc_syntax: false,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            });
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // ClickHouse parameterized functions: `func(params)(args)` — the
        // first list becomes `parameters`, the second the actual arguments.
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        // WITHIN GROUP (ORDER BY ...) for ordered-set aggregates.
        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        // FILTER (WHERE ...), only in dialects that support it.
        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // Only look for IGNORE/RESPECT NULLS here if it was not already
        // attached to an argument inside the argument list.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        // OVER (window spec) or OVER window_name.
        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                Some(WindowType::NamedWindow(self.parse_identifier(false)?))
            }
        } else {
            None
        };

        Ok(Function {
            name,
            uses_odbc_syntax: false,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        })
    }
1623
1624 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
1626 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
1627 Some(keyword) => {
1628 self.expect_keyword(Keyword::NULLS)?;
1629
1630 Ok(match keyword {
1631 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
1632 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
1633 _ => None,
1634 })
1635 }
1636 None => Ok(None),
1637 }
1638 }
1639
1640 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
1641 let args = if self.consume_token(&Token::LParen) {
1642 FunctionArguments::List(self.parse_function_argument_list()?)
1643 } else {
1644 FunctionArguments::None
1645 };
1646 Ok(Expr::Function(Function {
1647 name,
1648 uses_odbc_syntax: false,
1649 parameters: FunctionArguments::None,
1650 args,
1651 filter: None,
1652 over: None,
1653 null_treatment: None,
1654 within_group: vec![],
1655 }))
1656 }
1657
1658 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
1659 let next_token = self.next_token();
1660 match &next_token.token {
1661 Token::Word(w) => match w.keyword {
1662 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
1663 Keyword::RANGE => Ok(WindowFrameUnits::Range),
1664 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
1665 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
1666 },
1667 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
1668 }
1669 }
1670
1671 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
1672 let units = self.parse_window_frame_units()?;
1673 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
1674 let start_bound = self.parse_window_frame_bound()?;
1675 self.expect_keyword(Keyword::AND)?;
1676 let end_bound = Some(self.parse_window_frame_bound()?);
1677 (start_bound, end_bound)
1678 } else {
1679 (self.parse_window_frame_bound()?, None)
1680 };
1681 Ok(WindowFrame {
1682 units,
1683 start_bound,
1684 end_bound,
1685 })
1686 }
1687
1688 pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
1690 if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
1691 Ok(WindowFrameBound::CurrentRow)
1692 } else {
1693 let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
1694 None
1695 } else {
1696 Some(Box::new(match self.peek_token().token {
1697 Token::SingleQuotedString(_) => self.parse_interval()?,
1698 _ => self.parse_expr()?,
1699 }))
1700 };
1701 if self.parse_keyword(Keyword::PRECEDING) {
1702 Ok(WindowFrameBound::Preceding(rows))
1703 } else if self.parse_keyword(Keyword::FOLLOWING) {
1704 Ok(WindowFrameBound::Following(rows))
1705 } else {
1706 self.expected("PRECEDING or FOLLOWING", self.peek_token())
1707 }
1708 }
1709 }
1710
1711 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
1713 if self.dialect.supports_group_by_expr() {
1714 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
1715 self.expect_token(&Token::LParen)?;
1716 let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
1717 self.expect_token(&Token::RParen)?;
1718 Ok(Expr::GroupingSets(result))
1719 } else if self.parse_keyword(Keyword::CUBE) {
1720 self.expect_token(&Token::LParen)?;
1721 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
1722 self.expect_token(&Token::RParen)?;
1723 Ok(Expr::Cube(result))
1724 } else if self.parse_keyword(Keyword::ROLLUP) {
1725 self.expect_token(&Token::LParen)?;
1726 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
1727 self.expect_token(&Token::RParen)?;
1728 Ok(Expr::Rollup(result))
1729 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
1730 Ok(Expr::Tuple(vec![]))
1734 } else {
1735 self.parse_expr()
1736 }
1737 } else {
1738 self.parse_expr()
1740 }
1741 }
1742
1743 fn parse_tuple(
1747 &mut self,
1748 lift_singleton: bool,
1749 allow_empty: bool,
1750 ) -> Result<Vec<Expr>, ParserError> {
1751 if lift_singleton {
1752 if self.consume_token(&Token::LParen) {
1753 let result = if allow_empty && self.consume_token(&Token::RParen) {
1754 vec![]
1755 } else {
1756 let result = self.parse_comma_separated(Parser::parse_expr)?;
1757 self.expect_token(&Token::RParen)?;
1758 result
1759 };
1760 Ok(result)
1761 } else {
1762 Ok(vec![self.parse_expr()?])
1763 }
1764 } else {
1765 self.expect_token(&Token::LParen)?;
1766 let result = if allow_empty && self.consume_token(&Token::RParen) {
1767 vec![]
1768 } else {
1769 let result = self.parse_comma_separated(Parser::parse_expr)?;
1770 self.expect_token(&Token::RParen)?;
1771 result
1772 };
1773 Ok(result)
1774 }
1775 }
1776
1777 pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
1778 let mut operand = None;
1779 if !self.parse_keyword(Keyword::WHEN) {
1780 operand = Some(Box::new(self.parse_expr()?));
1781 self.expect_keyword(Keyword::WHEN)?;
1782 }
1783 let mut conditions = vec![];
1784 let mut results = vec![];
1785 loop {
1786 conditions.push(self.parse_expr()?);
1787 self.expect_keyword(Keyword::THEN)?;
1788 results.push(self.parse_expr()?);
1789 if !self.parse_keyword(Keyword::WHEN) {
1790 break;
1791 }
1792 }
1793 let else_result = if self.parse_keyword(Keyword::ELSE) {
1794 Some(Box::new(self.parse_expr()?))
1795 } else {
1796 None
1797 };
1798 self.expect_keyword(Keyword::END)?;
1799 Ok(Expr::Case {
1800 operand,
1801 conditions,
1802 results,
1803 else_result,
1804 })
1805 }
1806
1807 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
1808 if self.parse_keyword(Keyword::FORMAT) {
1809 let value = self.parse_value()?;
1810 match self.parse_optional_time_zone()? {
1811 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
1812 None => Ok(Some(CastFormat::Value(value))),
1813 }
1814 } else {
1815 Ok(None)
1816 }
1817 }
1818
1819 pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
1820 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
1821 self.parse_value().map(Some)
1822 } else {
1823 Ok(None)
1824 }
1825 }
1826
1827 fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
1829 self.expect_token(&Token::LParen)?;
1830 let data_type = self.parse_data_type()?;
1831 self.expect_token(&Token::Comma)?;
1832 let expr = self.parse_expr()?;
1833 let styles = if self.consume_token(&Token::Comma) {
1834 self.parse_comma_separated(Parser::parse_expr)?
1835 } else {
1836 Default::default()
1837 };
1838 self.expect_token(&Token::RParen)?;
1839 Ok(Expr::Convert {
1840 is_try,
1841 expr: Box::new(expr),
1842 data_type: Some(data_type),
1843 charset: None,
1844 target_before_value: true,
1845 styles,
1846 })
1847 }
1848
1849 pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
1854 if self.dialect.convert_type_before_value() {
1855 return self.parse_mssql_convert(is_try);
1856 }
1857 self.expect_token(&Token::LParen)?;
1858 let expr = self.parse_expr()?;
1859 if self.parse_keyword(Keyword::USING) {
1860 let charset = self.parse_object_name(false)?;
1861 self.expect_token(&Token::RParen)?;
1862 return Ok(Expr::Convert {
1863 is_try,
1864 expr: Box::new(expr),
1865 data_type: None,
1866 charset: Some(charset),
1867 target_before_value: false,
1868 styles: vec![],
1869 });
1870 }
1871 self.expect_token(&Token::Comma)?;
1872 let data_type = self.parse_data_type()?;
1873 let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
1874 Some(self.parse_object_name(false)?)
1875 } else {
1876 None
1877 };
1878 self.expect_token(&Token::RParen)?;
1879 Ok(Expr::Convert {
1880 is_try,
1881 expr: Box::new(expr),
1882 data_type: Some(data_type),
1883 charset,
1884 target_before_value: false,
1885 styles: vec![],
1886 })
1887 }
1888
1889 pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
1891 self.expect_token(&Token::LParen)?;
1892 let expr = self.parse_expr()?;
1893 self.expect_keyword(Keyword::AS)?;
1894 let data_type = self.parse_data_type()?;
1895 let format = self.parse_optional_cast_format()?;
1896 self.expect_token(&Token::RParen)?;
1897 Ok(Expr::Cast {
1898 kind,
1899 expr: Box::new(expr),
1900 data_type,
1901 format,
1902 })
1903 }
1904
1905 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
1907 self.expect_token(&Token::LParen)?;
1908 let exists_node = Expr::Exists {
1909 negated,
1910 subquery: self.parse_query()?,
1911 };
1912 self.expect_token(&Token::RParen)?;
1913 Ok(exists_node)
1914 }
1915
1916 pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
1917 self.expect_token(&Token::LParen)?;
1918 let field = self.parse_date_time_field()?;
1919
1920 let syntax = if self.parse_keyword(Keyword::FROM) {
1921 ExtractSyntax::From
1922 } else if self.consume_token(&Token::Comma)
1923 && dialect_of!(self is SnowflakeDialect | GenericDialect)
1924 {
1925 ExtractSyntax::Comma
1926 } else {
1927 return Err(ParserError::ParserError(
1928 "Expected 'FROM' or ','".to_string(),
1929 ));
1930 };
1931
1932 let expr = self.parse_expr()?;
1933 self.expect_token(&Token::RParen)?;
1934 Ok(Expr::Extract {
1935 field,
1936 expr: Box::new(expr),
1937 syntax,
1938 })
1939 }
1940
1941 pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
1942 self.expect_token(&Token::LParen)?;
1943 let expr = self.parse_expr()?;
1944 let field = if self.parse_keyword(Keyword::TO) {
1946 CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
1948 } else if self.consume_token(&Token::Comma) {
1949 match self.parse_value()? {
1951 Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
1952 _ => {
1953 return Err(ParserError::ParserError(
1954 "Scale field can only be of number type".to_string(),
1955 ))
1956 }
1957 }
1958 } else {
1959 CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
1960 };
1961 self.expect_token(&Token::RParen)?;
1962 if is_ceil {
1963 Ok(Expr::Ceil {
1964 expr: Box::new(expr),
1965 field,
1966 })
1967 } else {
1968 Ok(Expr::Floor {
1969 expr: Box::new(expr),
1970 field,
1971 })
1972 }
1973 }
1974
1975 pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
1976 let between_prec = self.dialect.prec_value(Precedence::Between);
1977 let position_expr = self.maybe_parse(|p| {
1978 p.expect_token(&Token::LParen)?;
1980
1981 let expr = p.parse_subexpr(between_prec)?;
1983 p.expect_keyword(Keyword::IN)?;
1984 let from = p.parse_expr()?;
1985 p.expect_token(&Token::RParen)?;
1986 Ok(Expr::Position {
1987 expr: Box::new(expr),
1988 r#in: Box::new(from),
1989 })
1990 })?;
1991 match position_expr {
1992 Some(expr) => Ok(expr),
1993 None => self.parse_function(ObjectName(vec![ident])),
1996 }
1997 }
1998
1999 pub fn parse_substring_expr(&mut self) -> Result<Expr, ParserError> {
2000 self.expect_token(&Token::LParen)?;
2002 let expr = self.parse_expr()?;
2003 let mut from_expr = None;
2004 let special = self.consume_token(&Token::Comma);
2005 if special || self.parse_keyword(Keyword::FROM) {
2006 from_expr = Some(self.parse_expr()?);
2007 }
2008
2009 let mut to_expr = None;
2010 if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2011 to_expr = Some(self.parse_expr()?);
2012 }
2013 self.expect_token(&Token::RParen)?;
2014
2015 Ok(Expr::Substring {
2016 expr: Box::new(expr),
2017 substring_from: from_expr.map(Box::new),
2018 substring_for: to_expr.map(Box::new),
2019 special,
2020 })
2021 }
2022
2023 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2024 self.expect_token(&Token::LParen)?;
2026 let expr = self.parse_expr()?;
2027 self.expect_keyword(Keyword::PLACING)?;
2028 let what_expr = self.parse_expr()?;
2029 self.expect_keyword(Keyword::FROM)?;
2030 let from_expr = self.parse_expr()?;
2031 let mut for_expr = None;
2032 if self.parse_keyword(Keyword::FOR) {
2033 for_expr = Some(self.parse_expr()?);
2034 }
2035 self.expect_token(&Token::RParen)?;
2036
2037 Ok(Expr::Overlay {
2038 expr: Box::new(expr),
2039 overlay_what: Box::new(what_expr),
2040 overlay_from: Box::new(from_expr),
2041 overlay_for: for_expr.map(Box::new),
2042 })
2043 }
2044
    /// Parse the body of TRIM, supporting:
    /// `([BOTH|LEADING|TRAILING] <expr> [FROM <expr>])` and the
    /// Snowflake/BigQuery comma form `(<expr>, <chars>...)`.
    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let mut trim_where = None;
        if let Token::Word(word) = self.peek_token().token {
            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING]
                .iter()
                .any(|d| word.keyword == *d)
            {
                trim_where = Some(self.parse_trim_where()?);
            }
        }
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::FROM) {
            // `TRIM(chars FROM str)` — the expression parsed above was the
            // characters to trim; the one after FROM is the target string.
            let trim_what = Box::new(expr);
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: Some(trim_what),
                trim_characters: None,
            })
        } else if self.consume_token(&Token::Comma)
            && dialect_of!(self is SnowflakeDialect | BigQueryDialect | GenericDialect)
        {
            // `TRIM(str, chars...)` — comma-separated trim characters.
            let characters = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where: None,
                trim_what: None,
                trim_characters: Some(characters),
            })
        } else {
            // Plain `TRIM([where] str)`.
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: None,
                trim_characters: None,
            })
        }
    }
2093
2094 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2095 let next_token = self.next_token();
2096 match &next_token.token {
2097 Token::Word(w) => match w.keyword {
2098 Keyword::BOTH => Ok(TrimWhereField::Both),
2099 Keyword::LEADING => Ok(TrimWhereField::Leading),
2100 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2101 _ => self.expected("trim_where field", next_token)?,
2102 },
2103 _ => self.expected("trim_where field", next_token),
2104 }
2105 }
2106
2107 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2110 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2111 self.expect_token(&Token::RBracket)?;
2112 Ok(Expr::Array(Array { elem: exprs, named }))
2113 }
2114
    /// Parse the optional `ON OVERFLOW ...` clause of LISTAGG:
    /// `ON OVERFLOW ERROR` or
    /// `ON OVERFLOW TRUNCATE ['filler'] WITH|WITHOUT COUNT`.
    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Ok(Some(ListAggOnOverflow::Error))
            } else {
                self.expect_keyword(Keyword::TRUNCATE)?;
                // An optional string-literal filler may appear before
                // WITH/WITHOUT; peek to distinguish the cases.
                let filler = match self.peek_token().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::UnicodeStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token(),
                    )?,
                };
                // Exactly one of WITH / WITHOUT must follow, then COUNT.
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
                }
                self.expect_keyword(Keyword::COUNT)?;
                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
            }
        } else {
            Ok(None)
        }
    }
2148
    /// Parse a date/time unit keyword (YEAR, MONTH, ...), used by EXTRACT,
    /// CEIL/FLOOR, and INTERVAL. Dialect hooks allow custom identifiers
    /// and single-quoted units.
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::WEEK => {
                    // BigQuery allows an optional weekday argument,
                    // e.g. WEEK(MONDAY).
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier(false)?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                // Both spellings are kept as distinct variants.
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                // Unknown word: accept it as a custom unit if the dialect
                // allows it (rewind so it is re-read as an identifier).
                _ if self.dialect.allow_extract_custom() => {
                    self.prev_token();
                    let custom = self.parse_identifier(false)?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            // Some dialects accept a quoted unit, e.g. EXTRACT('year' ...).
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                self.prev_token();
                let custom = self.parse_identifier(false)?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
2218
2219 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2220 match self.peek_token().token {
2221 Token::Word(w) => match w.keyword {
2222 Keyword::EXISTS => {
2223 let negated = true;
2224 let _ = self.parse_keyword(Keyword::EXISTS);
2225 self.parse_exists_expr(negated)
2226 }
2227 _ => Ok(Expr::UnaryOp {
2228 op: UnaryOperator::Not,
2229 expr: Box::new(
2230 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2231 ),
2232 }),
2233 },
2234 _ => Ok(Expr::UnaryOp {
2235 op: UnaryOperator::Not,
2236 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2237 }),
2238 }
2239 }
2240
2241 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
2251 let token = self.expect_token(&Token::LBrace)?;
2252
2253 if let Some(fn_expr) = self.maybe_parse_odbc_fn_body()? {
2254 self.expect_token(&Token::RBrace)?;
2255 return Ok(fn_expr);
2256 }
2257
2258 if self.dialect.supports_dictionary_syntax() {
2259 self.prev_token(); return self.parse_duckdb_struct_literal();
2261 }
2262
2263 self.expected("an expression", token)
2264 }
2265
2266 pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
2272 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
2273
2274 self.expect_keyword(Keyword::AGAINST)?;
2275
2276 self.expect_token(&Token::LParen)?;
2277
2278 let match_value = self.parse_value()?;
2280
2281 let in_natural_language_mode_keywords = &[
2282 Keyword::IN,
2283 Keyword::NATURAL,
2284 Keyword::LANGUAGE,
2285 Keyword::MODE,
2286 ];
2287
2288 let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
2289
2290 let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
2291
2292 let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
2293 if self.parse_keywords(with_query_expansion_keywords) {
2294 Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
2295 } else {
2296 Some(SearchModifier::InNaturalLanguageMode)
2297 }
2298 } else if self.parse_keywords(in_boolean_mode_keywords) {
2299 Some(SearchModifier::InBooleanMode)
2300 } else if self.parse_keywords(with_query_expansion_keywords) {
2301 Some(SearchModifier::WithQueryExpansion)
2302 } else {
2303 None
2304 };
2305
2306 self.expect_token(&Token::RParen)?;
2307
2308 Ok(Expr::MatchAgainst {
2309 columns,
2310 match_value,
2311 opt_search_modifier,
2312 })
2313 }
2314
    /// Parse an INTERVAL literal (the INTERVAL keyword is already consumed),
    /// e.g. `INTERVAL '1' DAY` or `INTERVAL '1-2' YEAR TO MONTH`.
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        // The interval's value. Dialects that require a unit qualifier can
        // accept a full expression; otherwise only a prefix expression is
        // parsed so the trailing unit keyword is not swallowed as part of
        // the value.
        let value = if self.dialect.require_interval_qualifier() {
            self.parse_expr()?
        } else {
            self.parse_prefix()?
        };

        // Optional leading unit (the DAY in `INTERVAL '1' DAY`); mandatory
        // for dialects that require a qualifier.
        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token().span.start
            );
        } else {
            None
        };

        // SECOND may carry `(precision[, fractional_seconds_precision])`;
        // other units take an optional precision and optionally
        // `TO <unit>[(fsec_precision)]`.
        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    // Fractional precision is only meaningful when the range
                    // ends in SECOND.
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
2397
2398 pub fn next_token_is_temporal_unit(&mut self) -> bool {
2401 if let Token::Word(word) = self.peek_token().token {
2402 matches!(
2403 word.keyword,
2404 Keyword::YEAR
2405 | Keyword::MONTH
2406 | Keyword::WEEK
2407 | Keyword::DAY
2408 | Keyword::HOUR
2409 | Keyword::MINUTE
2410 | Keyword::SECOND
2411 | Keyword::CENTURY
2412 | Keyword::DECADE
2413 | Keyword::DOW
2414 | Keyword::DOY
2415 | Keyword::EPOCH
2416 | Keyword::ISODOW
2417 | Keyword::ISOYEAR
2418 | Keyword::JULIAN
2419 | Keyword::MICROSECOND
2420 | Keyword::MICROSECONDS
2421 | Keyword::MILLENIUM
2422 | Keyword::MILLENNIUM
2423 | Keyword::MILLISECOND
2424 | Keyword::MILLISECONDS
2425 | Keyword::NANOSECOND
2426 | Keyword::NANOSECONDS
2427 | Keyword::QUARTER
2428 | Keyword::TIMEZONE
2429 | Keyword::TIMEZONE_HOUR
2430 | Keyword::TIMEZONE_MINUTE
2431 )
2432 } else {
2433 false
2434 }
2435 }
2436
    /// Parse a STRUCT literal, e.g. `STRUCT<INT64, STRING>(1, 'a')` or the
    /// untyped `STRUCT(1 AS x)`. Called with the STRUCT keyword already
    /// consumed.
    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
        // Rewind so parse_struct_type_def can expect the STRUCT keyword.
        self.prev_token();
        let (fields, trailing_bracket) =
            self.parse_struct_type_def(Self::parse_struct_field_def)?;
        if trailing_bracket.0 {
            return parser_err!(
                "unmatched > in STRUCT literal",
                self.peek_token().span.start
            );
        }

        // Parenthesized field values; `AS name` is only allowed when no
        // typed field list was given (hence the `!fields.is_empty()` flag).
        self.expect_token(&Token::LParen)?;
        let values = self
            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Struct { values, fields })
    }
2464
    /// Parse one value of a struct literal, optionally named via `AS`.
    ///
    /// `typed_syntax` is true when the literal carried an explicit
    /// `STRUCT<...>` type definition, in which case `AS`-naming is rejected.
    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::AS) {
            if typed_syntax {
                return parser_err!("Typed syntax does not allow AS", {
                    // Point the error location at the AS keyword itself.
                    self.prev_token();
                    self.peek_token().span.start
                });
            }
            let field_name = self.parse_identifier(false)?;
            Ok(Expr::Named {
                expr: expr.into(),
                name: field_name,
            })
        } else {
            Ok(expr)
        }
    }
2496
    /// Parse a `STRUCT<field, ...>` type definition, using `elem_parser`
    /// for each field.
    ///
    /// Returns the fields together with whether the closing bracket was
    /// consumed as part of a `>>` token (meaning one closing bracket is
    /// still owed to an enclosing type).
    fn parse_struct_type_def<F>(
        &mut self,
        mut elem_parser: F,
    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
    {
        let start_token = self.peek_token();
        self.expect_keyword(Keyword::STRUCT)?;

        // A bare `STRUCT` with no `<...>` is valid and has no field list.
        if Token::Lt != self.peek_token() {
            return Ok((Default::default(), false.into()));
        }
        self.next_token();

        let mut field_defs = vec![];
        let trailing_bracket = loop {
            let (def, trailing_bracket) = elem_parser(self)?;
            field_defs.push(def);
            if !self.consume_token(&Token::Comma) {
                break trailing_bracket;
            }

            // A `>>` consumed mid-list means a nested type closed one
            // bracket too many, e.g. `STRUCT<a STRUCT<b INT>>, c INT>`.
            if trailing_bracket.0 {
                return parser_err!("unmatched > in STRUCT definition", start_token.span.start);
            }
        };

        Ok((
            field_defs,
            self.expect_closing_angle_bracket(trailing_bracket)?,
        ))
    }
2546
2547 fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
2549 self.expect_keyword(Keyword::STRUCT)?;
2550 self.expect_token(&Token::LParen)?;
2551 let struct_body = self.parse_comma_separated(|parser| {
2552 let field_name = parser.parse_identifier(false)?;
2553 let field_type = parser.parse_data_type()?;
2554
2555 Ok(StructField {
2556 field_name: Some(field_name),
2557 field_type,
2558 })
2559 });
2560 self.expect_token(&Token::RParen)?;
2561 struct_body
2562 }
2563
    /// Parse one field of a `STRUCT<...>` type definition: either
    /// `name type` or an anonymous bare `type`.
    ///
    /// Also returns whether the field's type consumed a trailing `>` as
    /// part of a `>>` token.
    fn parse_struct_field_def(
        &mut self,
    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
        // Two consecutive words means `name type`; anything else is a bare type.
        let is_anonymous_field = !matches!(
            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
            (Token::Word(_), Token::Word(_))
        );

        let field_name = if is_anonymous_field {
            None
        } else {
            Some(self.parse_identifier(false)?)
        };

        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;

        Ok((
            StructField {
                field_name,
                field_type,
            },
            trailing_bracket,
        ))
    }
2599
2600 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
2610 self.expect_keyword(Keyword::UNION)?;
2611
2612 self.expect_token(&Token::LParen)?;
2613
2614 let fields = self.parse_comma_separated(|p| {
2615 Ok(UnionField {
2616 field_name: p.parse_identifier(false)?,
2617 field_type: p.parse_data_type()?,
2618 })
2619 })?;
2620
2621 self.expect_token(&Token::RParen)?;
2622
2623 Ok(fields)
2624 }
2625
    /// Parse a DuckDB struct literal: a brace-delimited, comma-separated
    /// list of `key: value` entries, e.g. `{'a': 1, 'b': 2}`.
    fn parse_duckdb_struct_literal(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LBrace)?;

        let fields = self.parse_comma_separated(Self::parse_duckdb_dictionary_field)?;

        self.expect_token(&Token::RBrace)?;

        Ok(Expr::Dictionary(fields))
    }
2644
2645 fn parse_duckdb_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
2655 let key = self.parse_identifier(false)?;
2656
2657 self.expect_token(&Token::Colon)?;
2658
2659 let expr = self.parse_expr()?;
2660
2661 Ok(DictionaryField {
2662 key,
2663 value: Box::new(expr),
2664 })
2665 }
2666
    /// Parse a DuckDB map literal: a brace-delimited, possibly empty,
    /// comma-separated list of `key: value` entries.
    fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LBrace)?;
        let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
        self.expect_token(&Token::RBrace)?;
        Ok(Expr::Map(Map { entries: fields }))
    }
2682
2683 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
2693 let key = self.parse_expr()?;
2694
2695 self.expect_token(&Token::Colon)?;
2696
2697 let value = self.parse_expr()?;
2698
2699 Ok(MapEntry {
2700 key: Box::new(key),
2701 value: Box::new(value),
2702 })
2703 }
2704
2705 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
2715 self.expect_keyword(Keyword::MAP)?;
2716 self.expect_token(&Token::LParen)?;
2717 let key_data_type = self.parse_data_type()?;
2718 self.expect_token(&Token::Comma)?;
2719 let value_data_type = self.parse_data_type()?;
2720 self.expect_token(&Token::RParen)?;
2721
2722 Ok((key_data_type, value_data_type))
2723 }
2724
2725 fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
2735 self.expect_keyword(Keyword::TUPLE)?;
2736 self.expect_token(&Token::LParen)?;
2737 let mut field_defs = vec![];
2738 loop {
2739 let (def, _) = self.parse_struct_field_def()?;
2740 field_defs.push(def);
2741 if !self.consume_token(&Token::Comma) {
2742 break;
2743 }
2744 }
2745 self.expect_token(&Token::RParen)?;
2746
2747 Ok(field_defs)
2748 }
2749
    /// Consume the `>` that closes a `STRUCT<...>` type, unless a previous
    /// step already matched it as part of a `>>` token.
    ///
    /// Returns whether a `>>` was consumed here, i.e. whether one closing
    /// bracket is still owed to an enclosing type.
    fn expect_closing_angle_bracket(
        &mut self,
        trailing_bracket: MatchedTrailingBracket,
    ) -> Result<MatchedTrailingBracket, ParserError> {
        let trailing_bracket = if !trailing_bracket.0 {
            match self.peek_token().token {
                Token::Gt => {
                    self.next_token();
                    false.into()
                }
                // `>>` closes this bracket and leaves one for the parent.
                Token::ShiftRight => {
                    self.next_token();
                    true.into()
                }
                _ => return self.expected(">", self.peek_token()),
            }
        } else {
            // The bracket was already consumed upstream; nothing left over.
            false.into()
        };

        Ok(trailing_bracket)
    }
2776
    /// Parse an operator (and its right-hand side) that follows the
    /// already-parsed expression `expr`, producing the combined expression:
    /// binary operators, `IS`/`IN`/`BETWEEN`/`LIKE` forms, `::` casts,
    /// postfix `!`, subscripts, map access, and JSON access.
    ///
    /// `precedence` is the binding power used when parsing the right-hand
    /// side of a binary operator.
    pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
        // Give the dialect a chance to handle the infix construct first.
        if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
            return infix;
        }

        // Taken mutably so `CustomBinaryOperator`'s text can be moved out below.
        let mut tok = self.next_token();
        let regular_binary_operator = match &mut tok.token {
            Token::Spaceship => Some(BinaryOperator::Spaceship),
            Token::DoubleEq => Some(BinaryOperator::Eq),
            Token::Eq => Some(BinaryOperator::Eq),
            Token::Neq => Some(BinaryOperator::NotEq),
            Token::Gt => Some(BinaryOperator::Gt),
            Token::GtEq => Some(BinaryOperator::GtEq),
            Token::Lt => Some(BinaryOperator::Lt),
            Token::LtEq => Some(BinaryOperator::LtEq),
            Token::Plus => Some(BinaryOperator::Plus),
            Token::Minus => Some(BinaryOperator::Minus),
            Token::Mul => Some(BinaryOperator::Multiply),
            Token::Mod => Some(BinaryOperator::Modulo),
            Token::StringConcat => Some(BinaryOperator::StringConcat),
            Token::Pipe => Some(BinaryOperator::BitwiseOr),
            Token::Caret => {
                // `^` is exponentiation in Postgres, bitwise XOR elsewhere.
                if dialect_of!(self is PostgreSqlDialect) {
                    Some(BinaryOperator::PGExp)
                } else {
                    Some(BinaryOperator::BitwiseXor)
                }
            }
            Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
            Token::Div => Some(BinaryOperator::Divide),
            Token::DuckIntDiv if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                Some(BinaryOperator::DuckIntegerDivide)
            }
            Token::ShiftLeft if dialect_of!(self is PostgreSqlDialect | DuckDbDialect | GenericDialect) => {
                Some(BinaryOperator::PGBitwiseShiftLeft)
            }
            Token::ShiftRight if dialect_of!(self is PostgreSqlDialect | DuckDbDialect | GenericDialect) => {
                Some(BinaryOperator::PGBitwiseShiftRight)
            }
            Token::Sharp if dialect_of!(self is PostgreSqlDialect) => {
                Some(BinaryOperator::PGBitwiseXor)
            }
            Token::Overlap if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Some(BinaryOperator::PGOverlap)
            }
            Token::CaretAt if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Some(BinaryOperator::PGStartsWith)
            }
            Token::Tilde => Some(BinaryOperator::PGRegexMatch),
            Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
            Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
            Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
            Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
            Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
            Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
            Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
            Token::Arrow => Some(BinaryOperator::Arrow),
            Token::LongArrow => Some(BinaryOperator::LongArrow),
            Token::HashArrow => Some(BinaryOperator::HashArrow),
            Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
            Token::AtArrow => Some(BinaryOperator::AtArrow),
            Token::ArrowAt => Some(BinaryOperator::ArrowAt),
            Token::HashMinus => Some(BinaryOperator::HashMinus),
            Token::AtQuestion => Some(BinaryOperator::AtQuestion),
            Token::AtAt => Some(BinaryOperator::AtAt),
            Token::Question => Some(BinaryOperator::Question),
            Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
            Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
            // Move the operator text out of the token instead of cloning it.
            Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(core::mem::take(s))),

            Token::Word(w) => match w.keyword {
                Keyword::AND => Some(BinaryOperator::And),
                Keyword::OR => Some(BinaryOperator::Or),
                Keyword::XOR => Some(BinaryOperator::Xor),
                // Postgres `OPERATOR(schema.op)` custom-operator syntax.
                Keyword::OPERATOR if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                    self.expect_token(&Token::LParen)?;
                    let mut idents = vec![];
                    // Collect the period-separated parts inside the parens.
                    loop {
                        idents.push(self.next_token().to_string());
                        if !self.consume_token(&Token::Period) {
                            break;
                        }
                    }
                    self.expect_token(&Token::RParen)?;
                    Some(BinaryOperator::PGCustomBinaryOperator(idents))
                }
                _ => None,
            },
            _ => None,
        };

        if let Some(op) = regular_binary_operator {
            // `<op> ANY/ALL/SOME (...)` quantified comparison.
            if let Some(keyword) =
                self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
            {
                self.expect_token(&Token::LParen)?;
                let right = if self.peek_sub_query() {
                    // A subquery follows: step back so the `(` is re-consumed
                    // as part of the subquery expression (including the `)`).
                    self.prev_token();
                    self.parse_subexpr(precedence)?
                } else {
                    let right = self.parse_subexpr(precedence)?;
                    self.expect_token(&Token::RParen)?;
                    right
                };

                // Outside Postgres, only comparison operators may be quantified.
                if !dialect_of!(self is PostgreSqlDialect) && !matches!(
                    op,
                    BinaryOperator::Gt
                        | BinaryOperator::Lt
                        | BinaryOperator::GtEq
                        | BinaryOperator::LtEq
                        | BinaryOperator::Eq
                        | BinaryOperator::NotEq
                ) {
                    return parser_err!(
                        format!(
                            "Expected one of [=, >, <, =>, =<, !=] as comparison operator, found: {op}"
                        ),
                        tok.span.start
                    );
                };

                Ok(match keyword {
                    Keyword::ALL => Expr::AllOp {
                        left: Box::new(expr),
                        compare_op: op,
                        right: Box::new(right),
                    },
                    Keyword::ANY | Keyword::SOME => Expr::AnyOp {
                        left: Box::new(expr),
                        compare_op: op,
                        right: Box::new(right),
                        is_some: keyword == Keyword::SOME,
                    },
                    // parse_one_of_keywords only returns the three above.
                    _ => unreachable!(),
                })
            } else {
                Ok(Expr::BinaryOp {
                    left: Box::new(expr),
                    op,
                    right: Box::new(self.parse_subexpr(precedence)?),
                })
            }
        } else if let Token::Word(w) = &tok.token {
            match w.keyword {
                Keyword::IS => {
                    if self.parse_keyword(Keyword::NULL) {
                        Ok(Expr::IsNull(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
                        Ok(Expr::IsNotNull(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::TRUE]) {
                        Ok(Expr::IsTrue(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
                        Ok(Expr::IsNotTrue(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::FALSE]) {
                        Ok(Expr::IsFalse(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
                        Ok(Expr::IsNotFalse(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
                        Ok(Expr::IsUnknown(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
                        Ok(Expr::IsNotUnknown(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
                        let expr2 = self.parse_expr()?;
                        Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
                    {
                        let expr2 = self.parse_expr()?;
                        Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
                    } else {
                        self.expected(
                            "[NOT] NULL or TRUE|FALSE or [NOT] DISTINCT FROM after IS",
                            self.peek_token(),
                        )
                    }
                }
                Keyword::AT => {
                    self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                    Ok(Expr::AtTimeZone {
                        timestamp: Box::new(expr),
                        time_zone: Box::new(self.parse_subexpr(precedence)?),
                    })
                }
                Keyword::NOT
                | Keyword::IN
                | Keyword::BETWEEN
                | Keyword::LIKE
                | Keyword::ILIKE
                | Keyword::SIMILAR
                | Keyword::REGEXP
                | Keyword::RLIKE => {
                    // Rewind so the keyword(s) can be re-parsed with uniform
                    // NOT handling below.
                    self.prev_token();
                    let negated = self.parse_keyword(Keyword::NOT);
                    let regexp = self.parse_keyword(Keyword::REGEXP);
                    let rlike = self.parse_keyword(Keyword::RLIKE);
                    if regexp || rlike {
                        Ok(Expr::RLike {
                            negated,
                            expr: Box::new(expr),
                            pattern: Box::new(
                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
                            ),
                            regexp,
                        })
                    } else if self.parse_keyword(Keyword::IN) {
                        self.parse_in(expr, negated)
                    } else if self.parse_keyword(Keyword::BETWEEN) {
                        self.parse_between(expr, negated)
                    } else if self.parse_keyword(Keyword::LIKE) {
                        Ok(Expr::Like {
                            negated,
                            any: self.parse_keyword(Keyword::ANY),
                            expr: Box::new(expr),
                            pattern: Box::new(
                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
                            ),
                            escape_char: self.parse_escape_char()?,
                        })
                    } else if self.parse_keyword(Keyword::ILIKE) {
                        Ok(Expr::ILike {
                            negated,
                            any: self.parse_keyword(Keyword::ANY),
                            expr: Box::new(expr),
                            pattern: Box::new(
                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
                            ),
                            escape_char: self.parse_escape_char()?,
                        })
                    } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
                        Ok(Expr::SimilarTo {
                            negated,
                            expr: Box::new(expr),
                            pattern: Box::new(
                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
                            ),
                            escape_char: self.parse_escape_char()?,
                        })
                    } else {
                        self.expected("IN or BETWEEN after NOT", self.peek_token())
                    }
                }
                _ => parser_err!(
                    format!("No infix parser for token {:?}", tok.token),
                    tok.span.start
                ),
            }
        } else if Token::DoubleColon == tok {
            Ok(Expr::Cast {
                kind: CastKind::DoubleColon,
                expr: Box::new(expr),
                data_type: self.parse_data_type()?,
                format: None,
            })
        } else if Token::ExclamationMark == tok && self.dialect.supports_factorial_operator() {
            // Postfix factorial, e.g. Postgres `5!`.
            Ok(Expr::UnaryOp {
                op: UnaryOperator::PGPostfixFactorial,
                expr: Box::new(expr),
            })
        } else if Token::LBracket == tok {
            // `[` after an expression: subscript, JSON access, or map access
            // depending on the dialect.
            if dialect_of!(self is PostgreSqlDialect | DuckDbDialect | GenericDialect) {
                self.parse_subscript(expr)
            } else if dialect_of!(self is SnowflakeDialect) || self.dialect.supports_partiql() {
                self.prev_token();
                self.parse_json_access(expr)
            } else {
                self.parse_map_access(expr)
            }
        } else if dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == tok {
            self.prev_token();
            self.parse_json_access(expr)
        } else {
            parser_err!(
                format!("No infix parser for token {:?}", tok.token),
                tok.span.start
            )
        }
    }
3067
3068 pub fn parse_escape_char(&mut self) -> Result<Option<String>, ParserError> {
3070 if self.parse_keyword(Keyword::ESCAPE) {
3071 Ok(Some(self.parse_literal_string()?))
3072 } else {
3073 Ok(None)
3074 }
3075 }
3076
    /// Parse the interior of a `[...]` subscript after the opening `[`:
    /// either a plain index `[i]` or a slice `[lower:upper:stride]` where
    /// every component is optional. Consumes the closing `]`.
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means the lower bound was omitted.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // `[expr]` is a plain index; `[:]` is a fully open slice.
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // If a lower-bound expression was parsed, the separating `:` has
        // not been consumed yet.
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `[lower:]` — no upper bound and no stride.
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_expr()?)
        };

        // `[lower:upper]` — no stride.
        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // A second `:` introduces an optional stride.
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // If a stride expression was parsed, the closing `]` is still pending.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
3149
3150 pub fn parse_subscript(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3154 let subscript = self.parse_subscript_inner()?;
3155 Ok(Expr::Subscript {
3156 expr: Box::new(expr),
3157 subscript: Box::new(subscript),
3158 })
3159 }
3160
    /// Parse a single object-key segment of a JSON access path: an
    /// unquoted or double-quoted identifier, or a double-quoted string.
    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
        let token = self.next_token();
        match token.token {
            Token::Word(Word {
                value,
                // Only bare or `"`-quoted words are valid keys here.
                quote_style: quote_style @ (Some('"') | None),
                // Keywords are allowed as path keys, so the keyword tag is ignored.
                keyword: _,
            }) => Ok(JsonPathElem::Dot {
                key: value,
                quoted: quote_style.is_some(),
            }),

            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),

            _ => self.expected("variant object key name", token),
        }
    }
3184
3185 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3186 let path = self.parse_json_path()?;
3187 Ok(Expr::JsonAccess {
3188 value: Box::new(expr),
3189 path,
3190 })
3191 }
3192
    /// Parse a JSON access path: a leading `:key` followed by any number
    /// of `.key` and `[index]` segments, stopping at the first token that
    /// does not extend the path.
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // `:` only introduces the first segment (Snowflake syntax).
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `.` separates all subsequent segments.
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                Token::LBracket => {
                    let key = self.parse_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                _ => {
                    // Not part of the path: rewind and stop.
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers only invoke this after seeing `:` or `[`, so at least one
        // segment must have been parsed.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
3219
    /// Parse one or more chained map/array accesses (`expr[key]...`, plus
    /// `.key` chaining on BigQuery). The first `[` was already consumed.
    pub fn parse_map_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
        let key = self.parse_expr()?;
        self.expect_token(&Token::RBracket)?;

        let mut keys = vec![MapAccessKey {
            key,
            syntax: MapAccessSyntax::Bracket,
        }];
        // Consume any further `[key]` (or BigQuery `.key`) accesses.
        loop {
            let key = match self.peek_token().token {
                Token::LBracket => {
                    self.next_token(); // consume `[`
                    let key = self.parse_expr()?;
                    self.expect_token(&Token::RBracket)?;
                    MapAccessKey {
                        key,
                        syntax: MapAccessSyntax::Bracket,
                    }
                }
                Token::Period if dialect_of!(self is BigQueryDialect) => {
                    self.next_token(); // consume `.`
                    MapAccessKey {
                        key: self.parse_expr()?,
                        syntax: MapAccessSyntax::Period,
                    }
                }
                _ => break,
            };
            keys.push(key);
        }

        Ok(Expr::MapAccess {
            column: Box::new(expr),
            keys,
        })
    }
3259
    /// Parse the right-hand side of `[NOT] IN ...`: an `UNNEST(...)` call,
    /// a parenthesized subquery, or a parenthesized expression list.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // BigQuery allows `IN UNNEST(array_expression)`.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        let in_op = if self.parse_keyword(Keyword::SELECT) || self.parse_keyword(Keyword::WITH) {
            // A subquery: rewind so SELECT/WITH is parsed as part of it.
            self.prev_token();
            Expr::InSubquery {
                expr: Box::new(expr),
                subquery: self.parse_query()?,
                negated,
            }
        } else {
            Expr::InList {
                expr: Box::new(expr),
                // Some dialects permit an empty `IN ()` list.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
3296
3297 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3299 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3302 self.expect_keyword(Keyword::AND)?;
3303 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3304 Ok(Expr::Between {
3305 expr: Box::new(expr),
3306 negated,
3307 low: Box::new(low),
3308 high: Box::new(high),
3309 })
3310 }
3311
3312 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3314 Ok(Expr::Cast {
3315 kind: CastKind::DoubleColon,
3316 expr: Box::new(expr),
3317 data_type: self.parse_data_type()?,
3318 format: None,
3319 })
3320 }
3321
    /// Precedence of the next operator token, using the dialect's default
    /// precedence table.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
3326
    /// Return the next non-whitespace token without consuming it.
    pub fn peek_token(&self) -> TokenWithSpan {
        self.peek_nth_token(0)
    }
3332
    /// Return the next `N` non-whitespace tokens (spans dropped) without
    /// advancing the parser; positions past the end yield EOF.
    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
        self.peek_tokens_with_location()
            .map(|with_loc| with_loc.token)
    }
3359
    /// Return the next `N` non-whitespace tokens with their spans, without
    /// advancing the parser; positions past the end yield EOF.
    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
        // Scan with a local cursor so parser state stays untouched.
        let mut index = self.index;
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            // Skip over whitespace tokens.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = token
            {
                continue;
            }
            break token.cloned().unwrap_or(TokenWithSpan {
                token: Token::EOF,
                span: Span::empty(),
            });
        })
    }
3382
    /// Return the `n`-th upcoming non-whitespace token (0-based) without
    /// consuming anything; past-the-end positions yield EOF.
    pub fn peek_nth_token(&self, mut n: usize) -> TokenWithSpan {
        let mut index = self.index;
        loop {
            index += 1;
            match self.tokens.get(index - 1) {
                // Whitespace never counts toward `n`.
                Some(TokenWithSpan {
                    token: Token::Whitespace(_),
                    span: _,
                }) => continue,
                non_whitespace => {
                    if n == 0 {
                        return non_whitespace.cloned().unwrap_or(TokenWithSpan {
                            token: Token::EOF,
                            span: Span::empty(),
                        });
                    }
                    n -= 1;
                }
            }
        }
    }
3405
    /// Return the next token without consuming it, including whitespace.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
3411
3412 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
3414 self.tokens
3415 .get(self.index + n)
3416 .cloned()
3417 .unwrap_or(TokenWithSpan {
3418 token: Token::EOF,
3419 span: Span::empty(),
3420 })
3421 }
3422
3423 fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
3425 let index = self.index;
3426 let matched = self.parse_keywords(expected);
3427 self.index = index;
3428 matched
3429 }
3430
    /// Consume and return the next non-whitespace token, or EOF when the
    /// stream is exhausted (the index still advances past the end).
    pub fn next_token(&mut self) -> TokenWithSpan {
        loop {
            self.index += 1;
            match self.tokens.get(self.index - 1) {
                // Whitespace is consumed silently.
                Some(TokenWithSpan {
                    token: Token::Whitespace(_),
                    span: _,
                }) => continue,
                token => {
                    return token
                        .cloned()
                        .unwrap_or_else(|| TokenWithSpan::wrap(Token::EOF))
                }
            }
        }
    }
3450
3451 pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
3453 self.index += 1;
3454 self.tokens.get(self.index - 1)
3455 }
3456
    /// Step back to the previous non-whitespace token, undoing one
    /// `next_token` call.
    ///
    /// Panics (via `assert!`) if the parser is already at the start of the
    /// token stream.
    pub fn prev_token(&mut self) {
        loop {
            assert!(self.index > 0);
            self.index -= 1;
            // Keep stepping back over whitespace tokens.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = self.tokens.get(self.index)
            {
                continue;
            }
            return;
        }
    }
3474
    /// Build an `Expected: X, found: Y` parse error located at `found`.
    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
3482
    /// Consume the next token if it is the given keyword; report whether
    /// it was consumed.
    #[must_use]
    pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
        self.parse_keyword_token(expected).is_some()
    }
3489
3490 #[must_use]
3491 pub fn parse_keyword_token(&mut self, expected: Keyword) -> Option<TokenWithSpan> {
3492 match self.peek_token().token {
3493 Token::Word(w) if expected == w.keyword => Some(self.next_token()),
3494 _ => None,
3495 }
3496 }
3497
    /// Report whether the next token is the given keyword, without
    /// consuming it.
    #[must_use]
    pub fn peek_keyword(&mut self, expected: Keyword) -> bool {
        matches!(self.peek_token().token, Token::Word(w) if expected == w.keyword)
    }
3502
    /// If the next tokens are `expected` followed exactly by `tokens`,
    /// consume the whole sequence and return true; otherwise consume
    /// nothing and return false.
    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        match self.peek_token().token {
            Token::Word(w) if expected == w.keyword => {
                // Verify the full sequence before consuming anything.
                for (idx, token) in tokens.iter().enumerate() {
                    if self.peek_nth_token(idx + 1).token != *token {
                        return false;
                    }
                }
                // Consume the keyword plus each matched token.
                for _ in 0..(tokens.len() + 1) {
                    self.next_token();
                }
                true
            }
            _ => false,
        }
    }
3527
3528 #[must_use]
3532 pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
3533 let index = self.index;
3534 for &keyword in keywords {
3535 if !self.parse_keyword(keyword) {
3536 self.index = index;
3539 return false;
3540 }
3541 }
3542 true
3543 }
3544
3545 #[must_use]
3549 pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
3550 match self.peek_token().token {
3551 Token::Word(w) => {
3552 keywords
3553 .iter()
3554 .find(|keyword| **keyword == w.keyword)
3555 .map(|keyword| {
3556 self.next_token();
3557 *keyword
3558 })
3559 }
3560 _ => None,
3561 }
3562 }
3563
3564 pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
3567 if let Some(keyword) = self.parse_one_of_keywords(keywords) {
3568 Ok(keyword)
3569 } else {
3570 let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
3571 self.expected(
3572 &format!("one of {}", keywords.join(" or ")),
3573 self.peek_token(),
3574 )
3575 }
3576 }
3577
3578 pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
3581 if let Some(token) = self.parse_keyword_token(expected) {
3582 Ok(token)
3583 } else {
3584 self.expected(format!("{:?}", &expected).as_str(), self.peek_token())
3585 }
3586 }
3587
3588 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
3591 for &kw in expected {
3592 self.expect_keyword(kw)?;
3593 }
3594 Ok(())
3595 }
3596
3597 #[must_use]
3599 pub fn consume_token(&mut self, expected: &Token) -> bool {
3600 if self.peek_token() == *expected {
3601 self.next_token();
3602 true
3603 } else {
3604 false
3605 }
3606 }
3607
3608 #[must_use]
3612 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
3613 let index = self.index;
3614 for token in tokens {
3615 if !self.consume_token(token) {
3616 self.index = index;
3617 return false;
3618 }
3619 }
3620 true
3621 }
3622
3623 pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
3625 if self.peek_token() == *expected {
3626 Ok(self.next_token())
3627 } else {
3628 self.expected(&expected.to_string(), self.peek_token())
3629 }
3630 }
3631
3632 fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
3633 where
3634 <T as FromStr>::Err: Display,
3635 {
3636 s.parse::<T>().map_err(|e| {
3637 ParserError::ParserError(format!(
3638 "Could not parse '{s}' as {}: {e}{loc}",
3639 core::any::type_name::<T>()
3640 ))
3641 })
3642 }
3643
3644 pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
3646 let trailing_commas =
3652 self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
3653
3654 self.parse_comma_separated_with_trailing_commas(|p| p.parse_select_item(), trailing_commas)
3655 }
3656
    /// Parse the comma-separated action list of a GRANT/REVOKE statement.
    ///
    /// With trailing commas enabled, a comma directly before `ON` or a
    /// closing delimiter / end of input terminates the list.
    pub fn parse_actions_list(&mut self) -> Result<Vec<ParsedAction>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                // The comma may have been a trailing one: stop if the next
                // token cannot start another action.
                match self.peek_token().token {
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
3679
    /// Decide whether a comma-separated list has ended, consuming a
    /// separating comma when present.
    ///
    /// With `trailing_commas`, a comma followed by a reserved column-alias
    /// keyword or a closing delimiter also ends the list (the comma is
    /// treated as trailing).
    fn is_parse_comma_separated_end_with_trailing_commas(&mut self, trailing_commas: bool) -> bool {
        if !self.consume_token(&Token::Comma) {
            // No comma at all: the list is definitely over.
            true
        } else if trailing_commas {
            let token = self.peek_token().token;
            match token {
                Token::Word(ref kw)
                    if keywords::RESERVED_FOR_COLUMN_ALIAS.contains(&kw.keyword) =>
                {
                    true
                }
                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
                    true
                }
                _ => false,
            }
        } else {
            // Comma consumed and trailing commas disallowed: expect another item.
            false
        }
    }
3703
    /// Like [`Self::is_parse_comma_separated_end_with_trailing_commas`],
    /// using the trailing-comma setting from the parser options.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(self.options.trailing_commas)
    }
3709
    /// Parse one or more items produced by `f`, separated by commas, using
    /// the trailing-comma setting from the parser options.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(f, self.options.trailing_commas)
    }
3717
3718 fn parse_comma_separated_with_trailing_commas<T, F>(
3721 &mut self,
3722 mut f: F,
3723 trailing_commas: bool,
3724 ) -> Result<Vec<T>, ParserError>
3725 where
3726 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
3727 {
3728 let mut values = vec![];
3729 loop {
3730 values.push(f(self)?);
3731 if self.is_parse_comma_separated_end_with_trailing_commas(trailing_commas) {
3732 break;
3733 }
3734 }
3735 Ok(values)
3736 }
3737
3738 pub fn parse_keyword_separated<T, F>(
3740 &mut self,
3741 keyword: Keyword,
3742 mut f: F,
3743 ) -> Result<Vec<T>, ParserError>
3744 where
3745 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
3746 {
3747 let mut values = vec![];
3748 loop {
3749 values.push(f(self)?);
3750 if !self.parse_keyword(keyword) {
3751 break;
3752 }
3753 }
3754 Ok(values)
3755 }
3756
    /// Parse `(` followed by the value produced by `f` followed by `)`.
    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.expect_token(&Token::LParen)?;
        let res = f(self)?;
        self.expect_token(&Token::RParen)?;
        Ok(res)
    }
3766
    /// Like `parse_comma_separated`, but permits an empty list: returns no
    /// items when `end_token` (or, with trailing commas enabled, a lone
    /// comma right before it) comes first. Does not consume `end_token`.
    pub fn parse_comma_separated0<T, F>(
        &mut self,
        f: F,
        end_token: Token,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        // Empty list: the terminator is already next.
        if self.peek_token().token == end_token {
            return Ok(vec![]);
        }

        // `,<end>` with trailing commas enabled is also an empty list.
        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
            let _ = self.consume_token(&Token::Comma);
            return Ok(vec![]);
        }

        self.parse_comma_separated(f)
    }
3788
3789 pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
3792 where
3793 F: FnMut(&mut Parser) -> Result<T, ParserError>,
3794 {
3795 match self.try_parse(f) {
3796 Ok(t) => Ok(Some(t)),
3797 Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
3798 _ => Ok(None),
3799 }
3800 }
3801
3802 pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
3804 where
3805 F: FnMut(&mut Parser) -> Result<T, ParserError>,
3806 {
3807 let index = self.index;
3808 match f(self) {
3809 Ok(t) => Ok(t),
3810 Err(e) => {
3811 self.index = index;
3813 Err(e)
3814 }
3815 }
3816 }
3817
    /// Parse an optional `ALL` / `DISTINCT [ON (<expr>, ...)]` set quantifier.
    ///
    /// Returns `Ok(None)` when neither keyword is present or only `ALL`
    /// (the default) is given; errors when both `ALL` and `DISTINCT` appear.
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Remember where the clause starts so the error below can point at it.
        let loc = self.peek_token().span.start;
        let all = self.parse_keyword(Keyword::ALL);
        let distinct = self.parse_keyword(Keyword::DISTINCT);
        if !distinct {
            return Ok(None);
        }
        if all {
            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
        }
        let on = self.parse_keyword(Keyword::ON);
        if !on {
            return Ok(Some(Distinct::Distinct));
        }

        self.expect_token(&Token::LParen)?;
        // `DISTINCT ON ()` — consume-then-rewind detects an empty expression
        // list while leaving the `)` for the expect_token below.
        let col_names = if self.consume_token(&Token::RParen) {
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
3845
    /// Parse the statement following a consumed `CREATE` keyword and dispatch
    /// to the object-specific parser (TABLE, VIEW, FUNCTION, ...).
    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
        // Common prefix modifiers; each is optional, and the checks below are
        // order-sensitive (e.g. OR REPLACE must precede OR ALTER).
        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
        // Collapse LOCAL/GLOBAL into one tri-state flag: GLOBAL => Some(true),
        // LOCAL => Some(false), neither => None.
        let global: Option<bool> = if global {
            Some(true)
        } else if local {
            Some(false)
        } else {
            None
        };
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        // PERSISTENT is only recognized for DuckDB (used by CREATE SECRET).
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
        if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_table(or_replace, temporary, global, transient)
        } else if self.parse_keyword(Keyword::MATERIALIZED) || self.parse_keyword(Keyword::VIEW) {
            // Rewind so parse_create_view can re-read MATERIALIZED/VIEW itself.
            self.prev_token();
            self.parse_create_view(or_replace, temporary)
        } else if self.parse_keyword(Keyword::POLICY) {
            self.parse_create_policy()
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            self.parse_create_external_table(or_replace)
        } else if self.parse_keyword(Keyword::FUNCTION) {
            self.parse_create_function(or_replace, temporary)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            self.parse_create_trigger(or_replace, false)
        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
            self.parse_create_trigger(or_replace, true)
        } else if self.parse_keyword(Keyword::MACRO) {
            self.parse_create_macro(or_replace, temporary)
        } else if self.parse_keyword(Keyword::SECRET) {
            self.parse_create_secret(or_replace, temporary, persistent)
        } else if or_replace {
            // OR REPLACE was consumed, but none of the replaceable object
            // types above followed; object types below don't support it.
            self.expected(
                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                self.peek_token(),
            )
        } else if self.parse_keyword(Keyword::EXTENSION) {
            self.parse_create_extension()
        } else if self.parse_keyword(Keyword::INDEX) {
            self.parse_create_index(false)
        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
            self.parse_create_index(true)
        } else if self.parse_keyword(Keyword::VIRTUAL) {
            self.parse_create_virtual_table()
        } else if self.parse_keyword(Keyword::SCHEMA) {
            self.parse_create_schema()
        } else if self.parse_keyword(Keyword::DATABASE) {
            self.parse_create_database()
        } else if self.parse_keyword(Keyword::ROLE) {
            self.parse_create_role()
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            self.parse_create_sequence(temporary)
        } else if self.parse_keyword(Keyword::TYPE) {
            self.parse_create_type()
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            self.parse_create_procedure(or_alter)
        } else {
            self.expected("an object type after CREATE", self.peek_token())
        }
    }
3913
    /// Parse a DuckDB `CREATE SECRET` statement (the `SECRET` keyword has
    /// already been consumed by `parse_create`).
    ///
    /// `temporary` / `persistent` reflect modifiers consumed earlier; giving
    /// both at once is rejected below.
    pub fn parse_create_secret(
        &mut self,
        or_replace: bool,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        // Both the secret name and the `IN <storage>` specifier are optional
        // and may precede the parenthesized option list.
        let mut storage_specifier = None;
        let mut name = None;
        if self.peek_token() != Token::LParen {
            if self.parse_keyword(Keyword::IN) {
                storage_specifier = self.parse_identifier(false).ok()
            } else {
                name = self.parse_identifier(false).ok();
            }

            // A name may still be followed by its own `IN <storage>` clause.
            if storage_specifier.is_none()
                && self.peek_token() != Token::LParen
                && self.parse_keyword(Keyword::IN)
            {
                storage_specifier = self.parse_identifier(false).ok();
            }
        }

        self.expect_token(&Token::LParen)?;
        self.expect_keyword(Keyword::TYPE)?;
        let secret_type = self.parse_identifier(false)?;

        // Any remaining `, key value` pairs inside the parentheses.
        let mut options = Vec::new();
        if self.consume_token(&Token::Comma) {
            options.append(&mut self.parse_comma_separated(|p| {
                let key = p.parse_identifier(false)?;
                let value = p.parse_identifier(false)?;
                Ok(SecretOption { key, value })
            })?);
        }
        self.expect_token(&Token::RParen)?;

        // TEMPORARY and PERSISTENT are mutually exclusive.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
        };

        Ok(Statement::CreateSecret {
            or_replace,
            temporary: temp,
            if_not_exists,
            name,
            storage_specifier,
            secret_type,
            options,
        })
    }
3972
3973 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
3975 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
3976 if self.parse_keyword(Keyword::TABLE) {
3977 let table_name = self.parse_object_name(false)?;
3978 if self.peek_token().token != Token::EOF {
3979 if let Token::Word(word) = self.peek_token().token {
3980 if word.keyword == Keyword::OPTIONS {
3981 options = self.parse_options(Keyword::OPTIONS)?
3982 }
3983 };
3984
3985 if self.peek_token().token != Token::EOF {
3986 let (a, q) = self.parse_as_query()?;
3987 has_as = a;
3988 query = Some(q);
3989 }
3990
3991 Ok(Statement::Cache {
3992 table_flag,
3993 table_name,
3994 has_as,
3995 options,
3996 query,
3997 })
3998 } else {
3999 Ok(Statement::Cache {
4000 table_flag,
4001 table_name,
4002 has_as,
4003 options,
4004 query,
4005 })
4006 }
4007 } else {
4008 table_flag = Some(self.parse_object_name(false)?);
4009 if self.parse_keyword(Keyword::TABLE) {
4010 let table_name = self.parse_object_name(false)?;
4011 if self.peek_token() != Token::EOF {
4012 if let Token::Word(word) = self.peek_token().token {
4013 if word.keyword == Keyword::OPTIONS {
4014 options = self.parse_options(Keyword::OPTIONS)?
4015 }
4016 };
4017
4018 if self.peek_token() != Token::EOF {
4019 let (a, q) = self.parse_as_query()?;
4020 has_as = a;
4021 query = Some(q);
4022 }
4023
4024 Ok(Statement::Cache {
4025 table_flag,
4026 table_name,
4027 has_as,
4028 options,
4029 query,
4030 })
4031 } else {
4032 Ok(Statement::Cache {
4033 table_flag,
4034 table_name,
4035 has_as,
4036 options,
4037 query,
4038 })
4039 }
4040 } else {
4041 if self.peek_token() == Token::EOF {
4042 self.prev_token();
4043 }
4044 self.expected("a `TABLE` keyword", self.peek_token())
4045 }
4046 }
4047 }
4048
4049 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
4051 match self.peek_token().token {
4052 Token::Word(word) => match word.keyword {
4053 Keyword::AS => {
4054 self.next_token();
4055 Ok((true, self.parse_query()?))
4056 }
4057 _ => Ok((false, self.parse_query()?)),
4058 },
4059 _ => self.expected("a QUERY statement", self.peek_token()),
4060 }
4061 }
4062
4063 pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
4065 self.expect_keyword(Keyword::TABLE)?;
4066 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
4067 let table_name = self.parse_object_name(false)?;
4068 Ok(Statement::UNCache {
4069 table_name,
4070 if_exists,
4071 })
4072 }
4073
4074 pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
4076 self.expect_keyword(Keyword::TABLE)?;
4077 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4078 let table_name = self.parse_object_name(false)?;
4079 self.expect_keyword(Keyword::USING)?;
4080 let module_name = self.parse_identifier(false)?;
4081 let module_args = self.parse_parenthesized_column_list(Optional, false)?;
4086 Ok(Statement::CreateVirtualTable {
4087 name: table_name,
4088 if_not_exists,
4089 module_name,
4090 module_args,
4091 })
4092 }
4093
4094 pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
4095 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4096
4097 let schema_name = self.parse_schema_name()?;
4098
4099 Ok(Statement::CreateSchema {
4100 schema_name,
4101 if_not_exists,
4102 })
4103 }
4104
4105 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
4106 if self.parse_keyword(Keyword::AUTHORIZATION) {
4107 Ok(SchemaName::UnnamedAuthorization(
4108 self.parse_identifier(false)?,
4109 ))
4110 } else {
4111 let name = self.parse_object_name(false)?;
4112
4113 if self.parse_keyword(Keyword::AUTHORIZATION) {
4114 Ok(SchemaName::NamedAuthorization(
4115 name,
4116 self.parse_identifier(false)?,
4117 ))
4118 } else {
4119 Ok(SchemaName::Simple(name))
4120 }
4121 }
4122 }
4123
4124 pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
4125 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4126 let db_name = self.parse_object_name(false)?;
4127 let mut location = None;
4128 let mut managed_location = None;
4129 loop {
4130 match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
4131 Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
4132 Some(Keyword::MANAGEDLOCATION) => {
4133 managed_location = Some(self.parse_literal_string()?)
4134 }
4135 _ => break,
4136 }
4137 }
4138 Ok(Statement::CreateDatabase {
4139 db_name,
4140 if_not_exists: ine,
4141 location,
4142 managed_location,
4143 })
4144 }
4145
4146 pub fn parse_optional_create_function_using(
4147 &mut self,
4148 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
4149 if !self.parse_keyword(Keyword::USING) {
4150 return Ok(None);
4151 };
4152 let keyword =
4153 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
4154
4155 let uri = self.parse_literal_string()?;
4156
4157 match keyword {
4158 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
4159 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
4160 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
4161 _ => self.expected(
4162 "JAR, FILE or ARCHIVE, got {:?}",
4163 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
4164 ),
4165 }
4166 }
4167
    /// Dispatch `CREATE [OR REPLACE] [TEMPORARY] FUNCTION` to the
    /// dialect-specific parser; unsupported dialects rewind to the FUNCTION
    /// keyword and report an error there.
    pub fn parse_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        if dialect_of!(self is HiveDialect) {
            self.parse_hive_create_function(or_replace, temporary)
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            self.parse_postgres_create_function(or_replace, temporary)
        } else if dialect_of!(self is DuckDbDialect) {
            // DuckDB handles CREATE FUNCTION as a synonym for CREATE MACRO.
            self.parse_create_macro(or_replace, temporary)
        } else if dialect_of!(self is BigQueryDialect) {
            self.parse_bigquery_create_function(or_replace, temporary)
        } else {
            // Step back so the error points at the FUNCTION keyword itself.
            self.prev_token();
            self.expected("an object type after CREATE", self.peek_token())
        }
    }
4186
    /// Parse a Postgres `CREATE FUNCTION`: name, parenthesized argument list,
    /// optional `RETURNS` clause, then attribute clauses (`AS`, `LANGUAGE`,
    /// volatility, null-input handling, `PARALLEL`, `RETURN`) which Postgres
    /// accepts in any order but at most once each.
    fn parse_postgres_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;
        self.expect_token(&Token::LParen)?;
        // An immediately-closed `()` means no declared arguments; the
        // consume-then-rewind leaves the `)` for the expect_token below.
        let args = if self.consume_token(&Token::RParen) {
            self.prev_token();
            None
        } else {
            Some(self.parse_comma_separated(Parser::parse_function_arg)?)
        };

        self.expect_token(&Token::RParen)?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_data_type()?)
        } else {
            None
        };

        // Accumulator for the order-independent attribute clauses.
        #[derive(Default)]
        struct Body {
            language: Option<Ident>,
            behavior: Option<FunctionBehavior>,
            function_body: Option<CreateFunctionBody>,
            called_on_null: Option<FunctionCalledOnNull>,
            parallel: Option<FunctionParallel>,
        }
        let mut body = Body::default();
        loop {
            // Each clause may appear at most once; duplicates are an error.
            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
                if field.is_some() {
                    return Err(ParserError::ParserError(format!(
                        "{name} specified more than once",
                    )));
                }
                Ok(())
            }
            if self.parse_keyword(Keyword::AS) {
                ensure_not_set(&body.function_body, "AS")?;
                body.function_body = Some(CreateFunctionBody::AsBeforeOptions(
                    self.parse_create_function_body_string()?,
                ));
            } else if self.parse_keyword(Keyword::LANGUAGE) {
                ensure_not_set(&body.language, "LANGUAGE")?;
                body.language = Some(self.parse_identifier(false)?);
            } else if self.parse_keyword(Keyword::IMMUTABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Immutable);
            } else if self.parse_keyword(Keyword::STABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Stable);
            } else if self.parse_keyword(Keyword::VOLATILE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Volatile);
            } else if self.parse_keywords(&[
                Keyword::CALLED,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
            } else if self.parse_keywords(&[
                Keyword::RETURNS,
                Keyword::NULL,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
            } else if self.parse_keyword(Keyword::STRICT) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::Strict);
            } else if self.parse_keyword(Keyword::PARALLEL) {
                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
                if self.parse_keyword(Keyword::UNSAFE) {
                    body.parallel = Some(FunctionParallel::Unsafe);
                } else if self.parse_keyword(Keyword::RESTRICTED) {
                    body.parallel = Some(FunctionParallel::Restricted);
                } else if self.parse_keyword(Keyword::SAFE) {
                    body.parallel = Some(FunctionParallel::Safe);
                } else {
                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
                }
            } else if self.parse_keyword(Keyword::RETURN) {
                ensure_not_set(&body.function_body, "RETURN")?;
                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
            } else {
                // No attribute keyword recognized: the clause list is done.
                break;
            }
        }

        Ok(Statement::CreateFunction(CreateFunction {
            or_replace,
            temporary,
            name,
            args,
            return_type,
            behavior: body.behavior,
            called_on_null: body.called_on_null,
            parallel: body.parallel,
            language: body.language,
            function_body: body.function_body,
            if_not_exists: false,
            using: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
        }))
    }
4313
4314 fn parse_hive_create_function(
4318 &mut self,
4319 or_replace: bool,
4320 temporary: bool,
4321 ) -> Result<Statement, ParserError> {
4322 let name = self.parse_object_name(false)?;
4323 self.expect_keyword(Keyword::AS)?;
4324
4325 let as_ = self.parse_create_function_body_string()?;
4326 let using = self.parse_optional_create_function_using()?;
4327
4328 Ok(Statement::CreateFunction(CreateFunction {
4329 or_replace,
4330 temporary,
4331 name,
4332 function_body: Some(CreateFunctionBody::AsBeforeOptions(as_)),
4333 using,
4334 if_not_exists: false,
4335 args: None,
4336 return_type: None,
4337 behavior: None,
4338 called_on_null: None,
4339 parallel: None,
4340 language: None,
4341 determinism_specifier: None,
4342 options: None,
4343 remote_connection: None,
4344 }))
4345 }
4346
    /// Parse a BigQuery `CREATE FUNCTION` statement: name, `(name type, ...)`
    /// parameters, optional RETURNS / determinism / LANGUAGE / REMOTE WITH
    /// CONNECTION clauses, then OPTIONS and the AS body (whose relative order
    /// is recorded in the resulting AST).
    fn parse_bigquery_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;

        // BigQuery parameters are always `name type`, with no mode or default.
        let parse_function_param =
            |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
                let name = parser.parse_identifier(false)?;
                let data_type = parser.parse_data_type()?;
                Ok(OperateFunctionArg {
                    mode: None,
                    name: Some(name),
                    data_type,
                    default_expr: None,
                })
            };
        self.expect_token(&Token::LParen)?;
        let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
        self.expect_token(&Token::RParen)?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_data_type()?)
        } else {
            None
        };

        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
            Some(FunctionDeterminismSpecifier::Deterministic)
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
            Some(FunctionDeterminismSpecifier::NotDeterministic)
        } else {
            None
        };

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier(false)?)
        } else {
            None
        };

        let remote_connection =
            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };

        // OPTIONS may appear either before or after the AS body; try the
        // "before" position first.
        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

        // Remote functions have no AS body at all; otherwise AS is mandatory.
        let function_body = if remote_connection.is_none() {
            self.expect_keyword(Keyword::AS)?;
            let expr = self.parse_expr()?;
            if options.is_none() {
                // No OPTIONS before AS: accept one after, and record that the
                // AS body preceded the options.
                options = self.maybe_parse_options(Keyword::OPTIONS)?;
                Some(CreateFunctionBody::AsBeforeOptions(expr))
            } else {
                Some(CreateFunctionBody::AsAfterOptions(expr))
            }
        } else {
            None
        };

        Ok(Statement::CreateFunction(CreateFunction {
            or_replace,
            temporary,
            if_not_exists,
            name,
            args: Some(args),
            return_type,
            function_body,
            language,
            determinism_specifier,
            options,
            remote_connection,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
        }))
    }
4435
    /// Parse one argument of a `CREATE FUNCTION` argument list:
    /// `[IN | OUT | INOUT] [name] data_type [{DEFAULT | =} expr]`.
    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else if self.parse_keyword(Keyword::OUT) {
            Some(ArgMode::Out)
        } else if self.parse_keyword(Keyword::INOUT) {
            Some(ArgMode::InOut)
        } else {
            None
        };

        // The argument name is optional. A bare identifier followed by a type
        // first parses as a "custom" data type, so when that happens,
        // reinterpret the custom type as the argument name and parse the real
        // data type that follows.
        let mut name = None;
        let mut data_type = self.parse_data_type()?;
        if let DataType::Custom(n, _) = &data_type {
            // NOTE(review): assumes the custom type's object name has at
            // least one part (`n.0[0]` would panic otherwise) — presumably
            // guaranteed by parse_data_type; confirm.
            name = Some(n.0[0].clone());
            data_type = self.parse_data_type()?;
        }

        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr,
        })
    }
4469
4470 pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
4476 if !dialect_of!(self is PostgreSqlDialect | GenericDialect) {
4477 self.prev_token();
4478 return self.expected("an object type after DROP", self.peek_token());
4479 }
4480 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
4481 let trigger_name = self.parse_object_name(false)?;
4482 self.expect_keyword(Keyword::ON)?;
4483 let table_name = self.parse_object_name(false)?;
4484 let option = self
4485 .parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT])
4486 .map(|keyword| match keyword {
4487 Keyword::CASCADE => ReferentialAction::Cascade,
4488 Keyword::RESTRICT => ReferentialAction::Restrict,
4489 _ => unreachable!(),
4490 });
4491 Ok(Statement::DropTrigger {
4492 if_exists,
4493 trigger_name,
4494 table_name,
4495 option,
4496 })
4497 }
4498
    /// Parse a Postgres `CREATE [CONSTRAINT] TRIGGER` statement; the clauses
    /// below are consumed in the fixed order Postgres defines for them.
    ///
    /// `is_constraint` is true when the CONSTRAINT keyword was seen by the
    /// caller. Non-Postgres-like dialects rewind and report an error.
    pub fn parse_create_trigger(
        &mut self,
        or_replace: bool,
        is_constraint: bool,
    ) -> Result<Statement, ParserError> {
        if !dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            self.prev_token();
            return self.expected("an object type after CREATE", self.peek_token());
        }

        let name = self.parse_object_name(false)?;
        // BEFORE | AFTER | INSTEAD OF
        let period = self.parse_trigger_period()?;

        // One or more events joined by OR, e.g. `INSERT OR UPDATE OF col`.
        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
        self.expect_keyword(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
            self.parse_object_name(true).ok()
        } else {
            None
        };

        // DEFERRABLE / INITIALLY { DEFERRED | IMMEDIATE } etc.
        let characteristics = self.parse_constraint_characteristics()?;

        // REFERENCING {OLD | NEW} TABLE [AS] name, repeated until no more
        // entries parse.
        let mut referencing = vec![];
        if self.parse_keyword(Keyword::REFERENCING) {
            while let Some(refer) = self.parse_trigger_referencing()? {
                referencing.push(refer);
            }
        }

        self.expect_keyword(Keyword::FOR)?;
        // FOR [EACH] { ROW | STATEMENT }
        let include_each = self.parse_keyword(Keyword::EACH);
        let trigger_object =
            match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
                Keyword::ROW => TriggerObject::Row,
                Keyword::STATEMENT => TriggerObject::Statement,
                _ => unreachable!(),
            };

        // Optional WHEN (<condition>).
        let condition = self
            .parse_keyword(Keyword::WHEN)
            .then(|| self.parse_expr())
            .transpose()?;

        self.expect_keyword(Keyword::EXECUTE)?;

        // EXECUTE { FUNCTION | PROCEDURE } name(args)
        let exec_body = self.parse_trigger_exec_body()?;

        Ok(Statement::CreateTrigger {
            or_replace,
            is_constraint,
            name,
            period,
            events,
            table_name,
            referenced_table_name,
            referencing,
            trigger_object,
            include_each,
            condition,
            exec_body,
            characteristics,
        })
    }
4565
4566 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
4567 Ok(
4568 match self.expect_one_of_keywords(&[
4569 Keyword::BEFORE,
4570 Keyword::AFTER,
4571 Keyword::INSTEAD,
4572 ])? {
4573 Keyword::BEFORE => TriggerPeriod::Before,
4574 Keyword::AFTER => TriggerPeriod::After,
4575 Keyword::INSTEAD => self
4576 .expect_keyword(Keyword::OF)
4577 .map(|_| TriggerPeriod::InsteadOf)?,
4578 _ => unreachable!(),
4579 },
4580 )
4581 }
4582
4583 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
4584 Ok(
4585 match self.expect_one_of_keywords(&[
4586 Keyword::INSERT,
4587 Keyword::UPDATE,
4588 Keyword::DELETE,
4589 Keyword::TRUNCATE,
4590 ])? {
4591 Keyword::INSERT => TriggerEvent::Insert,
4592 Keyword::UPDATE => {
4593 if self.parse_keyword(Keyword::OF) {
4594 let cols = self.parse_comma_separated(|ident| {
4595 Parser::parse_identifier(ident, false)
4596 })?;
4597 TriggerEvent::Update(cols)
4598 } else {
4599 TriggerEvent::Update(vec![])
4600 }
4601 }
4602 Keyword::DELETE => TriggerEvent::Delete,
4603 Keyword::TRUNCATE => TriggerEvent::Truncate,
4604 _ => unreachable!(),
4605 },
4606 )
4607 }
4608
4609 pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
4610 let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
4611 Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
4612 TriggerReferencingType::OldTable
4613 }
4614 Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
4615 TriggerReferencingType::NewTable
4616 }
4617 _ => {
4618 return Ok(None);
4619 }
4620 };
4621
4622 let is_as = self.parse_keyword(Keyword::AS);
4623 let transition_relation_name = self.parse_object_name(false)?;
4624 Ok(Some(TriggerReferencing {
4625 refer_type,
4626 is_as,
4627 transition_relation_name,
4628 }))
4629 }
4630
4631 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
4632 Ok(TriggerExecBody {
4633 exec_type: match self
4634 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
4635 {
4636 Keyword::FUNCTION => TriggerExecBodyType::Function,
4637 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
4638 _ => unreachable!(),
4639 },
4640 func_desc: self.parse_function_desc()?,
4641 })
4642 }
4643
4644 pub fn parse_create_macro(
4645 &mut self,
4646 or_replace: bool,
4647 temporary: bool,
4648 ) -> Result<Statement, ParserError> {
4649 if dialect_of!(self is DuckDbDialect | GenericDialect) {
4650 let name = self.parse_object_name(false)?;
4651 self.expect_token(&Token::LParen)?;
4652 let args = if self.consume_token(&Token::RParen) {
4653 self.prev_token();
4654 None
4655 } else {
4656 Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
4657 };
4658
4659 self.expect_token(&Token::RParen)?;
4660 self.expect_keyword(Keyword::AS)?;
4661
4662 Ok(Statement::CreateMacro {
4663 or_replace,
4664 temporary,
4665 name,
4666 args,
4667 definition: if self.parse_keyword(Keyword::TABLE) {
4668 MacroDefinition::Table(self.parse_query()?)
4669 } else {
4670 MacroDefinition::Expr(self.parse_expr()?)
4671 },
4672 })
4673 } else {
4674 self.prev_token();
4675 self.expected("an object type after CREATE", self.peek_token())
4676 }
4677 }
4678
4679 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
4680 let name = self.parse_identifier(false)?;
4681
4682 let default_expr =
4683 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
4684 Some(self.parse_expr()?)
4685 } else {
4686 None
4687 };
4688 Ok(MacroArg { name, default_expr })
4689 }
4690
4691 pub fn parse_create_external_table(
4692 &mut self,
4693 or_replace: bool,
4694 ) -> Result<Statement, ParserError> {
4695 self.expect_keyword(Keyword::TABLE)?;
4696 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4697 let table_name = self.parse_object_name(false)?;
4698 let (columns, constraints) = self.parse_columns()?;
4699
4700 let hive_distribution = self.parse_hive_distribution()?;
4701 let hive_formats = self.parse_hive_formats()?;
4702
4703 let file_format = if let Some(ff) = &hive_formats.storage {
4704 match ff {
4705 HiveIOFormat::FileFormat { format } => Some(*format),
4706 _ => None,
4707 }
4708 } else {
4709 None
4710 };
4711 let location = hive_formats.location.clone();
4712 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
4713 Ok(CreateTableBuilder::new(table_name)
4714 .columns(columns)
4715 .constraints(constraints)
4716 .hive_distribution(hive_distribution)
4717 .hive_formats(Some(hive_formats))
4718 .table_properties(table_properties)
4719 .or_replace(or_replace)
4720 .if_not_exists(if_not_exists)
4721 .external(true)
4722 .file_format(file_format)
4723 .location(location)
4724 .build())
4725 }
4726
4727 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
4728 let next_token = self.next_token();
4729 match &next_token.token {
4730 Token::Word(w) => match w.keyword {
4731 Keyword::AVRO => Ok(FileFormat::AVRO),
4732 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
4733 Keyword::ORC => Ok(FileFormat::ORC),
4734 Keyword::PARQUET => Ok(FileFormat::PARQUET),
4735 Keyword::RCFILE => Ok(FileFormat::RCFILE),
4736 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
4737 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
4738 _ => self.expected("fileformat", next_token),
4739 },
4740 _ => self.expected("fileformat", next_token),
4741 }
4742 }
4743
4744 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
4745 let next_token = self.next_token();
4746 match &next_token.token {
4747 Token::Word(w) => match w.keyword {
4748 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
4749 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
4750 Keyword::JSON => Ok(AnalyzeFormat::JSON),
4751 _ => self.expected("fileformat", next_token),
4752 },
4753 _ => self.expected("fileformat", next_token),
4754 }
4755 }
4756
    /// Parse a `CREATE [MATERIALIZED] VIEW` statement, including the many
    /// optional, dialect-gated clauses (IF NOT EXISTS, WITH options, CLUSTER
    /// BY, OPTIONS, TO, COMMENT, WITH NO SCHEMA BINDING).
    pub fn parse_create_view(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword(Keyword::VIEW)?;
        let if_not_exists = dialect_of!(self is BigQueryDialect|SQLiteDialect|GenericDialect)
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        // BigQuery allows unquoted hyphens in (project-qualified) view names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let columns = self.parse_view_columns()?;
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        // A BigQuery OPTIONS(...) clause overrides any WITH options captured
        // above (only when non-empty).
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // ClickHouse `TO <table>` target for materialized views.
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // Snowflake `COMMENT = '<text>'` clause.
        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            let next_token = self.next_token();
            match next_token.token {
                Token::SingleQuotedString(str) => Some(str),
                _ => self.expected("string literal", next_token)?,
            }
        } else {
            None
        };

        self.expect_keyword(Keyword::AS)?;
        let query = self.parse_query()?;
        // Redshift-only trailing clause, after the defining query.
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        Ok(Statement::CreateView {
            name,
            columns,
            query,
            materialized,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            to,
        })
    }
4840
    /// Parse a `CREATE ROLE` statement (the tokens after `CREATE ROLE`).
    ///
    /// MsSql recognizes only `AUTHORIZATION <owner>` as a role option;
    /// Postgres recognizes its full role-option list (`LOGIN`, `PASSWORD`,
    /// `VALID UNTIL`, `IN ROLE`, ...); all other dialects accept just the
    /// comma-separated list of role names.
    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // An optional WITH may introduce the option list; which option
        // keywords are recognized after it depends on the dialect.
        let _ = self.parse_keyword(Keyword::WITH); let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // Each option may appear at most once; the loop below rejects a
        // duplicate with a parser error located at the offending keyword.
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            // Location of the keyword just consumed, used in error messages.
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                // `PASSWORD NULL` or `PASSWORD '<literal>'`.
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                // `CONNECTION LIMIT <n>`.
                Keyword::CONNECTION => {
                    self.expect_keyword(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                // `VALID UNTIL <value>`.
                Keyword::VALID => {
                    self.expect_keyword(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                // `IN ROLE r, ...` or `IN GROUP g, ...`.
                Keyword::IN => {
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier(false))?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier(false))?;
                            Ok(())
                        }
                    } else {
                        self.expected("ROLE or GROUP after IN", self.peek_token())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier(false))?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier(false))?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier(false))?;
                        Ok(())
                    }
                }
                // Not reachable for the keyword lists built above; defensive.
                _ => break,
            }?
        }

        Ok(Statement::CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        })
    }
5064
5065 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
5066 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
5067 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
5068 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
5069 Some(Keyword::SESSION_USER) => Owner::SessionUser,
5070 Some(_) => unreachable!(),
5071 None => {
5072 match self.parse_identifier(false) {
5073 Ok(ident) => Owner::Ident(ident),
5074 Err(e) => {
5075 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
5076 }
5077 }
5078 }
5079 };
5080 Ok(owner)
5081 }
5082
5083 pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
5093 let name = self.parse_identifier(false)?;
5094 self.expect_keyword(Keyword::ON)?;
5095 let table_name = self.parse_object_name(false)?;
5096
5097 let policy_type = if self.parse_keyword(Keyword::AS) {
5098 let keyword =
5099 self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
5100 Some(match keyword {
5101 Keyword::PERMISSIVE => CreatePolicyType::Permissive,
5102 Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
5103 _ => unreachable!(),
5104 })
5105 } else {
5106 None
5107 };
5108
5109 let command = if self.parse_keyword(Keyword::FOR) {
5110 let keyword = self.expect_one_of_keywords(&[
5111 Keyword::ALL,
5112 Keyword::SELECT,
5113 Keyword::INSERT,
5114 Keyword::UPDATE,
5115 Keyword::DELETE,
5116 ])?;
5117 Some(match keyword {
5118 Keyword::ALL => CreatePolicyCommand::All,
5119 Keyword::SELECT => CreatePolicyCommand::Select,
5120 Keyword::INSERT => CreatePolicyCommand::Insert,
5121 Keyword::UPDATE => CreatePolicyCommand::Update,
5122 Keyword::DELETE => CreatePolicyCommand::Delete,
5123 _ => unreachable!(),
5124 })
5125 } else {
5126 None
5127 };
5128
5129 let to = if self.parse_keyword(Keyword::TO) {
5130 Some(self.parse_comma_separated(|p| p.parse_owner())?)
5131 } else {
5132 None
5133 };
5134
5135 let using = if self.parse_keyword(Keyword::USING) {
5136 self.expect_token(&Token::LParen)?;
5137 let expr = self.parse_expr()?;
5138 self.expect_token(&Token::RParen)?;
5139 Some(expr)
5140 } else {
5141 None
5142 };
5143
5144 let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
5145 self.expect_token(&Token::LParen)?;
5146 let expr = self.parse_expr()?;
5147 self.expect_token(&Token::RParen)?;
5148 Some(expr)
5149 } else {
5150 None
5151 };
5152
5153 Ok(CreatePolicy {
5154 name,
5155 table_name,
5156 policy_type,
5157 command,
5158 to,
5159 using,
5160 with_check,
5161 })
5162 }
5163
    /// Parse a `DROP` statement. `FUNCTION`, `POLICY`, `PROCEDURE`, `SECRET`
    /// and `TRIGGER` objects are delegated to dedicated helpers; all other
    /// object kinds share the generic name-list handling below.
    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
        // Optional qualifiers that precede the object kind: `TEMPORARY`
        // (MySql/DuckDb/generic) and `PERSISTENT` (DuckDb). `temporary` is
        // recorded on the generic Drop statement; `persistent` is only
        // consumed by DROP SECRET.
        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
            && self.parse_keyword(Keyword::TEMPORARY);
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();

        let object_type = if self.parse_keyword(Keyword::TABLE) {
            ObjectType::Table
        } else if self.parse_keyword(Keyword::VIEW) {
            ObjectType::View
        } else if self.parse_keyword(Keyword::INDEX) {
            ObjectType::Index
        } else if self.parse_keyword(Keyword::ROLE) {
            ObjectType::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            ObjectType::Schema
        } else if self.parse_keyword(Keyword::DATABASE) {
            ObjectType::Database
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            ObjectType::Sequence
        } else if self.parse_keyword(Keyword::STAGE) {
            ObjectType::Stage
        } else if self.parse_keyword(Keyword::TYPE) {
            ObjectType::Type
        } else if self.parse_keyword(Keyword::FUNCTION) {
            return self.parse_drop_function();
        } else if self.parse_keyword(Keyword::POLICY) {
            return self.parse_drop_policy();
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            return self.parse_drop_procedure();
        } else if self.parse_keyword(Keyword::SECRET) {
            return self.parse_drop_secret(temporary, persistent);
        } else if self.parse_keyword(Keyword::TRIGGER) {
            return self.parse_drop_trigger();
        } else {
            return self.expected(
                "TABLE, VIEW, INDEX, ROLE, SCHEMA, DATABASE, FUNCTION, PROCEDURE, STAGE, TRIGGER, SECRET, SEQUENCE, or TYPE after DROP",
                self.peek_token(),
            );
        };
        // One or more comma-separated object names may follow.
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // Trailing behavior modifiers. CASCADE and RESTRICT are mutually
        // exclusive, and none of the three apply to DROP ROLE.
        let loc = self.peek_token().span.start;
        let cascade = self.parse_keyword(Keyword::CASCADE);
        let restrict = self.parse_keyword(Keyword::RESTRICT);
        let purge = self.parse_keyword(Keyword::PURGE);
        if cascade && restrict {
            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
        }
        if object_type == ObjectType::Role && (cascade || restrict || purge) {
            return parser_err!(
                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
                loc
            );
        }
        Ok(Statement::Drop {
            object_type,
            if_exists,
            names,
            cascade,
            restrict,
            purge,
            temporary,
        })
    }
5233
5234 fn parse_optional_referential_action(&mut self) -> Option<ReferentialAction> {
5235 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
5236 Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
5237 Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
5238 _ => None,
5239 }
5240 }
5241
5242 fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
5247 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5248 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
5249 let option = self.parse_optional_referential_action();
5250 Ok(Statement::DropFunction {
5251 if_exists,
5252 func_desc,
5253 option,
5254 })
5255 }
5256
5257 fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
5263 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5264 let name = self.parse_identifier(false)?;
5265 self.expect_keyword(Keyword::ON)?;
5266 let table_name = self.parse_object_name(false)?;
5267 let option = self.parse_optional_referential_action();
5268 Ok(Statement::DropPolicy {
5269 if_exists,
5270 name,
5271 table_name,
5272 option,
5273 })
5274 }
5275
5276 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
5281 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5282 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
5283 let option = self.parse_optional_referential_action();
5284 Ok(Statement::DropProcedure {
5285 if_exists,
5286 proc_desc,
5287 option,
5288 })
5289 }
5290
5291 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
5292 let name = self.parse_object_name(false)?;
5293
5294 let args = if self.consume_token(&Token::LParen) {
5295 if self.consume_token(&Token::RParen) {
5296 None
5297 } else {
5298 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
5299 self.expect_token(&Token::RParen)?;
5300 Some(args)
5301 }
5302 } else {
5303 None
5304 };
5305
5306 Ok(FunctionDesc { name, args })
5307 }
5308
5309 fn parse_drop_secret(
5311 &mut self,
5312 temporary: bool,
5313 persistent: bool,
5314 ) -> Result<Statement, ParserError> {
5315 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5316 let name = self.parse_identifier(false)?;
5317 let storage_specifier = if self.parse_keyword(Keyword::FROM) {
5318 self.parse_identifier(false).ok()
5319 } else {
5320 None
5321 };
5322 let temp = match (temporary, persistent) {
5323 (true, false) => Some(true),
5324 (false, true) => Some(false),
5325 (false, false) => None,
5326 _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
5327 };
5328
5329 Ok(Statement::DropSecret {
5330 if_exists,
5331 temporary: temp,
5332 name,
5333 storage_specifier,
5334 })
5335 }
5336
    /// Parse a `DECLARE` statement. BigQuery, Snowflake and MsSql have their
    /// own variable-declaration grammars and are dispatched to dedicated
    /// helpers; the fallback below parses the standard SQL cursor form:
    ///
    /// `DECLARE name [BINARY] [ASENSITIVE | INSENSITIVE] [[NO] SCROLL]
    ///  CURSOR [WITH HOLD | WITHOUT HOLD] FOR query`
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier(false)?;

        // Always Some(_) for the cursor form: Some(true) iff BINARY was given.
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        // Some(true) for INSENSITIVE, Some(false) for ASENSITIVE, else None.
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        // Some(true) for SCROLL, Some(false) for NO SCROLL, else None.
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        self.expect_keyword(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // Optional `WITH HOLD` / `WITHOUT HOLD`; HOLD is mandatory once
        // WITH or WITHOUT has been consumed.
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    _ => unreachable!(),
                }
            }
            None => None,
        };

        self.expect_keyword(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
5409
5410 pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
5418 let names = self.parse_comma_separated(|parser| Parser::parse_identifier(parser, false))?;
5419
5420 let data_type = match self.peek_token().token {
5421 Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
5422 _ => Some(self.parse_data_type()?),
5423 };
5424
5425 let expr = if data_type.is_some() {
5426 if self.parse_keyword(Keyword::DEFAULT) {
5427 Some(self.parse_expr()?)
5428 } else {
5429 None
5430 }
5431 } else {
5432 self.expect_keyword(Keyword::DEFAULT)?;
5435 Some(self.parse_expr()?)
5436 };
5437
5438 Ok(Statement::Declare {
5439 stmts: vec![Declare {
5440 names,
5441 data_type,
5442 assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
5443 declare_type: None,
5444 binary: None,
5445 sensitive: None,
5446 scroll: None,
5447 hold: None,
5448 for_query: None,
5449 }],
5450 })
5451 }
5452
    /// Parse a Snowflake `DECLARE` block: one or more declarations separated
    /// by semicolons. Each declaration is one of:
    /// - `name CURSOR FOR <query | expr>`
    /// - `name RESULTSET [:= expr | DEFAULT expr]`
    /// - `name EXCEPTION [(...)]`
    /// - `name [data_type] [:= expr | DEFAULT expr]` (plain variable)
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier(false)?;
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword(Keyword::FOR)?;
                    // `CURSOR FOR SELECT ...` binds a query; anything else is
                    // treated as an expression (e.g. a variable name).
                    match self.peek_token().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    // A RESULTSET may carry an optional initializer up to the
                    // terminating semicolon.
                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    // An EXCEPTION may carry a parenthesized definition.
                    let assigned_expr = if self.peek_token().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: either an initializer directly, or a
                    // data type optionally followed by an initializer.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = self.peek_token().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            if self.consume_token(&Token::SemiColon) {
                // Heuristic: after a `;`, a word that is not a reserved
                // keyword starts another declaration; otherwise back up so
                // the caller sees the `;` again and stop.
                match self.peek_token().token {
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        continue;
                    }
                    _ => {
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
5568
5569 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
5581 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
5582
5583 Ok(Statement::Declare { stmts })
5584 }
5585
5586 pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
5597 let name = {
5598 let ident = self.parse_identifier(false)?;
5599 if !ident.value.starts_with('@') {
5600 Err(ParserError::TokenizerError(
5601 "Invalid MsSql variable declaration.".to_string(),
5602 ))
5603 } else {
5604 Ok(ident)
5605 }
5606 }?;
5607
5608 let (declare_type, data_type) = match self.peek_token().token {
5609 Token::Word(w) => match w.keyword {
5610 Keyword::CURSOR => {
5611 self.next_token();
5612 (Some(DeclareType::Cursor), None)
5613 }
5614 Keyword::AS => {
5615 self.next_token();
5616 (None, Some(self.parse_data_type()?))
5617 }
5618 _ => (None, Some(self.parse_data_type()?)),
5619 },
5620 _ => (None, Some(self.parse_data_type()?)),
5621 };
5622
5623 let assignment = self.parse_mssql_variable_declaration_expression()?;
5624
5625 Ok(Declare {
5626 names: vec![name],
5627 data_type,
5628 assignment,
5629 declare_type,
5630 binary: None,
5631 sensitive: None,
5632 scroll: None,
5633 hold: None,
5634 for_query: None,
5635 })
5636 }
5637
5638 pub fn parse_snowflake_variable_declaration_expression(
5646 &mut self,
5647 ) -> Result<Option<DeclareAssignment>, ParserError> {
5648 Ok(match self.peek_token().token {
5649 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
5650 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
5652 }
5653 Token::Assignment => {
5654 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
5656 self.parse_expr()?,
5657 )))
5658 }
5659 _ => None,
5660 })
5661 }
5662
5663 pub fn parse_mssql_variable_declaration_expression(
5670 &mut self,
5671 ) -> Result<Option<DeclareAssignment>, ParserError> {
5672 Ok(match self.peek_token().token {
5673 Token::Eq => {
5674 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
5676 self.parse_expr()?,
5677 )))
5678 }
5679 _ => None,
5680 })
5681 }
5682
5683 pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
5685 let direction = if self.parse_keyword(Keyword::NEXT) {
5686 FetchDirection::Next
5687 } else if self.parse_keyword(Keyword::PRIOR) {
5688 FetchDirection::Prior
5689 } else if self.parse_keyword(Keyword::FIRST) {
5690 FetchDirection::First
5691 } else if self.parse_keyword(Keyword::LAST) {
5692 FetchDirection::Last
5693 } else if self.parse_keyword(Keyword::ABSOLUTE) {
5694 FetchDirection::Absolute {
5695 limit: self.parse_number_value()?,
5696 }
5697 } else if self.parse_keyword(Keyword::RELATIVE) {
5698 FetchDirection::Relative {
5699 limit: self.parse_number_value()?,
5700 }
5701 } else if self.parse_keyword(Keyword::FORWARD) {
5702 if self.parse_keyword(Keyword::ALL) {
5703 FetchDirection::ForwardAll
5704 } else {
5705 FetchDirection::Forward {
5706 limit: Some(self.parse_number_value()?),
5708 }
5709 }
5710 } else if self.parse_keyword(Keyword::BACKWARD) {
5711 if self.parse_keyword(Keyword::ALL) {
5712 FetchDirection::BackwardAll
5713 } else {
5714 FetchDirection::Backward {
5715 limit: Some(self.parse_number_value()?),
5717 }
5718 }
5719 } else if self.parse_keyword(Keyword::ALL) {
5720 FetchDirection::All
5721 } else {
5722 FetchDirection::Count {
5723 limit: self.parse_number_value()?,
5724 }
5725 };
5726
5727 self.expect_one_of_keywords(&[Keyword::FROM, Keyword::IN])?;
5728
5729 let name = self.parse_identifier(false)?;
5730
5731 let into = if self.parse_keyword(Keyword::INTO) {
5732 Some(self.parse_object_name(false)?)
5733 } else {
5734 None
5735 };
5736
5737 Ok(Statement::Fetch {
5738 name,
5739 direction,
5740 into,
5741 })
5742 }
5743
5744 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
5745 let object_type = if self.parse_keyword(Keyword::ALL) {
5746 DiscardObject::ALL
5747 } else if self.parse_keyword(Keyword::PLANS) {
5748 DiscardObject::PLANS
5749 } else if self.parse_keyword(Keyword::SEQUENCES) {
5750 DiscardObject::SEQUENCES
5751 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
5752 DiscardObject::TEMP
5753 } else {
5754 return self.expected(
5755 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
5756 self.peek_token(),
5757 );
5758 };
5759 Ok(Statement::Discard { object_type })
5760 }
5761
    /// Parse a `CREATE [UNIQUE] INDEX` statement (the tokens after `INDEX`).
    ///
    /// `unique` reports whether the caller already consumed `UNIQUE`.
    /// Supports `CONCURRENTLY`, `IF NOT EXISTS`, an optional index name,
    /// `USING`, an `INCLUDE` column list, `NULLS [NOT] DISTINCT`, a
    /// dialect-gated `WITH (...)` parameter list, and a `WHERE` predicate.
    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        // The index name is optional: `CREATE INDEX ON t (...)` is accepted.
        // With IF NOT EXISTS a name is always required.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            self.expect_keyword(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };
        let table_name = self.parse_object_name(false)?;
        // Optional index method, e.g. `USING btree`.
        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_identifier(false)?)
        } else {
            None
        };
        // Mandatory parenthesized key-column list (each with ASC/DESC etc.).
        self.expect_token(&Token::LParen)?;
        let columns = self.parse_comma_separated(Parser::parse_order_by_expr)?;
        self.expect_token(&Token::RParen)?;

        // Optional `INCLUDE (col, ...)` covering columns.
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier(false))?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // `NULLS DISTINCT` -> Some(true); `NULLS NOT DISTINCT` -> Some(false).
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        // `WITH (param, ...)` is only parsed for dialects that opt in.
        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Optional partial-index predicate.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Statement::CreateIndex(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
        }))
    }
5830
5831 pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
5832 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5833 let name = self.parse_identifier(false)?;
5834
5835 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
5836 let schema = if self.parse_keyword(Keyword::SCHEMA) {
5837 Some(self.parse_identifier(false)?)
5838 } else {
5839 None
5840 };
5841
5842 let version = if self.parse_keyword(Keyword::VERSION) {
5843 Some(self.parse_identifier(false)?)
5844 } else {
5845 None
5846 };
5847
5848 let cascade = self.parse_keyword(Keyword::CASCADE);
5849
5850 (schema, version, cascade)
5851 } else {
5852 (None, None, false)
5853 };
5854
5855 Ok(Statement::CreateExtension {
5856 name,
5857 if_not_exists,
5858 schema,
5859 version,
5860 cascade,
5861 })
5862 }
5863
5864 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
5866 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
5867 self.expect_token(&Token::LParen)?;
5868 let columns = self.parse_comma_separated(Parser::parse_column_def)?;
5869 self.expect_token(&Token::RParen)?;
5870 Ok(HiveDistributionStyle::PARTITIONED { columns })
5871 } else {
5872 Ok(HiveDistributionStyle::NONE)
5873 }
5874 }
5875
    /// Parse the Hive storage clauses of a CREATE TABLE in any order:
    /// `ROW FORMAT ...`, `STORED AS ...`, `LOCATION '...'` and
    /// `WITH SERDEPROPERTIES (...)`. Stops at the first token that starts
    /// none of these clauses.
    pub fn parse_hive_formats(&mut self) -> Result<HiveFormat, ParserError> {
        let mut hive_format = HiveFormat::default();
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::ROW,
                Keyword::STORED,
                Keyword::LOCATION,
                Keyword::WITH,
            ]) {
                Some(Keyword::ROW) => {
                    hive_format.row_format = Some(self.parse_row_format()?);
                }
                Some(Keyword::STORED) => {
                    self.expect_keyword(Keyword::AS)?;
                    // Either an INPUTFORMAT/OUTPUTFORMAT pair or a plain
                    // file-format keyword.
                    if self.parse_keyword(Keyword::INPUTFORMAT) {
                        let input_format = self.parse_expr()?;
                        self.expect_keyword(Keyword::OUTPUTFORMAT)?;
                        let output_format = self.parse_expr()?;
                        hive_format.storage = Some(HiveIOFormat::IOF {
                            input_format,
                            output_format,
                        });
                    } else {
                        let format = self.parse_file_format()?;
                        hive_format.storage = Some(HiveIOFormat::FileFormat { format });
                    }
                }
                Some(Keyword::LOCATION) => {
                    hive_format.location = Some(self.parse_literal_string()?);
                }
                Some(Keyword::WITH) => {
                    // Push WITH back so the options parser can match the full
                    // `WITH SERDEPROPERTIES` prefix itself.
                    self.prev_token();
                    let properties = self
                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
                    if !properties.is_empty() {
                        hive_format.serde_properties = Some(properties);
                    } else {
                        // WITH was present but not SERDEPROPERTIES: stop here.
                        break;
                    }
                }
                None => break,
                _ => break,
            }
        }

        Ok(hive_format)
    }
5923
    /// Parse a Hive `ROW FORMAT` clause (the leading `ROW` has already been
    /// consumed): either `FORMAT SERDE '<class>'` or `FORMAT DELIMITED`
    /// followed by any number of delimiter sub-clauses (`FIELDS TERMINATED
    /// BY`, `COLLECTION ITEMS TERMINATED BY`, `MAP KEYS TERMINATED BY`,
    /// `LINES TERMINATED BY`, `NULL DEFINED AS`).
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            // DELIMITED (or no keyword at all) falls through to the
            // delimiter-collection loop below.
            _ => {
                let mut row_delimiters = vec![];

                // Collect delimiter sub-clauses until a keyword is not
                // followed by its expected continuation, or no delimiter
                // keyword is found.
                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        Some(Keyword::FIELDS) => {
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
                                    char: self.parse_identifier(false)?,
                                });

                                // FIELDS may additionally carry ESCAPED BY.
                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                    row_delimiters.push(HiveRowDelimiter {
                                        delimiter: HiveDelimiter::FieldsEscapedBy,
                                        char: self.parse_identifier(false)?,
                                    });
                                }
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::COLLECTION) => {
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                    char: self.parse_identifier(false)?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::MAP) => {
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                    char: self.parse_identifier(false)?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::LINES) => {
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::LinesTerminatedBy,
                                    char: self.parse_identifier(false)?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::NULL) => {
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::NullDefinedAs,
                                    char: self.parse_identifier(false)?,
                                });
                            } else {
                                break;
                            }
                        }
                        _ => {
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
6019
6020 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
6021 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
6022 Ok(Some(self.parse_identifier(false)?))
6023 } else {
6024 Ok(None)
6025 }
6026 }
6027
6028 pub fn parse_create_table(
6029 &mut self,
6030 or_replace: bool,
6031 temporary: bool,
6032 global: Option<bool>,
6033 transient: bool,
6034 ) -> Result<Statement, ParserError> {
6035 let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
6036 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6037 let table_name = self.parse_object_name(allow_unquoted_hyphen)?;
6038
6039 let on_cluster = self.parse_optional_on_cluster()?;
6041
6042 let like = if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
6043 self.parse_object_name(allow_unquoted_hyphen).ok()
6044 } else {
6045 None
6046 };
6047
6048 let clone = if self.parse_keyword(Keyword::CLONE) {
6049 self.parse_object_name(allow_unquoted_hyphen).ok()
6050 } else {
6051 None
6052 };
6053
6054 let (columns, constraints) = self.parse_columns()?;
6056 let mut comment = if dialect_of!(self is HiveDialect)
6057 && self.parse_keyword(Keyword::COMMENT)
6058 {
6059 let next_token = self.next_token();
6060 match next_token.token {
6061 Token::SingleQuotedString(str) => Some(CommentDef::AfterColumnDefsWithoutEq(str)),
6062 _ => self.expected("comment", next_token)?,
6063 }
6064 } else {
6065 None
6066 };
6067
6068 let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);
6070
6071 let hive_distribution = self.parse_hive_distribution()?;
6072 let clustered_by = self.parse_optional_clustered_by()?;
6073 let hive_formats = self.parse_hive_formats()?;
6074 let with_options = self.parse_options(Keyword::WITH)?;
6076 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
6077
6078 let engine = if self.parse_keyword(Keyword::ENGINE) {
6079 self.expect_token(&Token::Eq)?;
6080 let next_token = self.next_token();
6081 match next_token.token {
6082 Token::Word(w) => {
6083 let name = w.value;
6084 let parameters = if self.peek_token() == Token::LParen {
6085 Some(self.parse_parenthesized_identifiers()?)
6086 } else {
6087 None
6088 };
6089 Some(TableEngine { name, parameters })
6090 }
6091 _ => self.expected("identifier", next_token)?,
6092 }
6093 } else {
6094 None
6095 };
6096
6097 let auto_increment_offset = if self.parse_keyword(Keyword::AUTO_INCREMENT) {
6098 let _ = self.consume_token(&Token::Eq);
6099 let next_token = self.next_token();
6100 match next_token.token {
6101 Token::Number(s, _) => Some(Self::parse::<u32>(s, next_token.span.start)?),
6102 _ => self.expected("literal int", next_token)?,
6103 }
6104 } else {
6105 None
6106 };
6107
6108 let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
6111 && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
6112 {
6113 Some(Box::new(self.parse_expr()?))
6114 } else {
6115 None
6116 };
6117
6118 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
6119 if self.consume_token(&Token::LParen) {
6120 let columns = if self.peek_token() != Token::RParen {
6121 self.parse_comma_separated(|p| p.parse_expr())?
6122 } else {
6123 vec![]
6124 };
6125 self.expect_token(&Token::RParen)?;
6126 Some(OneOrManyWithParens::Many(columns))
6127 } else {
6128 Some(OneOrManyWithParens::One(self.parse_expr()?))
6129 }
6130 } else {
6131 None
6132 };
6133
6134 let create_table_config = self.parse_optional_create_table_config()?;
6135
6136 let default_charset = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
6137 self.expect_token(&Token::Eq)?;
6138 let next_token = self.next_token();
6139 match next_token.token {
6140 Token::Word(w) => Some(w.value),
6141 _ => self.expected("identifier", next_token)?,
6142 }
6143 } else {
6144 None
6145 };
6146
6147 let collation = if self.parse_keywords(&[Keyword::COLLATE]) {
6148 self.expect_token(&Token::Eq)?;
6149 let next_token = self.next_token();
6150 match next_token.token {
6151 Token::Word(w) => Some(w.value),
6152 _ => self.expected("identifier", next_token)?,
6153 }
6154 } else {
6155 None
6156 };
6157
6158 let on_commit: Option<OnCommit> =
6159 if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT, Keyword::DELETE, Keyword::ROWS])
6160 {
6161 Some(OnCommit::DeleteRows)
6162 } else if self.parse_keywords(&[
6163 Keyword::ON,
6164 Keyword::COMMIT,
6165 Keyword::PRESERVE,
6166 Keyword::ROWS,
6167 ]) {
6168 Some(OnCommit::PreserveRows)
6169 } else if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT, Keyword::DROP]) {
6170 Some(OnCommit::Drop)
6171 } else {
6172 None
6173 };
6174
6175 let strict = self.parse_keyword(Keyword::STRICT);
6176
6177 if !dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
6179 self.prev_token();
6181 comment = self.parse_optional_inline_comment()?
6182 };
6183
6184 let query = if self.parse_keyword(Keyword::AS) {
6186 Some(self.parse_query()?)
6187 } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
6188 {
6189 self.prev_token();
6191 Some(self.parse_query()?)
6192 } else {
6193 None
6194 };
6195
6196 Ok(CreateTableBuilder::new(table_name)
6197 .temporary(temporary)
6198 .columns(columns)
6199 .constraints(constraints)
6200 .with_options(with_options)
6201 .table_properties(table_properties)
6202 .or_replace(or_replace)
6203 .if_not_exists(if_not_exists)
6204 .transient(transient)
6205 .hive_distribution(hive_distribution)
6206 .hive_formats(Some(hive_formats))
6207 .global(global)
6208 .query(query)
6209 .without_rowid(without_rowid)
6210 .like(like)
6211 .clone_clause(clone)
6212 .engine(engine)
6213 .comment(comment)
6214 .auto_increment_offset(auto_increment_offset)
6215 .order_by(order_by)
6216 .default_charset(default_charset)
6217 .collation(collation)
6218 .on_commit(on_commit)
6219 .on_cluster(on_cluster)
6220 .clustered_by(clustered_by)
6221 .partition_by(create_table_config.partition_by)
6222 .cluster_by(create_table_config.cluster_by)
6223 .options(create_table_config.options)
6224 .primary_key(primary_key)
6225 .strict(strict)
6226 .build())
6227 }
6228
6229 fn parse_optional_create_table_config(
6234 &mut self,
6235 ) -> Result<CreateTableConfiguration, ParserError> {
6236 let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
6237 && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
6238 {
6239 Some(Box::new(self.parse_expr()?))
6240 } else {
6241 None
6242 };
6243
6244 let mut cluster_by = None;
6245 let mut options = None;
6246 if dialect_of!(self is BigQueryDialect | GenericDialect) {
6247 if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
6248 cluster_by = Some(WrappedCollection::NoWrapping(
6249 self.parse_comma_separated(|p| p.parse_identifier(false))?,
6250 ));
6251 };
6252
6253 if let Token::Word(word) = self.peek_token().token {
6254 if word.keyword == Keyword::OPTIONS {
6255 options = Some(self.parse_options(Keyword::OPTIONS)?);
6256 }
6257 };
6258 }
6259
6260 Ok(CreateTableConfiguration {
6261 partition_by,
6262 cluster_by,
6263 options,
6264 })
6265 }
6266
6267 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
6268 let comment = if self.parse_keyword(Keyword::COMMENT) {
6269 let has_eq = self.consume_token(&Token::Eq);
6270 let next_token = self.next_token();
6271 match next_token.token {
6272 Token::SingleQuotedString(str) => Some(if has_eq {
6273 CommentDef::WithEq(str)
6274 } else {
6275 CommentDef::WithoutEq(str)
6276 }),
6277 _ => self.expected("comment", next_token)?,
6278 }
6279 } else {
6280 None
6281 };
6282 Ok(comment)
6283 }
6284
6285 pub fn parse_optional_procedure_parameters(
6286 &mut self,
6287 ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
6288 let mut params = vec![];
6289 if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
6290 return Ok(Some(params));
6291 }
6292 loop {
6293 if let Token::Word(_) = self.peek_token().token {
6294 params.push(self.parse_procedure_param()?)
6295 }
6296 let comma = self.consume_token(&Token::Comma);
6297 if self.consume_token(&Token::RParen) {
6298 break;
6300 } else if !comma {
6301 return self.expected("',' or ')' after parameter definition", self.peek_token());
6302 }
6303 }
6304 Ok(Some(params))
6305 }
6306
    /// Parse a parenthesized list of column definitions and table-level
    /// constraints for `CREATE TABLE`.
    ///
    /// A missing `(` or an empty `()` yields two empty vectors. Each element
    /// is first tried as a table constraint, because a leading
    /// CONSTRAINT/PRIMARY/FOREIGN/... keyword distinguishes it from a column
    /// name; `parse_optional_table_constraint` consumes nothing on a miss.
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = self.peek_token().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected("column name or constraint definition", self.peek_token());
            }

            // After each element there must be a `,`, a closing `)`, or both
            // (i.e. a trailing comma).
            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token().token == Token::RParen;

            if !comma && !rparen {
                return self.expected("',' or ')' after column definition", self.peek_token());
            };

            // `)` with no preceding comma always closes the list; `,)` closes
            // it only when the parser options allow trailing commas —
            // otherwise we loop again and the `)` is reported as an error on
            // the next iteration.
            if rparen && (!comma || self.options.trailing_commas) {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
6338
6339 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
6340 let name = self.parse_identifier(false)?;
6341 let data_type = self.parse_data_type()?;
6342 Ok(ProcedureParam { name, data_type })
6343 }
6344
    /// Parse one column definition:
    /// `<name> [<data type>] [COLLATE <name>] [<column option> ...]`.
    ///
    /// The data type may be omitted entirely for SQLite (see
    /// `is_column_type_sqlite_unspecified`). COLLATE may appear immediately
    /// after the type or — for MySQL/Snowflake/generic — interleaved with the
    /// other column options; the last occurrence wins.
    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
        let name = self.parse_identifier(false)?;
        let data_type = if self.is_column_type_sqlite_unspecified() {
            DataType::Unspecified
        } else {
            self.parse_data_type()?
        };
        let mut collation = if self.parse_keyword(Keyword::COLLATE) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::CONSTRAINT) {
                // Named option: `CONSTRAINT <name>` must be followed by a
                // constraint body, otherwise it is a hard error.
                let name = Some(self.parse_identifier(false)?);
                if let Some(option) = self.parse_optional_column_option()? {
                    options.push(ColumnOptionDef { name, option });
                } else {
                    return self.expected(
                        "constraint details after CONSTRAINT <name>",
                        self.peek_token(),
                    );
                }
            } else if let Some(option) = self.parse_optional_column_option()? {
                options.push(ColumnOptionDef { name: None, option });
            } else if dialect_of!(self is MySqlDialect | SnowflakeDialect | GenericDialect)
                && self.parse_keyword(Keyword::COLLATE)
            {
                // Late COLLATE among the options (MySQL-style).
                collation = Some(self.parse_object_name(false)?);
            } else {
                break;
            };
        }
        Ok(ColumnDef {
            name,
            data_type,
            collation,
            options,
        })
    }
6386
6387 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
6388 if dialect_of!(self is SQLiteDialect) {
6389 match self.peek_token().token {
6390 Token::Word(word) => matches!(
6391 word.keyword,
6392 Keyword::CONSTRAINT
6393 | Keyword::PRIMARY
6394 | Keyword::NOT
6395 | Keyword::UNIQUE
6396 | Keyword::CHECK
6397 | Keyword::DEFAULT
6398 | Keyword::COLLATE
6399 | Keyword::REFERENCES
6400 | Keyword::GENERATED
6401 | Keyword::AS
6402 ),
6403 _ => true, }
6405 } else {
6406 false
6407 }
6408 }
6409
6410 pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
6411 if let Some(option) = self.dialect.parse_column_option(self)? {
6412 return option;
6413 }
6414
6415 if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
6416 Ok(Some(ColumnOption::CharacterSet(
6417 self.parse_object_name(false)?,
6418 )))
6419 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
6420 Ok(Some(ColumnOption::NotNull))
6421 } else if self.parse_keywords(&[Keyword::COMMENT]) {
6422 let next_token = self.next_token();
6423 match next_token.token {
6424 Token::SingleQuotedString(value, ..) => Ok(Some(ColumnOption::Comment(value))),
6425 _ => self.expected("string", next_token),
6426 }
6427 } else if self.parse_keyword(Keyword::NULL) {
6428 Ok(Some(ColumnOption::Null))
6429 } else if self.parse_keyword(Keyword::DEFAULT) {
6430 Ok(Some(ColumnOption::Default(self.parse_expr()?)))
6431 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
6432 && self.parse_keyword(Keyword::MATERIALIZED)
6433 {
6434 Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
6435 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
6436 && self.parse_keyword(Keyword::ALIAS)
6437 {
6438 Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
6439 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
6440 && self.parse_keyword(Keyword::EPHEMERAL)
6441 {
6442 if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
6445 Ok(Some(ColumnOption::Ephemeral(None)))
6446 } else {
6447 Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
6448 }
6449 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
6450 let characteristics = self.parse_constraint_characteristics()?;
6451 Ok(Some(ColumnOption::Unique {
6452 is_primary: true,
6453 characteristics,
6454 }))
6455 } else if self.parse_keyword(Keyword::UNIQUE) {
6456 let characteristics = self.parse_constraint_characteristics()?;
6457 Ok(Some(ColumnOption::Unique {
6458 is_primary: false,
6459 characteristics,
6460 }))
6461 } else if self.parse_keyword(Keyword::REFERENCES) {
6462 let foreign_table = self.parse_object_name(false)?;
6463 let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
6466 let mut on_delete = None;
6467 let mut on_update = None;
6468 loop {
6469 if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
6470 on_delete = Some(self.parse_referential_action()?);
6471 } else if on_update.is_none()
6472 && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
6473 {
6474 on_update = Some(self.parse_referential_action()?);
6475 } else {
6476 break;
6477 }
6478 }
6479 let characteristics = self.parse_constraint_characteristics()?;
6480
6481 Ok(Some(ColumnOption::ForeignKey {
6482 foreign_table,
6483 referred_columns,
6484 on_delete,
6485 on_update,
6486 characteristics,
6487 }))
6488 } else if self.parse_keyword(Keyword::CHECK) {
6489 self.expect_token(&Token::LParen)?;
6490 let expr = self.parse_expr()?;
6491 self.expect_token(&Token::RParen)?;
6492 Ok(Some(ColumnOption::Check(expr)))
6493 } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
6494 && dialect_of!(self is MySqlDialect | GenericDialect)
6495 {
6496 Ok(Some(ColumnOption::DialectSpecific(vec![
6498 Token::make_keyword("AUTO_INCREMENT"),
6499 ])))
6500 } else if self.parse_keyword(Keyword::AUTOINCREMENT)
6501 && dialect_of!(self is SQLiteDialect | GenericDialect)
6502 {
6503 Ok(Some(ColumnOption::DialectSpecific(vec![
6505 Token::make_keyword("AUTOINCREMENT"),
6506 ])))
6507 } else if self.parse_keyword(Keyword::ASC)
6508 && self.dialect.supports_asc_desc_in_column_definition()
6509 {
6510 Ok(Some(ColumnOption::DialectSpecific(vec![
6512 Token::make_keyword("ASC"),
6513 ])))
6514 } else if self.parse_keyword(Keyword::DESC)
6515 && self.dialect.supports_asc_desc_in_column_definition()
6516 {
6517 Ok(Some(ColumnOption::DialectSpecific(vec![
6519 Token::make_keyword("DESC"),
6520 ])))
6521 } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
6522 && dialect_of!(self is MySqlDialect | GenericDialect)
6523 {
6524 let expr = self.parse_expr()?;
6525 Ok(Some(ColumnOption::OnUpdate(expr)))
6526 } else if self.parse_keyword(Keyword::GENERATED) {
6527 self.parse_optional_column_option_generated()
6528 } else if dialect_of!(self is BigQueryDialect | GenericDialect)
6529 && self.parse_keyword(Keyword::OPTIONS)
6530 {
6531 self.prev_token();
6532 Ok(Some(ColumnOption::Options(
6533 self.parse_options(Keyword::OPTIONS)?,
6534 )))
6535 } else if self.parse_keyword(Keyword::AS)
6536 && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
6537 {
6538 self.parse_optional_column_option_as()
6539 } else if self.parse_keyword(Keyword::IDENTITY)
6540 && dialect_of!(self is MsSqlDialect | GenericDialect)
6541 {
6542 let parameters = if self.consume_token(&Token::LParen) {
6543 let seed = self.parse_number()?;
6544 self.expect_token(&Token::Comma)?;
6545 let increment = self.parse_number()?;
6546 self.expect_token(&Token::RParen)?;
6547
6548 Some(IdentityPropertyFormatKind::FunctionCall(
6549 IdentityParameters { seed, increment },
6550 ))
6551 } else {
6552 None
6553 };
6554 Ok(Some(ColumnOption::Identity(
6555 IdentityPropertyKind::Identity(IdentityProperty {
6556 parameters,
6557 order: None,
6558 }),
6559 )))
6560 } else if dialect_of!(self is SQLiteDialect | GenericDialect)
6561 && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
6562 {
6563 Ok(Some(ColumnOption::OnConflict(
6565 self.expect_one_of_keywords(&[
6566 Keyword::ROLLBACK,
6567 Keyword::ABORT,
6568 Keyword::FAIL,
6569 Keyword::IGNORE,
6570 Keyword::REPLACE,
6571 ])?,
6572 )))
6573 } else {
6574 Ok(None)
6575 }
6576 }
6577
6578 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
6579 let name = self.parse_identifier(false)?;
6580 self.expect_token(&Token::Eq)?;
6581 let value = self.parse_literal_string()?;
6582
6583 Ok(Tag::new(name, value))
6584 }
6585
6586 fn parse_optional_column_option_generated(
6587 &mut self,
6588 ) -> Result<Option<ColumnOption>, ParserError> {
6589 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
6590 let mut sequence_options = vec![];
6591 if self.expect_token(&Token::LParen).is_ok() {
6592 sequence_options = self.parse_create_sequence_options()?;
6593 self.expect_token(&Token::RParen)?;
6594 }
6595 Ok(Some(ColumnOption::Generated {
6596 generated_as: GeneratedAs::Always,
6597 sequence_options: Some(sequence_options),
6598 generation_expr: None,
6599 generation_expr_mode: None,
6600 generated_keyword: true,
6601 }))
6602 } else if self.parse_keywords(&[
6603 Keyword::BY,
6604 Keyword::DEFAULT,
6605 Keyword::AS,
6606 Keyword::IDENTITY,
6607 ]) {
6608 let mut sequence_options = vec![];
6609 if self.expect_token(&Token::LParen).is_ok() {
6610 sequence_options = self.parse_create_sequence_options()?;
6611 self.expect_token(&Token::RParen)?;
6612 }
6613 Ok(Some(ColumnOption::Generated {
6614 generated_as: GeneratedAs::ByDefault,
6615 sequence_options: Some(sequence_options),
6616 generation_expr: None,
6617 generation_expr_mode: None,
6618 generated_keyword: true,
6619 }))
6620 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
6621 if self.expect_token(&Token::LParen).is_ok() {
6622 let expr = self.parse_expr()?;
6623 self.expect_token(&Token::RParen)?;
6624 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
6625 Ok((
6626 GeneratedAs::ExpStored,
6627 Some(GeneratedExpressionMode::Stored),
6628 ))
6629 } else if dialect_of!(self is PostgreSqlDialect) {
6630 self.expected("STORED", self.peek_token())
6632 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
6633 Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
6634 } else {
6635 Ok((GeneratedAs::Always, None))
6636 }?;
6637
6638 Ok(Some(ColumnOption::Generated {
6639 generated_as: gen_as,
6640 sequence_options: None,
6641 generation_expr: Some(expr),
6642 generation_expr_mode: expr_mode,
6643 generated_keyword: true,
6644 }))
6645 } else {
6646 Ok(None)
6647 }
6648 } else {
6649 Ok(None)
6650 }
6651 }
6652
6653 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
6654 self.expect_token(&Token::LParen)?;
6656 let expr = self.parse_expr()?;
6657 self.expect_token(&Token::RParen)?;
6658
6659 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
6660 (
6661 GeneratedAs::ExpStored,
6662 Some(GeneratedExpressionMode::Stored),
6663 )
6664 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
6665 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
6666 } else {
6667 (GeneratedAs::Always, None)
6668 };
6669
6670 Ok(Some(ColumnOption::Generated {
6671 generated_as: gen_as,
6672 sequence_options: None,
6673 generation_expr: Some(expr),
6674 generation_expr_mode: expr_mode,
6675 generated_keyword: false,
6676 }))
6677 }
6678
6679 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
6680 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
6681 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
6682 {
6683 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
6684
6685 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
6686 self.expect_token(&Token::LParen)?;
6687 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
6688 self.expect_token(&Token::RParen)?;
6689 Some(sorted_by_columns)
6690 } else {
6691 None
6692 };
6693
6694 self.expect_keyword(Keyword::INTO)?;
6695 let num_buckets = self.parse_number_value()?;
6696 self.expect_keyword(Keyword::BUCKETS)?;
6697 Some(ClusteredBy {
6698 columns,
6699 sorted_by,
6700 num_buckets,
6701 })
6702 } else {
6703 None
6704 };
6705 Ok(clustered_by)
6706 }
6707
6708 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
6709 if self.parse_keyword(Keyword::RESTRICT) {
6710 Ok(ReferentialAction::Restrict)
6711 } else if self.parse_keyword(Keyword::CASCADE) {
6712 Ok(ReferentialAction::Cascade)
6713 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
6714 Ok(ReferentialAction::SetNull)
6715 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
6716 Ok(ReferentialAction::NoAction)
6717 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
6718 Ok(ReferentialAction::SetDefault)
6719 } else {
6720 self.expected(
6721 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
6722 self.peek_token(),
6723 )
6724 }
6725 }
6726
    /// Parse the optional constraint characteristics `[NOT] DEFERRABLE`,
    /// `INITIALLY {DEFERRED | IMMEDIATE}` and `[NOT] ENFORCED`, accepted in
    /// any order but at most once each.
    ///
    /// Returns `Ok(None)` when none of the three was present.
    pub fn parse_constraint_characteristics(
        &mut self,
    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
        let mut cc = ConstraintCharacteristics::default();

        loop {
            // Each branch is guarded by `is_none` so a characteristic cannot
            // be given twice. NOT DEFERRABLE is tried before bare DEFERRABLE
            // so the NOT prefix is matched as part of the same clause.
            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
            {
                cc.deferrable = Some(false);
            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
                cc.deferrable = Some(true);
            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
                if self.parse_keyword(Keyword::DEFERRED) {
                    cc.initially = Some(DeferrableInitial::Deferred);
                } else if self.parse_keyword(Keyword::IMMEDIATE) {
                    cc.initially = Some(DeferrableInitial::Immediate);
                } else {
                    // INITIALLY without a mode is a hard error.
                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
                }
            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
                cc.enforced = Some(true);
            } else if cc.enforced.is_none()
                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
            {
                cc.enforced = Some(false);
            } else {
                break;
            }
        }

        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
            Ok(Some(cc))
        } else {
            Ok(None)
        }
    }
6763
    /// Parse an optional table-level constraint (optionally named via
    /// `CONSTRAINT <name>`): UNIQUE, PRIMARY KEY, FOREIGN KEY, CHECK, or the
    /// MySQL-style INDEX/KEY and FULLTEXT/SPATIAL definitions.
    ///
    /// Returns `Ok(None)` — with nothing consumed — when the next tokens do
    /// not start a constraint; an error if `CONSTRAINT <name>` was seen but
    /// no constraint body follows.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            Some(self.parse_identifier(false)?)
        } else {
            None
        };

        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // MySQL allows UNIQUE KEY / UNIQUE INDEX; other dialects must
                // go straight to the name/column list.
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self
                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
                }

                // Postgres: NULLS [NOT] DISTINCT.
                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                // Optional index name and USING <type> before the column list.
                let index_name = self.parse_optional_indent()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(TableConstraint::Unique {
                    name,
                    index_name,
                    index_type_display,
                    index_type,
                    columns,
                    index_options,
                    characteristics,
                    nulls_distinct,
                }))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                // PRIMARY must be followed by KEY.
                self.expect_keyword(Keyword::KEY)?;

                let index_name = self.parse_optional_indent()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(TableConstraint::PrimaryKey {
                    name,
                    index_name,
                    index_type,
                    columns,
                    index_options,
                    characteristics,
                }))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                self.expect_keyword(Keyword::KEY)?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                let mut on_delete = None;
                let mut on_update = None;
                // ON DELETE / ON UPDATE may appear in either order, once each.
                loop {
                    if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(TableConstraint::ForeignKey {
                    name,
                    columns,
                    foreign_table,
                    referred_columns,
                    on_delete,
                    on_update,
                    characteristics,
                }))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;
                Ok(Some(TableConstraint::Check { name, expr }))
            }
            // MySQL-style bare INDEX/KEY definition; no CONSTRAINT name may
            // precede it.
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                let display_as_key = w.keyword == Keyword::KEY;

                // A following USING belongs to the index type, not the name.
                let name = match self.peek_token().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_indent()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;

                Ok(Some(TableConstraint::Index {
                    display_as_key,
                    name,
                    index_type,
                    columns,
                }))
            }
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                // FULLTEXT/SPATIAL indexes reject a CONSTRAINT name.
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_indent()?;

                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;

                Ok(Some(TableConstraint::FulltextOrSpatial {
                    fulltext,
                    index_type_display,
                    opt_index_name,
                    columns,
                }))
            }
            _ => {
                if name.is_some() {
                    // CONSTRAINT <name> was consumed but no body followed.
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    // Not a constraint; back up over the token we took.
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
6923
6924 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
6925 Ok(if self.parse_keyword(Keyword::NULLS) {
6926 let not = self.parse_keyword(Keyword::NOT);
6927 self.expect_keyword(Keyword::DISTINCT)?;
6928 if not {
6929 NullsDistinctOption::NotDistinct
6930 } else {
6931 NullsDistinctOption::Distinct
6932 }
6933 } else {
6934 NullsDistinctOption::None
6935 })
6936 }
6937
6938 pub fn maybe_parse_options(
6939 &mut self,
6940 keyword: Keyword,
6941 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
6942 if let Token::Word(word) = self.peek_token().token {
6943 if word.keyword == keyword {
6944 return Ok(Some(self.parse_options(keyword)?));
6945 }
6946 };
6947 Ok(None)
6948 }
6949
6950 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
6951 if self.parse_keyword(keyword) {
6952 self.expect_token(&Token::LParen)?;
6953 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
6954 self.expect_token(&Token::RParen)?;
6955 Ok(options)
6956 } else {
6957 Ok(vec![])
6958 }
6959 }
6960
6961 pub fn parse_options_with_keywords(
6962 &mut self,
6963 keywords: &[Keyword],
6964 ) -> Result<Vec<SqlOption>, ParserError> {
6965 if self.parse_keywords(keywords) {
6966 self.expect_token(&Token::LParen)?;
6967 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
6968 self.expect_token(&Token::RParen)?;
6969 Ok(options)
6970 } else {
6971 Ok(vec![])
6972 }
6973 }
6974
6975 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
6976 if self.parse_keyword(Keyword::BTREE) {
6977 Ok(IndexType::BTree)
6978 } else if self.parse_keyword(Keyword::HASH) {
6979 Ok(IndexType::Hash)
6980 } else {
6981 self.expected("index type {BTREE | HASH}", self.peek_token())
6982 }
6983 }
6984
6985 pub fn parse_optional_using_then_index_type(
6987 &mut self,
6988 ) -> Result<Option<IndexType>, ParserError> {
6989 if self.parse_keyword(Keyword::USING) {
6990 Ok(Some(self.parse_index_type()?))
6991 } else {
6992 Ok(None)
6993 }
6994 }
6995
6996 pub fn parse_optional_indent(&mut self) -> Result<Option<Ident>, ParserError> {
6999 self.maybe_parse(|parser| parser.parse_identifier(false))
7000 }
7001
7002 #[must_use]
7003 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
7004 if self.parse_keyword(Keyword::KEY) {
7005 KeyOrIndexDisplay::Key
7006 } else if self.parse_keyword(Keyword::INDEX) {
7007 KeyOrIndexDisplay::Index
7008 } else {
7009 KeyOrIndexDisplay::None
7010 }
7011 }
7012
7013 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
7014 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
7015 Ok(Some(IndexOption::Using(index_type)))
7016 } else if self.parse_keyword(Keyword::COMMENT) {
7017 let s = self.parse_literal_string()?;
7018 Ok(Some(IndexOption::Comment(s)))
7019 } else {
7020 Ok(None)
7021 }
7022 }
7023
7024 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
7025 let mut options = Vec::new();
7026
7027 loop {
7028 match self.parse_optional_index_option()? {
7029 Some(index_option) => options.push(index_option),
7030 None => return Ok(options),
7031 }
7032 }
7033 }
7034
7035 pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
7036 let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
7037
7038 match self.peek_token().token {
7039 Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
7040 Ok(SqlOption::Ident(self.parse_identifier(false)?))
7041 }
7042 Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
7043 self.parse_option_partition()
7044 }
7045 Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
7046 self.parse_option_clustered()
7047 }
7048 _ => {
7049 let name = self.parse_identifier(false)?;
7050 self.expect_token(&Token::Eq)?;
7051 let value = self.parse_expr()?;
7052
7053 Ok(SqlOption::KeyValue { key: name, value })
7054 }
7055 }
7056 }
7057
    /// Parse the MSSQL `CLUSTERED ...` table option.
    ///
    /// The longest keyword sequence is tried first, so e.g.
    /// `CLUSTERED COLUMNSTORE INDEX ORDER (...)` is not claimed by the
    /// shorter `CLUSTERED COLUMNSTORE INDEX` match (`parse_keywords` rolls
    /// back entirely on a partial match, so the fall-through is safe).
    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
        if self.parse_keywords(&[
            Keyword::CLUSTERED,
            Keyword::COLUMNSTORE,
            Keyword::INDEX,
            Keyword::ORDER,
        ]) {
            Ok(SqlOption::Clustered(
                TableOptionsClustered::ColumnstoreIndexOrder(
                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                ),
            ))
        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
            Ok(SqlOption::Clustered(
                TableOptionsClustered::ColumnstoreIndex,
            ))
        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
            self.expect_token(&Token::LParen)?;

            // Each index column may carry an optional ASC/DESC.
            let columns = self.parse_comma_separated(|p| {
                let name = p.parse_identifier(false)?;
                let asc = p.parse_asc_desc();

                Ok(ClusteredIndex { name, asc })
            })?;

            self.expect_token(&Token::RParen)?;

            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
        } else {
            Err(ParserError::ParserError(
                "invalid CLUSTERED sequence".to_string(),
            ))
        }
    }
7093
7094 pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
7095 self.expect_keyword(Keyword::PARTITION)?;
7096 self.expect_token(&Token::LParen)?;
7097 let column_name = self.parse_identifier(false)?;
7098
7099 self.expect_keyword(Keyword::RANGE)?;
7100 let range_direction = if self.parse_keyword(Keyword::LEFT) {
7101 Some(PartitionRangeDirection::Left)
7102 } else if self.parse_keyword(Keyword::RIGHT) {
7103 Some(PartitionRangeDirection::Right)
7104 } else {
7105 None
7106 };
7107
7108 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
7109 self.expect_token(&Token::LParen)?;
7110
7111 let for_values = self.parse_comma_separated(Parser::parse_expr)?;
7112
7113 self.expect_token(&Token::RParen)?;
7114 self.expect_token(&Token::RParen)?;
7115
7116 Ok(SqlOption::Partition {
7117 column_name,
7118 range_direction,
7119 for_values,
7120 })
7121 }
7122
7123 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
7124 self.expect_token(&Token::LParen)?;
7125 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
7126 self.expect_token(&Token::RParen)?;
7127 Ok(Partition::Partitions(partitions))
7128 }
7129
7130 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
7131 self.expect_token(&Token::LParen)?;
7132 self.expect_keyword(Keyword::SELECT)?;
7133 let projection = self.parse_projection()?;
7134 let group_by = self.parse_optional_group_by()?;
7135 let order_by = self.parse_optional_order_by()?;
7136 self.expect_token(&Token::RParen)?;
7137 Ok(ProjectionSelect {
7138 projection,
7139 group_by,
7140 order_by,
7141 })
7142 }
7143 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
7144 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7145 let name = self.parse_identifier(false)?;
7146 let query = self.parse_projection_select()?;
7147 Ok(AlterTableOperation::AddProjection {
7148 if_not_exists,
7149 name,
7150 select: query,
7151 })
7152 }
7153
    /// Parses one comma-separated operation of an `ALTER TABLE` statement.
    ///
    /// Dispatches on the leading keyword (`ADD`, `RENAME`, `DISABLE`, `ENABLE`,
    /// `DROP`, `PARTITION`, `CHANGE`, `MODIFY`, `ALTER`, `SWAP`, ...).
    /// Several branches are dialect-gated via `dialect_of!` (projection and
    /// partition operations for ClickHouse/Generic, `OWNER TO` for
    /// Postgres/Generic, `DROP PRIMARY KEY` for MySQL/Generic).
    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
        let operation = if self.parse_keyword(Keyword::ADD) {
            // ADD <constraint> | ADD PROJECTION | ADD PARTITION(s) | ADD [COLUMN]
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                AlterTableOperation::AddConstraint(constraint)
            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
                && self.parse_keyword(Keyword::PROJECTION)
            {
                return self.parse_alter_table_add_projection();
            } else {
                let if_not_exists =
                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
                // Collect zero or more `PARTITION (...)` clauses.
                let mut new_partitions = vec![];
                loop {
                    if self.parse_keyword(Keyword::PARTITION) {
                        new_partitions.push(self.parse_partition()?);
                    } else {
                        break;
                    }
                }
                if !new_partitions.is_empty() {
                    AlterTableOperation::AddPartitions {
                        if_not_exists,
                        new_partitions,
                    }
                } else {
                    // No partitions: this is `ADD [COLUMN] <column_def>`.
                    let column_keyword = self.parse_keyword(Keyword::COLUMN);

                    // `IF NOT EXISTS` may appear before or after COLUMN in the
                    // dialects that allow it; otherwise the flag is forced off.
                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
                    {
                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
                            || if_not_exists
                    } else {
                        false
                    };

                    let column_def = self.parse_column_def()?;

                    let column_position = self.parse_column_position()?;

                    AlterTableOperation::AddColumn {
                        column_keyword,
                        if_not_exists,
                        column_def,
                        column_position,
                    }
                }
            }
        } else if self.parse_keyword(Keyword::RENAME) {
            // RENAME CONSTRAINT (Postgres only) | RENAME TO <table> | RENAME [COLUMN]
            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
                let old_name = self.parse_identifier(false)?;
                self.expect_keyword(Keyword::TO)?;
                let new_name = self.parse_identifier(false)?;
                AlterTableOperation::RenameConstraint { old_name, new_name }
            } else if self.parse_keyword(Keyword::TO) {
                let table_name = self.parse_object_name(false)?;
                AlterTableOperation::RenameTable { table_name }
            } else {
                let _ = self.parse_keyword(Keyword::COLUMN); // optional COLUMN keyword
                let old_column_name = self.parse_identifier(false)?;
                self.expect_keyword(Keyword::TO)?;
                let new_column_name = self.parse_identifier(false)?;
                AlterTableOperation::RenameColumn {
                    old_column_name,
                    new_column_name,
                }
            }
        } else if self.parse_keyword(Keyword::DISABLE) {
            // DISABLE { ROW LEVEL SECURITY | RULE <name> | TRIGGER <name> }
            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
                AlterTableOperation::DisableRowLevelSecurity {}
            } else if self.parse_keyword(Keyword::RULE) {
                let name = self.parse_identifier(false)?;
                AlterTableOperation::DisableRule { name }
            } else if self.parse_keyword(Keyword::TRIGGER) {
                let name = self.parse_identifier(false)?;
                AlterTableOperation::DisableTrigger { name }
            } else {
                return self.expected(
                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
                    self.peek_token(),
                );
            }
        } else if self.parse_keyword(Keyword::ENABLE) {
            // ENABLE { [ALWAYS | REPLICA] RULE/TRIGGER <name> | ROW LEVEL SECURITY }
            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
                let name = self.parse_identifier(false)?;
                AlterTableOperation::EnableAlwaysRule { name }
            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
                let name = self.parse_identifier(false)?;
                AlterTableOperation::EnableAlwaysTrigger { name }
            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
                AlterTableOperation::EnableRowLevelSecurity {}
            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
                let name = self.parse_identifier(false)?;
                AlterTableOperation::EnableReplicaRule { name }
            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
                let name = self.parse_identifier(false)?;
                AlterTableOperation::EnableReplicaTrigger { name }
            } else if self.parse_keyword(Keyword::RULE) {
                let name = self.parse_identifier(false)?;
                AlterTableOperation::EnableRule { name }
            } else if self.parse_keyword(Keyword::TRIGGER) {
                let name = self.parse_identifier(false)?;
                AlterTableOperation::EnableTrigger { name }
            } else {
                return self.expected(
                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
                    self.peek_token(),
                );
            }
        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
            && dialect_of!(self is ClickHouseDialect|GenericDialect)
        {
            // CLEAR PROJECTION [IF EXISTS] <name> [IN PARTITION <id>]
            // NOTE(review): `parse_keywords` runs before the dialect test, so
            // `CLEAR PROJECTION` stays consumed if the dialect check then
            // fails — confirm this short-circuit ordering is intended.
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier(false)?;
            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
                Some(self.parse_identifier(false)?)
            } else {
                None
            };
            AlterTableOperation::ClearProjection {
                if_exists,
                name,
                partition,
            }
        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
            && dialect_of!(self is ClickHouseDialect|GenericDialect)
        {
            // MATERIALIZE PROJECTION [IF EXISTS] <name> [IN PARTITION <id>]
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier(false)?;
            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
                Some(self.parse_identifier(false)?)
            } else {
                None
            };
            AlterTableOperation::MaterializeProjection {
                if_exists,
                name,
                partition,
            }
        } else if self.parse_keyword(Keyword::DROP) {
            // DROP { [IF EXISTS] PARTITION (..) | CONSTRAINT | PRIMARY KEY |
            //        PROJECTION | CLUSTERING KEY | [COLUMN] <name> }
            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
                self.expect_token(&Token::LParen)?;
                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                AlterTableOperation::DropPartitions {
                    partitions,
                    if_exists: true,
                }
            } else if self.parse_keyword(Keyword::PARTITION) {
                self.expect_token(&Token::LParen)?;
                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                AlterTableOperation::DropPartitions {
                    partitions,
                    if_exists: false,
                }
            } else if self.parse_keyword(Keyword::CONSTRAINT) {
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let name = self.parse_identifier(false)?;
                let cascade = self.parse_keyword(Keyword::CASCADE);
                AlterTableOperation::DropConstraint {
                    if_exists,
                    name,
                    cascade,
                }
            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
                && dialect_of!(self is MySqlDialect | GenericDialect)
            {
                AlterTableOperation::DropPrimaryKey
            } else if self.parse_keyword(Keyword::PROJECTION)
                && dialect_of!(self is ClickHouseDialect|GenericDialect)
            {
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let name = self.parse_identifier(false)?;
                AlterTableOperation::DropProjection { if_exists, name }
            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
                AlterTableOperation::DropClusteringKey
            } else {
                let _ = self.parse_keyword(Keyword::COLUMN); // optional COLUMN keyword
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let column_name = self.parse_identifier(false)?;
                let cascade = self.parse_keyword(Keyword::CASCADE);
                AlterTableOperation::DropColumn {
                    column_name,
                    if_exists,
                    cascade,
                }
            }
        } else if self.parse_keyword(Keyword::PARTITION) {
            // PARTITION (..) RENAME TO PARTITION (..)
            self.expect_token(&Token::LParen)?;
            let before = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword(Keyword::RENAME)?;
            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
            self.expect_token(&Token::LParen)?;
            let renames = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            AlterTableOperation::RenamePartitions {
                old_partitions: before,
                new_partitions: renames,
            }
        } else if self.parse_keyword(Keyword::CHANGE) {
            // CHANGE [COLUMN] <old_name> <new_name> <type> [options...] [position]
            let _ = self.parse_keyword(Keyword::COLUMN); // optional COLUMN keyword
            let old_name = self.parse_identifier(false)?;
            let new_name = self.parse_identifier(false)?;
            let data_type = self.parse_data_type()?;
            let mut options = vec![];
            while let Some(option) = self.parse_optional_column_option()? {
                options.push(option);
            }

            let column_position = self.parse_column_position()?;

            AlterTableOperation::ChangeColumn {
                old_name,
                new_name,
                data_type,
                options,
                column_position,
            }
        } else if self.parse_keyword(Keyword::MODIFY) {
            // MODIFY [COLUMN] <name> <type> [options...] [position]
            let _ = self.parse_keyword(Keyword::COLUMN); // optional COLUMN keyword
            let col_name = self.parse_identifier(false)?;
            let data_type = self.parse_data_type()?;
            let mut options = vec![];
            while let Some(option) = self.parse_optional_column_option()? {
                options.push(option);
            }

            let column_position = self.parse_column_position()?;

            AlterTableOperation::ModifyColumn {
                col_name,
                data_type,
                options,
                column_position,
            }
        } else if self.parse_keyword(Keyword::ALTER) {
            // ALTER [COLUMN] <name> { SET/DROP NOT NULL | SET/DROP DEFAULT |
            //                         SET DATA TYPE | ADD GENERATED ... }
            let _ = self.parse_keyword(Keyword::COLUMN); // optional COLUMN keyword
            let column_name = self.parse_identifier(false)?;
            let is_postgresql = dialect_of!(self is PostgreSqlDialect);

            let op: AlterColumnOperation = if self.parse_keywords(&[
                Keyword::SET,
                Keyword::NOT,
                Keyword::NULL,
            ]) {
                AlterColumnOperation::SetNotNull {}
            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
                AlterColumnOperation::DropNotNull {}
            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
                AlterColumnOperation::SetDefault {
                    value: self.parse_expr()?,
                }
            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
                AlterColumnOperation::DropDefault {}
            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE])
                || (is_postgresql && self.parse_keyword(Keyword::TYPE))
            {
                let data_type = self.parse_data_type()?;
                // Postgres also allows `USING <expr>` to convert existing rows.
                let using = if is_postgresql && self.parse_keyword(Keyword::USING) {
                    Some(self.parse_expr()?)
                } else {
                    None
                };
                AlterColumnOperation::SetDataType { data_type, using }
            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
                // ADD GENERATED [ALWAYS | BY DEFAULT] AS IDENTITY [(seq options)]
                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
                    Some(GeneratedAs::Always)
                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
                    Some(GeneratedAs::ByDefault)
                } else {
                    None
                };

                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;

                let mut sequence_options: Option<Vec<SequenceOptions>> = None;

                if self.peek_token().token == Token::LParen {
                    self.expect_token(&Token::LParen)?;
                    sequence_options = Some(self.parse_create_sequence_options()?);
                    self.expect_token(&Token::RParen)?;
                }

                AlterColumnOperation::AddGenerated {
                    generated_as,
                    sequence_options,
                }
            } else {
                // The ADD GENERATED form is only advertised for Postgres.
                let message = if is_postgresql {
                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
                } else {
                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
                };

                return self.expected(message, self.peek_token());
            };
            AlterTableOperation::AlterColumn { column_name, op }
        } else if self.parse_keyword(Keyword::SWAP) {
            // SWAP WITH <table>
            self.expect_keyword(Keyword::WITH)?;
            let table_name = self.parse_object_name(false)?;
            AlterTableOperation::SwapWith { table_name }
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
        {
            let new_owner = self.parse_owner()?;
            AlterTableOperation::OwnerTo { new_owner }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::ATTACH)
        {
            AlterTableOperation::AttachPartition {
                partition: self.parse_part_or_partition()?,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::DETACH)
        {
            AlterTableOperation::DetachPartition {
                partition: self.parse_part_or_partition()?,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::FREEZE)
        {
            // FREEZE PART/PARTITION <expr> [WITH NAME <id>]
            let partition = self.parse_part_or_partition()?;
            let with_name = if self.parse_keyword(Keyword::WITH) {
                self.expect_keyword(Keyword::NAME)?;
                Some(self.parse_identifier(false)?)
            } else {
                None
            };
            AlterTableOperation::FreezePartition {
                partition,
                with_name,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::UNFREEZE)
        {
            // UNFREEZE PART/PARTITION <expr> [WITH NAME <id>]
            let partition = self.parse_part_or_partition()?;
            let with_name = if self.parse_keyword(Keyword::WITH) {
                self.expect_keyword(Keyword::NAME)?;
                Some(self.parse_identifier(false)?)
            } else {
                None
            };
            AlterTableOperation::UnfreezePartition {
                partition,
                with_name,
            }
        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            // CLUSTER BY (<expr>, ...)
            self.expect_token(&Token::LParen)?;
            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            AlterTableOperation::ClusterBy { exprs }
        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
            AlterTableOperation::SuspendRecluster
        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
            AlterTableOperation::ResumeRecluster
        } else {
            // Last resort: SET TBLPROPERTIES (k = v, ...); anything else errors.
            let options: Vec<SqlOption> =
                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
            if !options.is_empty() {
                AlterTableOperation::SetTblProperties {
                    table_properties: options,
                }
            } else {
                return self.expected(
                    "ADD, RENAME, PARTITION, SWAP, DROP, or SET TBLPROPERTIES after ALTER TABLE",
                    self.peek_token(),
                );
            }
        };
        Ok(operation)
    }
7526
7527 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
7528 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
7529 match keyword {
7530 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
7531 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
7532 _ => unreachable!(),
7534 }
7535 }
7536
    /// Parses an `ALTER` statement, dispatching on the object type:
    /// `VIEW`, `TABLE`, `INDEX`, `ROLE`, or `POLICY`.
    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
        let object_type = self.expect_one_of_keywords(&[
            Keyword::VIEW,
            Keyword::TABLE,
            Keyword::INDEX,
            Keyword::ROLE,
            Keyword::POLICY,
        ])?;
        match object_type {
            Keyword::VIEW => self.parse_alter_view(),
            Keyword::TABLE => {
                // ALTER TABLE [IF EXISTS] [ONLY] <name> [ON CLUSTER <id>] <op>, ...
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let only = self.parse_keyword(Keyword::ONLY); // optional ONLY keyword
                let table_name = self.parse_object_name(false)?;
                let on_cluster = self.parse_optional_on_cluster()?;
                let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;

                // Optional trailing `[SET] LOCATION <id>` clause; `has_set`
                // records whether the SET keyword was present.
                let mut location = None;
                if self.parse_keyword(Keyword::LOCATION) {
                    location = Some(HiveSetLocation {
                        has_set: false,
                        location: self.parse_identifier(false)?,
                    });
                } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
                    location = Some(HiveSetLocation {
                        has_set: true,
                        location: self.parse_identifier(false)?,
                    });
                }

                Ok(Statement::AlterTable {
                    name: table_name,
                    if_exists,
                    only,
                    operations,
                    location,
                    on_cluster,
                })
            }
            Keyword::INDEX => {
                // Only `ALTER INDEX <name> RENAME TO <new_name>` is supported.
                let index_name = self.parse_object_name(false)?;
                let operation = if self.parse_keyword(Keyword::RENAME) {
                    if self.parse_keyword(Keyword::TO) {
                        let index_name = self.parse_object_name(false)?;
                        AlterIndexOperation::RenameIndex { index_name }
                    } else {
                        return self.expected("TO after RENAME", self.peek_token());
                    }
                } else {
                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
                };

                Ok(Statement::AlterIndex {
                    name: index_name,
                    operation,
                })
            }
            Keyword::ROLE => self.parse_alter_role(),
            Keyword::POLICY => self.parse_alter_policy(),
            // expect_one_of_keywords already restricted the keyword set.
            _ => unreachable!(),
        }
    }
7600
7601 pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
7602 let name = self.parse_object_name(false)?;
7603 let columns = self.parse_parenthesized_column_list(Optional, false)?;
7604
7605 let with_options = self.parse_options(Keyword::WITH)?;
7606
7607 self.expect_keyword(Keyword::AS)?;
7608 let query = self.parse_query()?;
7609
7610 Ok(Statement::AlterView {
7611 name,
7612 columns,
7613 query,
7614 with_options,
7615 })
7616 }
7617
7618 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
7621 let object_name = self.parse_object_name(false)?;
7622 if self.peek_token().token == Token::LParen {
7623 match self.parse_function(object_name)? {
7624 Expr::Function(f) => Ok(Statement::Call(f)),
7625 other => parser_err!(
7626 format!("Expected a simple procedure call but found: {other}"),
7627 self.peek_token().span.start
7628 ),
7629 }
7630 } else {
7631 Ok(Statement::Call(Function {
7632 name: object_name,
7633 uses_odbc_syntax: false,
7634 parameters: FunctionArguments::None,
7635 args: FunctionArguments::None,
7636 over: None,
7637 filter: None,
7638 null_treatment: None,
7639 within_group: vec![],
7640 }))
7641 }
7642 }
7643
7644 pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
7646 let source;
7647 if self.consume_token(&Token::LParen) {
7648 source = CopySource::Query(self.parse_query()?);
7649 self.expect_token(&Token::RParen)?;
7650 } else {
7651 let table_name = self.parse_object_name(false)?;
7652 let columns = self.parse_parenthesized_column_list(Optional, false)?;
7653 source = CopySource::Table {
7654 table_name,
7655 columns,
7656 };
7657 }
7658 let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
7659 Some(Keyword::FROM) => false,
7660 Some(Keyword::TO) => true,
7661 _ => self.expected("FROM or TO", self.peek_token())?,
7662 };
7663 if !to {
7664 if let CopySource::Query(_) = source {
7667 return Err(ParserError::ParserError(
7668 "COPY ... FROM does not support query as a source".to_string(),
7669 ));
7670 }
7671 }
7672 let target = if self.parse_keyword(Keyword::STDIN) {
7673 CopyTarget::Stdin
7674 } else if self.parse_keyword(Keyword::STDOUT) {
7675 CopyTarget::Stdout
7676 } else if self.parse_keyword(Keyword::PROGRAM) {
7677 CopyTarget::Program {
7678 command: self.parse_literal_string()?,
7679 }
7680 } else {
7681 CopyTarget::File {
7682 filename: self.parse_literal_string()?,
7683 }
7684 };
7685 let _ = self.parse_keyword(Keyword::WITH); let mut options = vec![];
7687 if self.consume_token(&Token::LParen) {
7688 options = self.parse_comma_separated(Parser::parse_copy_option)?;
7689 self.expect_token(&Token::RParen)?;
7690 }
7691 let mut legacy_options = vec![];
7692 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
7693 legacy_options.push(opt);
7694 }
7695 let values = if let CopyTarget::Stdin = target {
7696 self.expect_token(&Token::SemiColon)?;
7697 self.parse_tsv()
7698 } else {
7699 vec![]
7700 };
7701 Ok(Statement::Copy {
7702 source,
7703 to,
7704 target,
7705 options,
7706 legacy_options,
7707 values,
7708 })
7709 }
7710
7711 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
7712 let cursor = if self.parse_keyword(Keyword::ALL) {
7713 CloseCursor::All
7714 } else {
7715 let name = self.parse_identifier(false)?;
7716
7717 CloseCursor::Specific { name }
7718 };
7719
7720 Ok(Statement::Close { cursor })
7721 }
7722
    /// Parses one option from the parenthesized `COPY` option list,
    /// e.g. `FORMAT csv`, `DELIMITER ','`, `FORCE_QUOTE (a, b)`.
    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::FORMAT,
            Keyword::FREEZE,
            Keyword::DELIMITER,
            Keyword::NULL,
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE_QUOTE,
            Keyword::FORCE_NOT_NULL,
            Keyword::FORCE_NULL,
            Keyword::ENCODING,
        ]) {
            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier(false)?),
            // FREEZE takes an optional TRUE/FALSE; absence means TRUE.
            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
            // HEADER takes an optional TRUE/FALSE; absence means TRUE.
            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
            Some(Keyword::FORCE_QUOTE) => {
                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NOT_NULL) => {
                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NULL) => {
                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
            _ => self.expected("option", self.peek_token())?,
        };
        Ok(ret)
    }
7764
    /// Parses one legacy (non-parenthesized) `COPY` option:
    /// `BINARY`, `DELIMITER [AS] 'c'`, `NULL [AS] 'str'`, or `CSV` followed
    /// by its own sub-options.
    fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::BINARY,
            Keyword::DELIMITER,
            Keyword::NULL,
            Keyword::CSV,
        ]) {
            Some(Keyword::BINARY) => CopyLegacyOption::Binary,
            Some(Keyword::DELIMITER) => {
                let _ = self.parse_keyword(Keyword::AS); // optional AS keyword
                CopyLegacyOption::Delimiter(self.parse_literal_char()?)
            }
            Some(Keyword::NULL) => {
                let _ = self.parse_keyword(Keyword::AS); // optional AS keyword
                CopyLegacyOption::Null(self.parse_literal_string()?)
            }
            Some(Keyword::CSV) => CopyLegacyOption::Csv({
                // Greedily collect any CSV sub-options that follow.
                let mut opts = vec![];
                while let Some(opt) =
                    self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
                {
                    opts.push(opt);
                }
                opts
            }),
            _ => self.expected("option", self.peek_token())?,
        };
        Ok(ret)
    }
7794
    /// Parses one sub-option of the legacy `CSV` COPY option:
    /// `HEADER`, `QUOTE [AS] 'c'`, `ESCAPE [AS] 'c'`, `FORCE NOT NULL cols`,
    /// or `FORCE QUOTE cols`.
    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE,
        ]) {
            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
            Some(Keyword::QUOTE) => {
                let _ = self.parse_keyword(Keyword::AS); // optional AS keyword
                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
            }
            Some(Keyword::ESCAPE) => {
                let _ = self.parse_keyword(Keyword::AS); // optional AS keyword
                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
            }
            // The guard itself consumes `NOT NULL` / `QUOTE` when it matches,
            // so the two FORCE arms discriminate via their guards.
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
                CopyLegacyCsvOption::ForceNotNull(
                    self.parse_comma_separated(|p| p.parse_identifier(false))?,
                )
            }
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
                CopyLegacyCsvOption::ForceQuote(
                    self.parse_comma_separated(|p| p.parse_identifier(false))?,
                )
            }
            _ => self.expected("csv option", self.peek_token())?,
        };
        Ok(ret)
    }
7825
7826 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
7827 let s = self.parse_literal_string()?;
7828 if s.len() != 1 {
7829 let loc = self
7830 .tokens
7831 .get(self.index - 1)
7832 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
7833 return parser_err!(format!("Expect a char, found {s:?}"), loc);
7834 }
7835 Ok(s.chars().next().unwrap())
7836 }
7837
    /// Parses the tab-separated data section following `COPY ... FROM STDIN;`.
    /// Currently just delegates to [`Parser::parse_tab_value`].
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
7843
7844 pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
7845 let mut values = vec![];
7846 let mut content = String::from("");
7847 while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
7848 match t {
7849 Token::Whitespace(Whitespace::Tab) => {
7850 values.push(Some(content.to_string()));
7851 content.clear();
7852 }
7853 Token::Whitespace(Whitespace::Newline) => {
7854 values.push(Some(content.to_string()));
7855 content.clear();
7856 }
7857 Token::Backslash => {
7858 if self.consume_token(&Token::Period) {
7859 return values;
7860 }
7861 if let Token::Word(w) = self.next_token().token {
7862 if w.value == "N" {
7863 values.push(None);
7864 }
7865 }
7866 }
7867 _ => {
7868 content.push_str(&t.to_string());
7869 }
7870 }
7871 }
7872 values
7873 }
7874
    /// Parses a literal value: number, string (in its many quoting flavors),
    /// boolean (where the dialect supports boolean literals), `NULL`, or a
    /// placeholder such as `$1`, `:name`, or `@name`.
    pub fn parse_value(&mut self) -> Result<Value, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        match next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    Ok(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    Ok(Value::Boolean(false))
                }
                Keyword::NULL => Ok(Value::Null),
                // A quoted non-keyword word is treated as a string literal,
                // keyed on its quote style.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => Ok(Value::DoubleQuotedString(w.value)),
                    Some('\'') => Ok(Value::SingleQuotedString(w.value)),
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            // The bool flag is forwarded unchanged from the tokenizer's
            // Token::Number (presumably its "long" suffix marker — see the
            // tokenizer for its exact meaning).
            Token::Number(n, l) => Ok(Value::Number(Self::parse(n, span.start)?, l)),
            Token::SingleQuotedString(ref s) => Ok(Value::SingleQuotedString(s.to_string())),
            Token::DoubleQuotedString(ref s) => Ok(Value::DoubleQuotedString(s.to_string())),
            Token::TripleSingleQuotedString(ref s) => {
                Ok(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                Ok(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => Ok(Value::DollarQuotedString(s.clone())),
            Token::SingleQuotedByteStringLiteral(ref s) => {
                Ok(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                Ok(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                Ok(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                Ok(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                Ok(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                Ok(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                Ok(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                Ok(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => Ok(Value::NationalStringLiteral(s.to_string())),
            Token::EscapedStringLiteral(ref s) => Ok(Value::EscapedStringLiteral(s.to_string())),
            Token::UnicodeStringLiteral(ref s) => Ok(Value::UnicodeStringLiteral(s.to_string())),
            Token::HexStringLiteral(ref s) => Ok(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => Ok(Value::Placeholder(s.to_string())),
            // `:name` / `@name` placeholders: the sigil plus the following
            // word or integer form the placeholder text.
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.to_ident(next_token.span)),
                    Token::Number(w, false) => Ok(Ident::new(w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                let placeholder = tok.to_string() + &ident.value;
                Ok(Value::Placeholder(placeholder))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
7970
7971 pub fn parse_number_value(&mut self) -> Result<Value, ParserError> {
7973 match self.parse_value()? {
7974 v @ Value::Number(_, _) => Ok(v),
7975 v @ Value::Placeholder(_) => Ok(v),
7976 _ => {
7977 self.prev_token();
7978 self.expected("literal number", self.peek_token())
7979 }
7980 }
7981 }
7982
7983 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
7986 let next_token = self.next_token();
7987 match next_token.token {
7988 Token::Plus => Ok(Expr::UnaryOp {
7989 op: UnaryOperator::Plus,
7990 expr: Box::new(Expr::Value(self.parse_number_value()?)),
7991 }),
7992 Token::Minus => Ok(Expr::UnaryOp {
7993 op: UnaryOperator::Minus,
7994 expr: Box::new(Expr::Value(self.parse_number_value()?)),
7995 }),
7996 _ => {
7997 self.prev_token();
7998 Ok(Expr::Value(self.parse_number_value()?))
7999 }
8000 }
8001 }
8002
8003 fn parse_introduced_string_value(&mut self) -> Result<Value, ParserError> {
8004 let next_token = self.next_token();
8005 let span = next_token.span;
8006 match next_token.token {
8007 Token::SingleQuotedString(ref s) => Ok(Value::SingleQuotedString(s.to_string())),
8008 Token::DoubleQuotedString(ref s) => Ok(Value::DoubleQuotedString(s.to_string())),
8009 Token::HexStringLiteral(ref s) => Ok(Value::HexStringLiteral(s.to_string())),
8010 unexpected => self.expected(
8011 "a string value",
8012 TokenWithSpan {
8013 token: unexpected,
8014 span,
8015 },
8016 ),
8017 }
8018 }
8019
8020 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
8022 let next_token = self.next_token();
8023 match next_token.token {
8024 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
8025 _ => self.expected("literal int", next_token),
8026 }
8027 }
8028
8029 fn parse_create_function_body_string(&mut self) -> Result<Expr, ParserError> {
8032 let peek_token = self.peek_token();
8033 match peek_token.token {
8034 Token::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
8035 {
8036 self.next_token();
8037 Ok(Expr::Value(Value::DollarQuotedString(s)))
8038 }
8039 _ => Ok(Expr::Value(Value::SingleQuotedString(
8040 self.parse_literal_string()?,
8041 ))),
8042 }
8043 }
8044
8045 pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
8047 let next_token = self.next_token();
8048 match next_token.token {
8049 Token::Word(Word {
8050 value,
8051 keyword: Keyword::NoKeyword,
8052 ..
8053 }) => Ok(value),
8054 Token::SingleQuotedString(s) => Ok(s),
8055 Token::DoubleQuotedString(s) => Ok(s),
8056 Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
8057 Ok(s)
8058 }
8059 Token::UnicodeStringLiteral(s) => Ok(s),
8060 _ => self.expected("literal string", next_token),
8061 }
8062 }
8063
8064 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
8065 self.expect_token(&Token::LParen)?;
8066 let values = self.parse_comma_separated(|parser| {
8067 let name = parser.parse_literal_string()?;
8068 let e = if parser.consume_token(&Token::Eq) {
8069 let value = parser.parse_number()?;
8070 EnumMember::NamedValue(name, value)
8071 } else {
8072 EnumMember::Name(name)
8073 };
8074 Ok(e)
8075 })?;
8076 self.expect_token(&Token::RParen)?;
8077
8078 Ok(values)
8079 }
8080
8081 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
8083 let (ty, trailing_bracket) = self.parse_data_type_helper()?;
8084 if trailing_bracket.0 {
8085 return parser_err!(
8086 format!("unmatched > after parsing data type {ty}"),
8087 self.peek_token()
8088 );
8089 }
8090
8091 Ok(ty)
8092 }
8093
    /// Parse a data type keyword and its modifiers, returning the type
    /// together with a flag indicating whether a trailing `>` belonging
    /// to an enclosing angle-bracket type (`ARRAY<...>` / `STRUCT<...>`)
    /// was already consumed at this level. `parse_data_type` uses that
    /// flag to reject a stray unmatched `>`.
    fn parse_data_type_helper(
        &mut self,
    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
        let next_token = self.next_token();
        let mut trailing_bracket: MatchedTrailingBracket = false.into();
        let mut data = match next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::BOOLEAN => Ok(DataType::Boolean),
                Keyword::BOOL => Ok(DataType::Bool),
                Keyword::FLOAT => Ok(DataType::Float(self.parse_optional_precision()?)),
                Keyword::REAL => Ok(DataType::Real),
                Keyword::FLOAT4 => Ok(DataType::Float4),
                Keyword::FLOAT32 => Ok(DataType::Float32),
                Keyword::FLOAT64 => Ok(DataType::Float64),
                Keyword::FLOAT8 => Ok(DataType::Float8),
                Keyword::DOUBLE => {
                    if self.parse_keyword(Keyword::PRECISION) {
                        Ok(DataType::DoublePrecision)
                    } else {
                        Ok(DataType::Double)
                    }
                }
                // Integer types: optional `(precision)` followed by an
                // optional MySQL-style `UNSIGNED` suffix. The precision
                // Result is only unwrapped (`?`) after UNSIGNED has been
                // checked, preserving the original token-consumption order.
                Keyword::TINYINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedTinyInt(optional_precision?))
                    } else {
                        Ok(DataType::TinyInt(optional_precision?))
                    }
                }
                Keyword::INT2 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedInt2(optional_precision?))
                    } else {
                        Ok(DataType::Int2(optional_precision?))
                    }
                }
                Keyword::SMALLINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedSmallInt(optional_precision?))
                    } else {
                        Ok(DataType::SmallInt(optional_precision?))
                    }
                }
                Keyword::MEDIUMINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedMediumInt(optional_precision?))
                    } else {
                        Ok(DataType::MediumInt(optional_precision?))
                    }
                }
                Keyword::INT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedInt(optional_precision?))
                    } else {
                        Ok(DataType::Int(optional_precision?))
                    }
                }
                Keyword::INT4 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedInt4(optional_precision?))
                    } else {
                        Ok(DataType::Int4(optional_precision?))
                    }
                }
                Keyword::INT8 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedInt8(optional_precision?))
                    } else {
                        Ok(DataType::Int8(optional_precision?))
                    }
                }
                Keyword::INT16 => Ok(DataType::Int16),
                Keyword::INT32 => Ok(DataType::Int32),
                Keyword::INT64 => Ok(DataType::Int64),
                Keyword::INT128 => Ok(DataType::Int128),
                Keyword::INT256 => Ok(DataType::Int256),
                Keyword::INTEGER => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedInteger(optional_precision?))
                    } else {
                        Ok(DataType::Integer(optional_precision?))
                    }
                }
                Keyword::BIGINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedBigInt(optional_precision?))
                    } else {
                        Ok(DataType::BigInt(optional_precision?))
                    }
                }
                Keyword::UINT8 => Ok(DataType::UInt8),
                Keyword::UINT16 => Ok(DataType::UInt16),
                Keyword::UINT32 => Ok(DataType::UInt32),
                Keyword::UINT64 => Ok(DataType::UInt64),
                Keyword::UINT128 => Ok(DataType::UInt128),
                Keyword::UINT256 => Ok(DataType::UInt256),
                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
                Keyword::NVARCHAR => {
                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
                }
                // CHARACTER / CHAR branch on an optional `VARYING` or
                // `LARGE OBJECT` suffix before the length/precision.
                Keyword::CHARACTER => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharacterVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharacterLargeObject(
                            self.parse_optional_precision()?,
                        ))
                    } else {
                        Ok(DataType::Character(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CHAR => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Char(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_precision()?)),
                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
                Keyword::LONGBLOB => Ok(DataType::LongBlob),
                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
                Keyword::BIT => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Bit(self.parse_optional_precision()?))
                    }
                }
                Keyword::UUID => Ok(DataType::Uuid),
                Keyword::DATE => Ok(DataType::Date),
                Keyword::DATE32 => Ok(DataType::Date32),
                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
                Keyword::DATETIME64 => {
                    // parse_datetime_64 re-consumes the DATETIME64 keyword,
                    // so put it back first.
                    self.prev_token();
                    let (precision, time_zone) = self.parse_datetime_64()?;
                    Ok(DataType::Datetime64(precision, time_zone))
                }
                // TIMESTAMP / TIME: optional precision, then an optional
                // `WITH TIME ZONE` / `WITHOUT TIME ZONE` qualifier.
                Keyword::TIMESTAMP => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Timestamp(precision, tz))
                }
                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::TIME => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Time(precision, tz))
                }
                Keyword::TIMETZ => Ok(DataType::Time(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::INTERVAL => Ok(DataType::Interval),
                Keyword::JSON => Ok(DataType::JSON),
                Keyword::JSONB => Ok(DataType::JSONB),
                Keyword::REGCLASS => Ok(DataType::Regclass),
                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
                Keyword::FIXEDSTRING => {
                    self.expect_token(&Token::LParen)?;
                    let character_length = self.parse_literal_uint()?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::FixedString(character_length))
                }
                Keyword::TEXT => Ok(DataType::Text),
                Keyword::TINYTEXT => Ok(DataType::TinyText),
                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
                Keyword::LONGTEXT => Ok(DataType::LongText),
                Keyword::BYTEA => Ok(DataType::Bytea),
                Keyword::NUMERIC => Ok(DataType::Numeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::DECIMAL => Ok(DataType::Decimal(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::DEC => Ok(DataType::Dec(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
                // ARRAY has three dialect-specific spellings: bare `ARRAY`
                // (Snowflake), `Array(T)` (ClickHouse), `ARRAY<T>` (others).
                Keyword::ARRAY => {
                    if dialect_of!(self is SnowflakeDialect) {
                        Ok(DataType::Array(ArrayElemTypeDef::None))
                    } else if dialect_of!(self is ClickHouseDialect) {
                        Ok(self.parse_sub_type(|internal_type| {
                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
                        })?)
                    } else {
                        self.expect_token(&Token::Lt)?;
                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
                        // The closing `>` may already have been eaten as
                        // part of a `>>`; propagate that fact upward.
                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
                            inside_type,
                        ))))
                    }
                }
                Keyword::STRUCT if dialect_of!(self is DuckDbDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_duckdb_struct_type_def()?;
                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
                }
                Keyword::STRUCT if dialect_of!(self is BigQueryDialect | GenericDialect) => {
                    self.prev_token();
                    let (field_defs, _trailing_bracket) =
                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
                    trailing_bracket = _trailing_bracket;
                    Ok(DataType::Struct(
                        field_defs,
                        StructBracketKind::AngleBrackets,
                    ))
                }
                Keyword::UNION if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.prev_token();
                    let fields = self.parse_union_type_def()?;
                    Ok(DataType::Union(fields))
                }
                Keyword::NULLABLE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::Nullable)?)
                }
                Keyword::LOWCARDINALITY if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
                }
                Keyword::MAP if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
                    Ok(DataType::Map(
                        Box::new(key_data_type),
                        Box::new(value_data_type),
                    ))
                }
                Keyword::NESTED if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    self.expect_token(&Token::LParen)?;
                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::Nested(field_defs))
                }
                Keyword::TUPLE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_click_house_tuple_def()?;
                    Ok(DataType::Tuple(field_defs))
                }
                Keyword::TRIGGER => Ok(DataType::Trigger),
                // Anything else is treated as a (possibly multipart)
                // custom type name with optional modifiers.
                _ => {
                    self.prev_token();
                    let type_name = self.parse_object_name(false)?;
                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
                        Ok(DataType::Custom(type_name, modifiers))
                    } else {
                        Ok(DataType::Custom(type_name, vec![]))
                    }
                }
            },
            _ => self.expected("a data type name", next_token),
        }?;

        // Postgres-style `[]` / `[n]` array suffixes; each suffix wraps the
        // type parsed so far in another array level.
        while self.consume_token(&Token::LBracket) {
            let size = if dialect_of!(self is GenericDialect | DuckDbDialect | PostgreSqlDialect) {
                self.maybe_parse(|p| p.parse_literal_uint())?
            } else {
                None
            };
            self.expect_token(&Token::RBracket)?;
            data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
        }
        Ok((data, trailing_bracket))
    }
8411
8412 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
8413 self.expect_token(&Token::LParen)?;
8414 let mut values = Vec::new();
8415 loop {
8416 let next_token = self.next_token();
8417 match next_token.token {
8418 Token::SingleQuotedString(value) => values.push(value),
8419 _ => self.expected("a string", next_token)?,
8420 }
8421 let next_token = self.next_token();
8422 match next_token.token {
8423 Token::Comma => (),
8424 Token::RParen => break,
8425 _ => self.expected(", or }", next_token)?,
8426 }
8427 }
8428 Ok(values)
8429 }
8430
8431 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
8433 let ident = self.parse_identifier(false)?;
8434 self.expect_keyword(Keyword::AS)?;
8435 let alias = self.parse_identifier(false)?;
8436 Ok(IdentWithAlias { ident, alias })
8437 }
8438
    /// Parse an optional alias: `AS <ident>`, or a bare identifier that is
    /// not in `reserved_kwds`. Returns `Ok(None)` (after putting the
    /// peeked token back) when no alias is present; errors when `AS` is
    /// not followed by something usable as an identifier.
    pub fn parse_optional_alias(
        &mut self,
        reserved_kwds: &[Keyword],
    ) -> Result<Option<Ident>, ParserError> {
        let after_as = self.parse_keyword(Keyword::AS);
        let next_token = self.next_token();
        match next_token.token {
            // A bare word works as an alias unless it is a reserved
            // keyword; after an explicit AS any word is accepted.
            Token::Word(w) if after_as || !reserved_kwds.contains(&w.keyword) => {
                Ok(Some(w.to_ident(next_token.span)))
            }
            // Quoted strings are accepted as (quoted) aliases.
            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
            _ => {
                if after_as {
                    return self.expected("an identifier after AS", next_token);
                }
                // No alias: backtrack so the caller sees this token.
                self.prev_token();
                Ok(None)
            }
        }
    }
8481
8482 pub fn parse_optional_table_alias(
8487 &mut self,
8488 reserved_kwds: &[Keyword],
8489 ) -> Result<Option<TableAlias>, ParserError> {
8490 match self.parse_optional_alias(reserved_kwds)? {
8491 Some(name) => {
8492 let columns = self.parse_table_alias_column_defs()?;
8493 Ok(Some(TableAlias { name, columns }))
8494 }
8495 None => Ok(None),
8496 }
8497 }
8498
    /// Parse an optional `GROUP BY` clause: either `GROUP BY ALL` or a
    /// comma-separated expression list, optionally followed (ClickHouse /
    /// generic dialect) by `WITH ROLLUP|CUBE|TOTALS` modifiers.
    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
            // `GROUP BY ALL` is encoded as `None`; an explicit list as `Some`.
            let expressions = if self.parse_keyword(Keyword::ALL) {
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
            };

            let mut modifiers = vec![];
            if dialect_of!(self is ClickHouseDialect | GenericDialect) {
                // Collect any number of `WITH <modifier>` suffixes.
                loop {
                    if !self.parse_keyword(Keyword::WITH) {
                        break;
                    }
                    let keyword = self.expect_one_of_keywords(&[
                        Keyword::ROLLUP,
                        Keyword::CUBE,
                        Keyword::TOTALS,
                    ])?;
                    modifiers.push(match keyword {
                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
                        Keyword::CUBE => GroupByWithModifier::Cube,
                        Keyword::TOTALS => GroupByWithModifier::Totals,
                        // expect_one_of_keywords only returns the listed
                        // keywords, so this arm is unreachable in practice.
                        _ => {
                            return parser_err!(
                                "BUG: expected to match GroupBy modifier keyword",
                                self.peek_token().span.start
                            )
                        }
                    });
                }
            }
            let group_by = match expressions {
                None => GroupByExpr::All(modifiers),
                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
            };
            Ok(Some(group_by))
        } else {
            Ok(None)
        }
    }
8540
8541 pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
8542 if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
8543 let order_by_exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
8544 let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
8545 self.parse_interpolations()?
8546 } else {
8547 None
8548 };
8549
8550 Ok(Some(OrderBy {
8551 exprs: order_by_exprs,
8552 interpolate,
8553 }))
8554 } else {
8555 Ok(None)
8556 }
8557 }
8558
    /// Parse a possibly-qualified object name (`a.b.c`). Handles two
    /// dialect quirks: double-dot notation (`db..table`, an empty middle
    /// part) where the dialect supports it, and BigQuery quoted
    /// identifiers that themselves contain dots, which are split into
    /// separate parts after parsing.
    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
        let mut idents = vec![];
        loop {
            // `db..table`: insert an empty identifier for the elided part.
            if self.dialect.supports_object_name_double_dot_notation()
                && idents.len() == 1
                && self.consume_token(&Token::Period)
            {
                idents.push(Ident::new(""));
            }
            idents.push(self.parse_identifier(in_table_clause)?);
            if !self.consume_token(&Token::Period) {
                break;
            }
        }

        // BigQuery accepts dots inside quoted identifiers (`` `a.b.c` ``);
        // normalize by splitting each such identifier into parts. Note the
        // split parts all reuse the original identifier's quote style and
        // span.
        if dialect_of!(self is BigQueryDialect)
            && idents.iter().any(|ident| ident.value.contains('.'))
        {
            idents = idents
                .into_iter()
                .flat_map(|ident| {
                    ident
                        .value
                        .split('.')
                        .map(|value| Ident {
                            value: value.into(),
                            quote_style: ident.quote_style,
                            span: ident.span,
                        })
                        .collect::<Vec<_>>()
                })
                .collect()
        }

        Ok(ObjectName(idents))
    }
8604
8605 pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
8607 let mut idents = vec![];
8608 loop {
8609 match self.peek_token().token {
8610 Token::Word(w) => {
8611 idents.push(w.to_ident(self.peek_token().span));
8612 }
8613 Token::EOF | Token::Eq => break,
8614 _ => {}
8615 }
8616 self.next_token();
8617 }
8618 Ok(idents)
8619 }
8620
8621 pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
8661 let mut idents = vec![];
8662
8663 let next_token = self.next_token();
8665 match next_token.token {
8666 Token::Word(w) => idents.push(w.to_ident(next_token.span)),
8667 Token::EOF => {
8668 return Err(ParserError::ParserError(
8669 "Empty input when parsing identifier".to_string(),
8670 ))?
8671 }
8672 token => {
8673 return Err(ParserError::ParserError(format!(
8674 "Unexpected token in identifier: {token}"
8675 )))?
8676 }
8677 };
8678
8679 loop {
8681 match self.next_token().token {
8682 Token::Period => {
8684 let next_token = self.next_token();
8685 match next_token.token {
8686 Token::Word(w) => idents.push(w.to_ident(next_token.span)),
8687 Token::EOF => {
8688 return Err(ParserError::ParserError(
8689 "Trailing period in identifier".to_string(),
8690 ))?
8691 }
8692 token => {
8693 return Err(ParserError::ParserError(format!(
8694 "Unexpected token following period in identifier: {token}"
8695 )))?
8696 }
8697 }
8698 }
8699 Token::EOF => break,
8700 token => {
8701 return Err(ParserError::ParserError(format!(
8702 "Unexpected token in identifier: {token}"
8703 )))?
8704 }
8705 }
8706 }
8707
8708 Ok(idents)
8709 }
8710
    /// Parse a single identifier: a word or a quoted string.
    ///
    /// When `in_table_clause` is true and the dialect is BigQuery, an
    /// unquoted word may continue as a hyphenated name (`my-project`):
    /// `-` followed by an unquoted word or an all-digit number is folded
    /// into the identifier using no-skip token access, so interior
    /// whitespace terminates the name.
    pub fn parse_identifier(&mut self, in_table_clause: bool) -> Result<Ident, ParserError> {
        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) => {
                let mut ident = w.to_ident(next_token.span);

                if dialect_of!(self is BigQueryDialect)
                    && w.quote_style.is_none()
                    && in_table_clause
                {
                    let mut requires_whitespace = false;
                    // Greedily absorb `-<word>` / `-<digits>` continuations.
                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
                        self.next_token();
                        ident.value.push('-');

                        let token = self
                            .next_token_no_skip()
                            .cloned()
                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
                        requires_whitespace = match token.token {
                            Token::Word(next_word) if next_word.quote_style.is_none() => {
                                ident.value.push_str(&next_word.value);
                                false
                            }
                            // A pure-digit segment may not be followed
                            // directly by more identifier characters.
                            Token::Number(s, false) if s.chars().all(|c| c.is_ascii_digit()) => {
                                ident.value.push_str(&s);
                                true
                            }
                            _ => {
                                return self
                                    .expected("continuation of hyphenated identifier", token);
                            }
                        }
                    }

                    // If the name ended in digits, insist on whitespace (or
                    // EOF) so e.g. `proj-123abc` is rejected here.
                    if requires_whitespace {
                        let token = self.next_token();
                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
                            return self
                                .expected("whitespace following hyphenated identifier", token);
                        }
                    }
                }
                Ok(ident)
            }
            Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
            Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
            _ => self.expected("identifier", next_token),
        }
    }
8776
8777 fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
8779 if self.consume_token(&Token::LParen) {
8780 if self.peek_token().token == Token::RParen {
8781 self.next_token();
8782 Ok(vec![])
8783 } else {
8784 let cols = self.parse_comma_separated(Parser::parse_view_column)?;
8785 self.expect_token(&Token::RParen)?;
8786 Ok(cols)
8787 }
8788 } else {
8789 Ok(vec![])
8790 }
8791 }
8792
    /// Parse a single view column: a name, optionally followed by a
    /// dialect-specific column option (`OPTIONS` for BigQuery, `COMMENT`
    /// for Snowflake) and, for ClickHouse, a data type.
    fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
        let name = self.parse_identifier(false)?;
        // Note the short-circuit order: each arm consumes its keyword only
        // when the dialect matches; on a match the keyword is pushed back
        // so parse_optional_column_option can re-read it.
        let options = if (dialect_of!(self is BigQueryDialect | GenericDialect)
            && self.parse_keyword(Keyword::OPTIONS))
            || (dialect_of!(self is SnowflakeDialect | GenericDialect)
                && self.parse_keyword(Keyword::COMMENT))
        {
            self.prev_token();
            self.parse_optional_column_option()?
                .map(|option| vec![option])
        } else {
            None
        };
        let data_type = if dialect_of!(self is ClickHouseDialect) {
            Some(self.parse_data_type()?)
        } else {
            None
        };
        Ok(ViewColumnDef {
            name,
            data_type,
            options,
        })
    }
8818
8819 pub fn parse_parenthesized_column_list(
8821 &mut self,
8822 optional: IsOptional,
8823 allow_empty: bool,
8824 ) -> Result<Vec<Ident>, ParserError> {
8825 if self.consume_token(&Token::LParen) {
8826 if allow_empty && self.peek_token().token == Token::RParen {
8827 self.next_token();
8828 Ok(vec![])
8829 } else {
8830 let cols = self.parse_comma_separated(|p| p.parse_identifier(false))?;
8831 self.expect_token(&Token::RParen)?;
8832 Ok(cols)
8833 }
8834 } else if optional == Optional {
8835 Ok(vec![])
8836 } else {
8837 self.expected("a list of columns in parentheses", self.peek_token())
8838 }
8839 }
8840
8841 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
8843 if self.consume_token(&Token::LParen) {
8844 let cols = self.parse_comma_separated(|p| {
8845 let name = p.parse_identifier(false)?;
8846 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
8847 Ok(TableAliasColumnDef { name, data_type })
8848 })?;
8849 self.expect_token(&Token::RParen)?;
8850 Ok(cols)
8851 } else {
8852 Ok(vec![])
8853 }
8854 }
8855
8856 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
8857 self.expect_token(&Token::LParen)?;
8858 let n = self.parse_literal_uint()?;
8859 self.expect_token(&Token::RParen)?;
8860 Ok(n)
8861 }
8862
8863 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
8864 if self.consume_token(&Token::LParen) {
8865 let n = self.parse_literal_uint()?;
8866 self.expect_token(&Token::RParen)?;
8867 Ok(Some(n))
8868 } else {
8869 Ok(None)
8870 }
8871 }
8872
8873 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
8881 self.expect_keyword(Keyword::DATETIME64)?;
8882 self.expect_token(&Token::LParen)?;
8883 let precision = self.parse_literal_uint()?;
8884 let time_zone = if self.consume_token(&Token::Comma) {
8885 Some(self.parse_literal_string()?)
8886 } else {
8887 None
8888 };
8889 self.expect_token(&Token::RParen)?;
8890 Ok((precision, time_zone))
8891 }
8892
8893 pub fn parse_optional_character_length(
8894 &mut self,
8895 ) -> Result<Option<CharacterLength>, ParserError> {
8896 if self.consume_token(&Token::LParen) {
8897 let character_length = self.parse_character_length()?;
8898 self.expect_token(&Token::RParen)?;
8899 Ok(Some(character_length))
8900 } else {
8901 Ok(None)
8902 }
8903 }
8904
8905 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
8906 if self.parse_keyword(Keyword::MAX) {
8907 return Ok(CharacterLength::Max);
8908 }
8909 let length = self.parse_literal_uint()?;
8910 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
8911 Some(CharLengthUnits::Characters)
8912 } else if self.parse_keyword(Keyword::OCTETS) {
8913 Some(CharLengthUnits::Octets)
8914 } else {
8915 None
8916 };
8917 Ok(CharacterLength::IntegerLength { length, unit })
8918 }
8919
8920 pub fn parse_optional_precision_scale(
8921 &mut self,
8922 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
8923 if self.consume_token(&Token::LParen) {
8924 let n = self.parse_literal_uint()?;
8925 let scale = if self.consume_token(&Token::Comma) {
8926 Some(self.parse_literal_uint()?)
8927 } else {
8928 None
8929 };
8930 self.expect_token(&Token::RParen)?;
8931 Ok((Some(n), scale))
8932 } else {
8933 Ok((None, None))
8934 }
8935 }
8936
8937 pub fn parse_exact_number_optional_precision_scale(
8938 &mut self,
8939 ) -> Result<ExactNumberInfo, ParserError> {
8940 if self.consume_token(&Token::LParen) {
8941 let precision = self.parse_literal_uint()?;
8942 let scale = if self.consume_token(&Token::Comma) {
8943 Some(self.parse_literal_uint()?)
8944 } else {
8945 None
8946 };
8947
8948 self.expect_token(&Token::RParen)?;
8949
8950 match scale {
8951 None => Ok(ExactNumberInfo::Precision(precision)),
8952 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
8953 }
8954 } else {
8955 Ok(ExactNumberInfo::None)
8956 }
8957 }
8958
8959 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
8960 if self.consume_token(&Token::LParen) {
8961 let mut modifiers = Vec::new();
8962 loop {
8963 let next_token = self.next_token();
8964 match next_token.token {
8965 Token::Word(w) => modifiers.push(w.to_string()),
8966 Token::Number(n, _) => modifiers.push(n),
8967 Token::SingleQuotedString(s) => modifiers.push(s),
8968
8969 Token::Comma => {
8970 continue;
8971 }
8972 Token::RParen => {
8973 break;
8974 }
8975 _ => self.expected("type modifiers", next_token)?,
8976 }
8977 }
8978
8979 Ok(Some(modifiers))
8980 } else {
8981 Ok(None)
8982 }
8983 }
8984
8985 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
8987 where
8988 F: FnOnce(Box<DataType>) -> DataType,
8989 {
8990 self.expect_token(&Token::LParen)?;
8991 let inside_type = self.parse_data_type()?;
8992 self.expect_token(&Token::RParen)?;
8993 Ok(parent_type(inside_type.into()))
8994 }
8995
    /// Parse a `DELETE` statement (the `DELETE` keyword has already been
    /// consumed). Supports the multi-table MySQL form
    /// `DELETE t1, t2 FROM ...`, BigQuery's optional `FROM`, plus
    /// `USING`, `WHERE`, `RETURNING`, `ORDER BY` and `LIMIT` clauses.
    pub fn parse_delete(&mut self) -> Result<Statement, ParserError> {
        // Determine the table list before FROM (MySQL multi-table form)
        // and whether the FROM keyword itself was present.
        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
            if dialect_of!(self is BigQueryDialect | GenericDialect) {
                // BigQuery allows `DELETE <table>` without FROM.
                (vec![], false)
            } else {
                // Other dialects: names before FROM are the target tables.
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                self.expect_keyword(Keyword::FROM)?;
                (tables, true)
            }
        } else {
            (vec![], true)
        };

        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
        } else {
            None
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            self.parse_limit()?
        } else {
            None
        };

        Ok(Statement::Delete(Delete {
            tables,
            // Record whether FROM appeared so the statement can be
            // round-tripped back to its original spelling.
            from: if with_from_keyword {
                FromTable::WithFromKeyword(from)
            } else {
                FromTable::WithoutKeyword(from)
            },
            using,
            selection,
            returning,
            order_by,
            limit,
        }))
    }
9052
9053 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
9055 let modifier_keyword =
9056 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
9057
9058 let id = self.parse_literal_uint()?;
9059
9060 let modifier = match modifier_keyword {
9061 Some(Keyword::CONNECTION) => Some(KillType::Connection),
9062 Some(Keyword::QUERY) => Some(KillType::Query),
9063 Some(Keyword::MUTATION) => {
9064 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
9065 Some(KillType::Mutation)
9066 } else {
9067 self.expected(
9068 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
9069 self.peek_token(),
9070 )?
9071 }
9072 }
9073 _ => None,
9074 };
9075
9076 Ok(Statement::Kill { modifier, id })
9077 }
9078
    /// Parse an `EXPLAIN`/`DESCRIBE` statement (the alias keyword itself
    /// has already been consumed). Produces `Statement::Explain` when a
    /// statement follows, or `Statement::ExplainTable` when only a table
    /// name follows (the `DESCRIBE <table>` form).
    pub fn parse_explain(
        &mut self,
        describe_alias: DescribeAlias,
    ) -> Result<Statement, ParserError> {
        let mut analyze = false;
        let mut verbose = false;
        let mut query_plan = false;
        let mut format = None;
        let mut options = None;

        // Three mutually exclusive option syntaxes: Postgres-style
        // `EXPLAIN (opts...)`, `EXPLAIN QUERY PLAN`, or bare
        // `ANALYZE`/`VERBOSE`/`FORMAT` keywords.
        if describe_alias == DescribeAlias::Explain
            && self.dialect.supports_explain_with_utility_options()
            && self.peek_token().token == Token::LParen
        {
            options = Some(self.parse_utility_options()?)
        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
            query_plan = true;
        } else {
            analyze = self.parse_keyword(Keyword::ANALYZE);
            verbose = self.parse_keyword(Keyword::VERBOSE);
            if self.parse_keyword(Keyword::FORMAT) {
                format = Some(self.parse_analyze_format()?);
            }
        }

        match self.maybe_parse(|parser| parser.parse_statement())? {
            // A nested EXPLAIN is rejected outright.
            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
                ParserError::ParserError("Explain must be root of the plan".to_string()),
            ),
            Some(statement) => Ok(Statement::Explain {
                describe_alias,
                analyze,
                verbose,
                query_plan,
                statement: Box::new(statement),
                format,
                options,
            }),
            // No parseable statement: treat as DESCRIBE of a table.
            _ => {
                let hive_format =
                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
                        _ => None,
                    };

                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
                    self.parse_keyword(Keyword::TABLE)
                } else {
                    false
                };

                let table_name = self.parse_object_name(false)?;
                Ok(Statement::ExplainTable {
                    describe_alias,
                    hive_format,
                    has_table_keyword,
                    table_name,
                })
            }
        }
    }
9144
    /// Parse a full query: optional `WITH`, then either an `INSERT` /
    /// `UPDATE` body or a query body followed by ORDER BY / LIMIT /
    /// OFFSET / FETCH / locking / FORMAT clauses. Recursion depth is
    /// bounded via the recursion counter guard.
    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
        // Guard restores the remaining depth when dropped.
        let _guard = self.recursion_counter.try_decrease()?;
        let with = if let Some(with_token) = self.parse_keyword_token(Keyword::WITH) {
            Some(With {
                with_token: with_token.into(),
                recursive: self.parse_keyword(Keyword::RECURSIVE),
                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
            })
        } else {
            None
        };
        if self.parse_keyword(Keyword::INSERT) {
            // `WITH ... INSERT`: the insert is the whole body; no trailing
            // query clauses apply.
            Ok(Query {
                with,
                body: self.parse_insert_setexpr_boxed()?,
                limit: None,
                limit_by: vec![],
                order_by: None,
                offset: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
            }
            .into())
        } else if self.parse_keyword(Keyword::UPDATE) {
            // `WITH ... UPDATE`: same structure as the INSERT case.
            Ok(Query {
                with,
                body: self.parse_update_setexpr_boxed()?,
                limit: None,
                limit_by: vec![],
                order_by: None,
                offset: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
            }
            .into())
        } else {
            let body = self.parse_query_body(self.dialect.prec_unknown())?;

            let order_by = self.parse_optional_order_by()?;

            let mut limit = None;
            let mut offset = None;

            // Two passes so LIMIT and OFFSET are accepted in either order.
            for _x in 0..2 {
                if limit.is_none() && self.parse_keyword(Keyword::LIMIT) {
                    limit = self.parse_limit()?
                }

                if offset.is_none() && self.parse_keyword(Keyword::OFFSET) {
                    offset = Some(self.parse_offset()?)
                }

                // MySQL `LIMIT offset, limit`: the first number was parsed
                // as the limit but is actually the offset; swap them.
                if self.dialect.supports_limit_comma()
                    && limit.is_some()
                    && offset.is_none()
                    && self.consume_token(&Token::Comma)
                {
                    offset = Some(Offset {
                        value: limit.unwrap(),
                        rows: OffsetRows::None,
                    });
                    limit = Some(self.parse_expr()?);
                }
            }

            // ClickHouse `LIMIT n BY expr, ...`.
            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
                && self.parse_keyword(Keyword::BY)
            {
                self.parse_comma_separated(Parser::parse_expr)?
            } else {
                vec![]
            };

            let settings = self.parse_settings()?;

            let fetch = if self.parse_keyword(Keyword::FETCH) {
                Some(self.parse_fetch()?)
            } else {
                None
            };

            // A FOR keyword introduces either a single FOR XML/JSON/BROWSE
            // clause (which ends the query) or repeated locking clauses.
            let mut for_clause = None;
            let mut locks = Vec::new();
            while self.parse_keyword(Keyword::FOR) {
                if let Some(parsed_for_clause) = self.parse_for_clause()? {
                    for_clause = Some(parsed_for_clause);
                    break;
                } else {
                    locks.push(self.parse_lock()?);
                }
            }
            // ClickHouse `FORMAT <ident>` / `FORMAT NULL` output clause.
            let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
                && self.parse_keyword(Keyword::FORMAT)
            {
                if self.parse_keyword(Keyword::NULL) {
                    Some(FormatClause::Null)
                } else {
                    let ident = self.parse_identifier(false)?;
                    Some(FormatClause::Identifier(ident))
                }
            } else {
                None
            };

            Ok(Query {
                with,
                body,
                order_by,
                limit,
                limit_by,
                offset,
                fetch,
                locks,
                for_clause,
                settings,
                format_clause,
            }
            .into())
        }
    }
9277
9278 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
9279 let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
9280 && self.parse_keyword(Keyword::SETTINGS)
9281 {
9282 let key_values = self.parse_comma_separated(|p| {
9283 let key = p.parse_identifier(false)?;
9284 p.expect_token(&Token::Eq)?;
9285 let value = p.parse_value()?;
9286 Ok(Setting { key, value })
9287 })?;
9288 Some(key_values)
9289 } else {
9290 None
9291 };
9292 Ok(settings)
9293 }
9294
9295 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
9297 if self.parse_keyword(Keyword::XML) {
9298 Ok(Some(self.parse_for_xml()?))
9299 } else if self.parse_keyword(Keyword::JSON) {
9300 Ok(Some(self.parse_for_json()?))
9301 } else if self.parse_keyword(Keyword::BROWSE) {
9302 Ok(Some(ForClause::Browse))
9303 } else {
9304 Ok(None)
9305 }
9306 }
9307
    /// Parses the body of an MSSQL `FOR XML` clause (the `FOR XML` keywords
    /// were already consumed): one of `RAW | AUTO | EXPLICIT | PATH`, followed
    /// by optional comma-separated modifiers (`ELEMENTS`, `BINARY BASE64`,
    /// `ROOT('...')`, `TYPE`).
    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
        let for_xml = if self.parse_keyword(Keyword::RAW) {
            // RAW takes an optional parenthesized element-name string.
            let mut element_name = None;
            if self.peek_token().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Raw(element_name)
        } else if self.parse_keyword(Keyword::AUTO) {
            ForXml::Auto
        } else if self.parse_keyword(Keyword::EXPLICIT) {
            ForXml::Explicit
        } else if self.parse_keyword(Keyword::PATH) {
            // PATH takes the same optional parenthesized element name as RAW.
            let mut element_name = None;
            if self.peek_token().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Path(element_name)
        } else {
            return Err(ParserError::ParserError(
                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
            ));
        };
        // Collect trailing comma-separated modifiers. A comma followed by an
        // unrecognized token consumes only the comma and ends the loop on the
        // next iteration, leaving that token for the caller.
        let mut elements = false;
        let mut binary_base64 = false;
        let mut root = None;
        let mut r#type = false;
        while self.peek_token().token == Token::Comma {
            self.next_token();
            if self.parse_keyword(Keyword::ELEMENTS) {
                elements = true;
            } else if self.parse_keyword(Keyword::BINARY) {
                // BINARY must be followed by BASE64.
                self.expect_keyword(Keyword::BASE64)?;
                binary_base64 = true;
            } else if self.parse_keyword(Keyword::ROOT) {
                // ROOT('name') — the root element name is a string literal.
                self.expect_token(&Token::LParen)?;
                root = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            } else if self.parse_keyword(Keyword::TYPE) {
                r#type = true;
            }
        }
        Ok(ForClause::Xml {
            for_xml,
            elements,
            binary_base64,
            root,
            r#type,
        })
    }
9362
    /// Parses the body of an MSSQL `FOR JSON` clause (the `FOR JSON` keywords
    /// were already consumed): `AUTO | PATH` plus optional comma-separated
    /// modifiers (`ROOT('...')`, `INCLUDE_NULL_VALUES`, `WITHOUT_ARRAY_WRAPPER`).
    pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
        let for_json = if self.parse_keyword(Keyword::AUTO) {
            ForJson::Auto
        } else if self.parse_keyword(Keyword::PATH) {
            ForJson::Path
        } else {
            return Err(ParserError::ParserError(
                "Expected FOR JSON [AUTO | PATH ]".to_string(),
            ));
        };
        // Collect trailing comma-separated modifiers; an unrecognized token
        // after a comma ends the loop with that token left unconsumed.
        let mut root = None;
        let mut include_null_values = false;
        let mut without_array_wrapper = false;
        while self.peek_token().token == Token::Comma {
            self.next_token();
            if self.parse_keyword(Keyword::ROOT) {
                // ROOT('name') — root node name as a string literal.
                self.expect_token(&Token::LParen)?;
                root = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
                include_null_values = true;
            } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
                without_array_wrapper = true;
            }
        }
        Ok(ForClause::Json {
            for_json,
            root,
            include_null_values,
            without_array_wrapper,
        })
    }
9396
    /// Parses one common table expression in a WITH list, in either form:
    /// `name AS ( query )` or `name ( col, ... ) AS ( query )`, each with an
    /// optional Postgres `[NOT] MATERIALIZED` hint before the parenthesized
    /// query, plus an optional trailing `FROM ident` (dialect-specific;
    /// stored in `Cte::from` — verify against callers for which dialects use it).
    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
        let name = self.parse_identifier(false)?;

        let mut cte = if self.parse_keyword(Keyword::AS) {
            // Form without a column list: `name AS [MATERIALIZED|NOT MATERIALIZED] (query)`.
            let mut is_materialized = None;
            if dialect_of!(self is PostgreSqlDialect) {
                if self.parse_keyword(Keyword::MATERIALIZED) {
                    is_materialized = Some(CteAsMaterialized::Materialized);
                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
                }
            }
            self.expect_token(&Token::LParen)?;

            let query = self.parse_query()?;
            // The closing paren token is kept for span/location tracking.
            let closing_paren_token = self.expect_token(&Token::RParen)?;

            let alias = TableAlias {
                name,
                columns: vec![],
            };
            Cte {
                alias,
                query,
                from: None,
                materialized: is_materialized,
                closing_paren_token: closing_paren_token.into(),
            }
        } else {
            // Form with a column list: `name (col, ...) AS [...] (query)`.
            let columns = self.parse_table_alias_column_defs()?;
            self.expect_keyword(Keyword::AS)?;
            let mut is_materialized = None;
            if dialect_of!(self is PostgreSqlDialect) {
                if self.parse_keyword(Keyword::MATERIALIZED) {
                    is_materialized = Some(CteAsMaterialized::Materialized);
                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
                }
            }
            self.expect_token(&Token::LParen)?;

            let query = self.parse_query()?;
            let closing_paren_token = self.expect_token(&Token::RParen)?;

            let alias = TableAlias { name, columns };
            Cte {
                alias,
                query,
                from: None,
                materialized: is_materialized,
                closing_paren_token: closing_paren_token.into(),
            }
        };
        // Optional trailing `FROM ident` suffix applied to either form.
        if self.parse_keyword(Keyword::FROM) {
            cte.from = Some(self.parse_identifier(false)?);
        }
        Ok(cte)
    }
9456
    /// Parses a "query body": the part of a query between the WITH clause and
    /// any ORDER BY/LIMIT — a `SELECT`, a parenthesized subquery, a `VALUES`
    /// list, or `TABLE ...` — then folds in any trailing set operators
    /// (UNION/EXCEPT/INTERSECT) with at least the given `precedence`.
    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
        let expr = if self.peek_keyword(Keyword::SELECT) {
            SetExpr::Select(self.parse_select().map(Box::new)?)
        } else if self.consume_token(&Token::LParen) {
            // Parenthesized query: `( <query> )`.
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            SetExpr::Query(subquery)
        } else if self.parse_keyword(Keyword::VALUES) {
            // MySQL has its own VALUES row syntax, so the flag is threaded through.
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql)?)
        } else if self.parse_keyword(Keyword::TABLE) {
            SetExpr::Table(Box::new(self.parse_as_table()?))
        } else {
            return self.expected(
                "SELECT, VALUES, or a subquery in the query body",
                self.peek_token(),
            );
        };

        self.parse_remaining_set_exprs(expr, precedence)
    }
9489
    /// Precedence-climbing loop that combines `expr` with any following
    /// UNION/EXCEPT/INTERSECT operands. INTERSECT (precedence 20) binds more
    /// tightly than UNION/EXCEPT (precedence 10), per standard SQL.
    fn parse_remaining_set_exprs(
        &mut self,
        mut expr: SetExpr,
        precedence: u8,
    ) -> Result<Box<SetExpr>, ParserError> {
        loop {
            // Peek only — the operator token is consumed below once accepted.
            let op = self.parse_set_operator(&self.peek_token().token);
            let next_precedence = match op {
                Some(SetOperator::Union) | Some(SetOperator::Except) => 10,
                Some(SetOperator::Intersect) => 20,
                None => break,
            };
            if precedence >= next_precedence {
                // An operator that binds no tighter than our context belongs
                // to an enclosing call; stop and let the caller handle it.
                break;
            }
            // Consume the operator keyword, then any ALL/DISTINCT/BY NAME quantifier.
            self.next_token(); let set_quantifier = self.parse_set_quantifier(&op);
            expr = SetExpr::SetOperation {
                left: Box::new(expr),
                op: op.unwrap(),
                set_quantifier,
                right: self.parse_query_body(next_precedence)?,
            };
        }

        Ok(expr.into())
    }
9524
9525 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
9526 match token {
9527 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
9528 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
9529 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
9530 _ => None,
9531 }
9532 }
9533
    /// Parses the optional quantifier after a set operator: `ALL`, `DISTINCT`,
    /// and the `BY NAME` variants (`DISTINCT BY NAME`, `BY NAME`,
    /// `ALL BY NAME`). Returns `SetQuantifier::None` when nothing matches or
    /// when no set operator was recognized.
    pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
        match op {
            Some(SetOperator::Except | SetOperator::Intersect | SetOperator::Union) => {
                // Branch order matters: the longer multi-keyword forms must be
                // tried before their single-keyword prefixes.
                if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
                    SetQuantifier::DistinctByName
                } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
                    SetQuantifier::ByName
                } else if self.parse_keyword(Keyword::ALL) {
                    if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
                        SetQuantifier::AllByName
                    } else {
                        SetQuantifier::All
                    }
                } else if self.parse_keyword(Keyword::DISTINCT) {
                    SetQuantifier::Distinct
                } else {
                    SetQuantifier::None
                }
            }
            _ => SetQuantifier::None,
        }
    }
9556
    /// Parses a full `SELECT` statement body: projection, INTO, FROM with
    /// joins, Hive LATERAL VIEWs, PREWHERE/WHERE, GROUP BY, CLUSTER BY,
    /// DISTRIBUTE BY, SORT BY, HAVING, WINDOW/QUALIFY, and CONNECT BY.
    /// Clause parsing order mirrors the textual order accepted by the grammar.
    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
        let select_token = self.expect_keyword(Keyword::SELECT)?;
        // BigQuery: `SELECT AS VALUE` / `SELECT AS STRUCT` value tables.
        let value_table_mode =
            if dialect_of!(self is BigQueryDialect) && self.parse_keyword(Keyword::AS) {
                if self.parse_keyword(Keyword::VALUE) {
                    Some(ValueTableMode::AsValue)
                } else if self.parse_keyword(Keyword::STRUCT) {
                    Some(ValueTableMode::AsStruct)
                } else {
                    self.expected("VALUE or STRUCT", self.peek_token())?
                }
            } else {
                None
            };

        // TOP may come before or after ALL/DISTINCT depending on the dialect;
        // `top_before_distinct` records which position was used.
        let mut top_before_distinct = false;
        let mut top = None;
        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
            top_before_distinct = true;
        }
        let distinct = self.parse_all_or_distinct()?;
        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
        }

        let projection = self.parse_projection()?;

        // MSSQL-style `SELECT ... INTO [TEMP|TEMPORARY] [UNLOGGED] [TABLE] name`.
        let into = if self.parse_keyword(Keyword::INTO) {
            let temporary = self
                .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
                .is_some();
            let unlogged = self.parse_keyword(Keyword::UNLOGGED);
            let table = self.parse_keyword(Keyword::TABLE);
            let name = self.parse_object_name(false)?;
            Some(SelectInto {
                temporary,
                unlogged,
                table,
                name,
            })
        } else {
            None
        };

        let from = if self.parse_keyword(Keyword::FROM) {
            self.parse_comma_separated(Parser::parse_table_and_joins)?
        } else {
            vec![]
        };

        // Hive: zero or more `LATERAL VIEW [OUTER] expr name [AS] col, ...`.
        let mut lateral_views = vec![];
        loop {
            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
                let outer = self.parse_keyword(Keyword::OUTER);
                let lateral_view = self.parse_expr()?;
                let lateral_view_name = self.parse_object_name(false)?;
                // Aliases stop at the listed reserved keywords; absent aliases
                // (None entries) are dropped by `flatten`.
                let lateral_col_alias = self
                    .parse_comma_separated(|parser| {
                        parser.parse_optional_alias(&[
                            Keyword::WHERE,
                            Keyword::GROUP,
                            Keyword::CLUSTER,
                            Keyword::HAVING,
                            Keyword::LATERAL,
                        ]) })?
                    .into_iter()
                    .flatten()
                    .collect();

                lateral_views.push(LateralView {
                    lateral_view,
                    lateral_view_name,
                    lateral_col_alias,
                    outer,
                });
            } else {
                break;
            }
        }

        // ClickHouse: PREWHERE filter evaluated before WHERE.
        let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::PREWHERE)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // Missing GROUP BY is represented as an empty expression list.
        let group_by = self
            .parse_optional_group_by()?
            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));

        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let having = if self.parse_keyword(Keyword::HAVING) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // WINDOW and QUALIFY may appear in either order;
        // `window_before_qualify` preserves the order seen for round-tripping.
        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
        {
            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
            if self.parse_keyword(Keyword::QUALIFY) {
                (named_windows, Some(self.parse_expr()?), true)
            } else {
                (named_windows, None, true)
            }
        } else if self.parse_keyword(Keyword::QUALIFY) {
            let qualify = Some(self.parse_expr()?);
            if self.parse_keyword(Keyword::WINDOW) {
                (
                    self.parse_comma_separated(Parser::parse_named_window)?,
                    qualify,
                    false,
                )
            } else {
                (Default::default(), qualify, false)
            }
        } else {
            Default::default()
        };

        // Hierarchical query clause; the probed keyword is pushed back so
        // `parse_connect_by` can consume the full clause itself.
        let connect_by = if self.dialect.supports_connect_by()
            && self
                .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
                .is_some()
        {
            self.prev_token();
            Some(self.parse_connect_by()?)
        } else {
            None
        };

        Ok(Select {
            select_token: AttachedToken(select_token),
            distinct,
            top,
            top_before_distinct,
            projection,
            into,
            from,
            lateral_views,
            prewhere,
            selection,
            group_by,
            cluster_by,
            distribute_by,
            sort_by,
            having,
            named_window: named_windows,
            window_before_qualify,
            qualify,
            value_table_mode,
            connect_by,
        })
    }
9745
9746 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
9750 where
9751 F: FnMut(&mut Parser) -> Result<T, ParserError>,
9752 {
9753 let current_state = self.state;
9754 self.state = state;
9755 let res = f(self);
9756 self.state = current_state;
9757 res
9758 }
9759
    /// Parses a hierarchical-query clause, accepting `CONNECT BY ... START
    /// WITH ...` and `START WITH ... CONNECT BY ...` in either order. The
    /// relationship expressions are parsed under `ParserState::ConnectBy`,
    /// which alters expression parsing for this clause (see the expression
    /// parser for the exact effect).
    pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
        let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
            // CONNECT BY came first; START WITH must follow.
            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
                parser.parse_comma_separated(Parser::parse_expr)
            })?;
            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
            let condition = self.parse_expr()?;
            (condition, relationships)
        } else {
            // START WITH came first; CONNECT BY must follow.
            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
            let condition = self.parse_expr()?;
            self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
                parser.parse_comma_separated(Parser::parse_expr)
            })?;
            (condition, relationships)
        };
        Ok(ConnectBy {
            condition,
            relationships,
        })
    }
9782
9783 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
9785 let token1 = self.next_token();
9786 let token2 = self.next_token();
9787 let token3 = self.next_token();
9788
9789 let table_name;
9790 let schema_name;
9791 if token2 == Token::Period {
9792 match token1.token {
9793 Token::Word(w) => {
9794 schema_name = w.value;
9795 }
9796 _ => {
9797 return self.expected("Schema name", token1);
9798 }
9799 }
9800 match token3.token {
9801 Token::Word(w) => {
9802 table_name = w.value;
9803 }
9804 _ => {
9805 return self.expected("Table name", token3);
9806 }
9807 }
9808 Ok(Table {
9809 table_name: Some(table_name),
9810 schema_name: Some(schema_name),
9811 })
9812 } else {
9813 match token1.token {
9814 Token::Word(w) => {
9815 table_name = w.value;
9816 }
9817 _ => {
9818 return self.expected("Table name", token1);
9819 }
9820 }
9821 Ok(Table {
9822 table_name: Some(table_name),
9823 schema_name: None,
9824 })
9825 }
9826 }
9827
    /// Parses `SET [SESSION | LOCAL] ROLE { role_name | NONE }`.
    ///
    /// `modifier` is the SESSION/LOCAL keyword (if any) already consumed by
    /// `parse_set`; any other value maps to `ContextModifier::None`.
    fn parse_set_role(&mut self, modifier: Option<Keyword>) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::ROLE)?;
        let context_modifier = match modifier {
            Some(Keyword::LOCAL) => ContextModifier::Local,
            Some(Keyword::SESSION) => ContextModifier::Session,
            _ => ContextModifier::None,
        };

        // `SET ROLE NONE` clears the role; otherwise a role identifier follows.
        let role_name = if self.parse_keyword(Keyword::NONE) {
            None
        } else {
            Some(self.parse_identifier(false)?)
        };
        Ok(Statement::SetRole {
            context_modifier,
            role_name,
        })
    }
9847
    /// Parses the many forms of `SET`: `SET ROLE`, `SET NAMES`,
    /// `SET TIME ZONE`, `SET TRANSACTION`, `SET SESSION CHARACTERISTICS`,
    /// Hive `SET hivevar:...`, and plain variable assignment
    /// (`SET var = value` / `SET var TO value`, optionally parenthesized
    /// multi-assignment).
    pub fn parse_set(&mut self) -> Result<Statement, ParserError> {
        let modifier =
            self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::HIVEVAR]);
        if let Some(Keyword::HIVEVAR) = modifier {
            // Hive: `SET HIVEVAR:name = ...` — the colon follows immediately.
            self.expect_token(&Token::Colon)?;
        } else if let Some(set_role_stmt) =
            self.maybe_parse(|parser| parser.parse_set_role(modifier))?
        {
            // Speculatively try `SET ... ROLE`; rolls back on failure.
            return Ok(set_role_stmt);
        }

        // Determine the target variable(s). `TIME ZONE` is canonicalized to
        // the single pseudo-variable "TIMEZONE".
        let variables = if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE]) {
            OneOrManyWithParens::One(ObjectName(vec!["TIMEZONE".into()]))
        } else if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            // Parenthesized list: `SET (a, b) = (1, 2)`.
            let variables = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| {
                    parser.parse_identifier(false)
                })?
                .into_iter()
                .map(|ident| ObjectName(vec![ident]))
                .collect(),
            );
            self.expect_token(&Token::RParen)?;
            variables
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        // MySQL `SET NAMES charset [COLLATE collation]` / `SET NAMES DEFAULT`.
        if matches!(&variables, OneOrManyWithParens::One(variable) if variable.to_string().eq_ignore_ascii_case("NAMES")
            && dialect_of!(self is MySqlDialect | GenericDialect))
        {
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Statement::SetNamesDefault {});
            }

            let charset_name = self.parse_literal_string()?;
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Statement::SetNames {
                charset_name,
                collation_name,
            });
        }

        let parenthesized_assignment = matches!(&variables, OneOrManyWithParens::Many(_));

        // Assignment form: `= value[, ...]` or `TO value[, ...]`; a
        // parenthesized variable list requires a parenthesized value list.
        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            if parenthesized_assignment {
                self.expect_token(&Token::LParen)?;
            }

            let mut values = vec![];
            loop {
                // Prefer a subquery value, then a general expression.
                let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
                    expr
                } else if let Ok(expr) = self.parse_expr() {
                    expr
                } else {
                    self.expected("variable value", self.peek_token())?
                };

                values.push(value);
                if self.consume_token(&Token::Comma) {
                    continue;
                }

                if parenthesized_assignment {
                    self.expect_token(&Token::RParen)?;
                }
                return Ok(Statement::SetVariable {
                    local: modifier == Some(Keyword::LOCAL),
                    hivevar: Some(Keyword::HIVEVAR) == modifier,
                    variables,
                    value: values,
                });
            }
        }

        // No `=`/`TO`: only the single-variable special forms remain.
        let OneOrManyWithParens::One(variable) = variables else {
            return self.expected("set variable", self.peek_token());
        };

        if variable.to_string().eq_ignore_ascii_case("TIMEZONE") {
            // Postgres also allows `SET TIME ZONE <value>` with no `=`/`TO`.
            match self.parse_expr() {
                Ok(expr) => Ok(Statement::SetTimeZone {
                    local: modifier == Some(Keyword::LOCAL),
                    value: expr,
                }),
                _ => self.expected("timezone value", self.peek_token())?,
            }
        } else if variable.to_string() == "CHARACTERISTICS" {
            // `SET SESSION CHARACTERISTICS AS TRANSACTION <modes>`.
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            Ok(Statement::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            })
        } else if variable.to_string() == "TRANSACTION" && modifier.is_none() {
            // `SET TRANSACTION [SNAPSHOT id | <modes>]`.
            if self.parse_keyword(Keyword::SNAPSHOT) {
                let snapshot_id = self.parse_value()?;
                return Ok(Statement::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                });
            }
            Ok(Statement::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            })
        } else {
            self.expected("equals sign or TO", self.peek_token())
        }
    }
9970
    /// Parses the many `SHOW ...` variants, dispatching on the keyword that
    /// follows the optional TERSE/EXTENDED/FULL/SESSION/GLOBAL/EXTERNAL
    /// prefixes. Each `parse_keyword` probe consumes its keyword on match,
    /// so probe order is significant (e.g. `MATERIALIZED VIEWS` before `VIEWS`).
    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
        let terse = self.parse_keyword(Keyword::TERSE);
        let extended = self.parse_keyword(Keyword::EXTENDED);
        let full = self.parse_keyword(Keyword::FULL);
        let session = self.parse_keyword(Keyword::SESSION);
        let global = self.parse_keyword(Keyword::GLOBAL);
        let external = self.parse_keyword(Keyword::EXTERNAL);
        if self
            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
            .is_some()
        {
            Ok(self.parse_show_columns(extended, full)?)
        } else if self.parse_keyword(Keyword::TABLES) {
            Ok(self.parse_show_tables(terse, extended, full, external)?)
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
            Ok(self.parse_show_views(terse, true)?)
        } else if self.parse_keyword(Keyword::VIEWS) {
            Ok(self.parse_show_views(terse, false)?)
        } else if self.parse_keyword(Keyword::FUNCTIONS) {
            Ok(self.parse_show_functions()?)
        } else if extended || full {
            // EXTENDED/FULL only combine with the object kinds handled above.
            Err(ParserError::ParserError(
                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
            ))
        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
            Ok(self.parse_show_create()?)
        } else if self.parse_keyword(Keyword::COLLATION) {
            Ok(self.parse_show_collation()?)
        } else if self.parse_keyword(Keyword::VARIABLES)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Statement::ShowVariables {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::STATUS)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Statement::ShowStatus {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::DATABASES) {
            self.parse_show_databases(terse)
        } else if self.parse_keyword(Keyword::SCHEMAS) {
            self.parse_show_schemas(terse)
        } else {
            // Fallback: treat whatever follows as a bare variable name,
            // e.g. Postgres `SHOW search_path`.
            Ok(Statement::ShowVariable {
                variable: self.parse_identifiers()?,
            })
        }
    }
10025
10026 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
10027 let history = self.parse_keyword(Keyword::HISTORY);
10028 let show_options = self.parse_show_stmt_options()?;
10029 Ok(Statement::ShowDatabases {
10030 terse,
10031 history,
10032 show_options,
10033 })
10034 }
10035
10036 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
10037 let history = self.parse_keyword(Keyword::HISTORY);
10038 let show_options = self.parse_show_stmt_options()?;
10039 Ok(Statement::ShowSchemas {
10040 terse,
10041 history,
10042 show_options,
10043 })
10044 }
10045
10046 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
10047 let obj_type = match self.expect_one_of_keywords(&[
10048 Keyword::TABLE,
10049 Keyword::TRIGGER,
10050 Keyword::FUNCTION,
10051 Keyword::PROCEDURE,
10052 Keyword::EVENT,
10053 Keyword::VIEW,
10054 ])? {
10055 Keyword::TABLE => Ok(ShowCreateObject::Table),
10056 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
10057 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
10058 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
10059 Keyword::EVENT => Ok(ShowCreateObject::Event),
10060 Keyword::VIEW => Ok(ShowCreateObject::View),
10061 keyword => Err(ParserError::ParserError(format!(
10062 "Unable to map keyword to ShowCreateObject: {keyword:?}"
10063 ))),
10064 }?;
10065
10066 let obj_name = self.parse_object_name(false)?;
10067
10068 Ok(Statement::ShowCreate { obj_type, obj_name })
10069 }
10070
10071 pub fn parse_show_columns(
10072 &mut self,
10073 extended: bool,
10074 full: bool,
10075 ) -> Result<Statement, ParserError> {
10076 let show_options = self.parse_show_stmt_options()?;
10077 Ok(Statement::ShowColumns {
10078 extended,
10079 full,
10080 show_options,
10081 })
10082 }
10083
10084 fn parse_show_tables(
10085 &mut self,
10086 terse: bool,
10087 extended: bool,
10088 full: bool,
10089 external: bool,
10090 ) -> Result<Statement, ParserError> {
10091 let history = !external && self.parse_keyword(Keyword::HISTORY);
10092 let show_options = self.parse_show_stmt_options()?;
10093 Ok(Statement::ShowTables {
10094 terse,
10095 history,
10096 extended,
10097 full,
10098 external,
10099 show_options,
10100 })
10101 }
10102
10103 fn parse_show_views(
10104 &mut self,
10105 terse: bool,
10106 materialized: bool,
10107 ) -> Result<Statement, ParserError> {
10108 let show_options = self.parse_show_stmt_options()?;
10109 Ok(Statement::ShowViews {
10110 materialized,
10111 terse,
10112 show_options,
10113 })
10114 }
10115
10116 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
10117 let filter = self.parse_show_statement_filter()?;
10118 Ok(Statement::ShowFunctions { filter })
10119 }
10120
10121 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
10122 let filter = self.parse_show_statement_filter()?;
10123 Ok(Statement::ShowCollation { filter })
10124 }
10125
10126 pub fn parse_show_statement_filter(
10127 &mut self,
10128 ) -> Result<Option<ShowStatementFilter>, ParserError> {
10129 if self.parse_keyword(Keyword::LIKE) {
10130 Ok(Some(ShowStatementFilter::Like(
10131 self.parse_literal_string()?,
10132 )))
10133 } else if self.parse_keyword(Keyword::ILIKE) {
10134 Ok(Some(ShowStatementFilter::ILike(
10135 self.parse_literal_string()?,
10136 )))
10137 } else if self.parse_keyword(Keyword::WHERE) {
10138 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
10139 } else {
10140 self.maybe_parse(|parser| -> Result<String, ParserError> {
10141 parser.parse_literal_string()
10142 })?
10143 .map_or(Ok(None), |filter| {
10144 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
10145 })
10146 }
10147 }
10148
    /// Parses a `USE ...` statement. Which object-kind keyword may follow
    /// `USE` depends on the dialect: Hive allows `USE DEFAULT`; Databricks
    /// allows CATALOG/DATABASE/SCHEMA; Snowflake additionally allows
    /// WAREHOUSE, ROLE, and `SECONDARY ROLES ...`.
    pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
        let parsed_keyword = if dialect_of!(self is HiveDialect) {
            // Hive: `USE DEFAULT` resets to the default database.
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Statement::Use(Use::Default));
            }
            None } else if dialect_of!(self is DatabricksDialect) {
            self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
        } else if dialect_of!(self is SnowflakeDialect) {
            self.parse_one_of_keywords(&[
                Keyword::DATABASE,
                Keyword::SCHEMA,
                Keyword::WAREHOUSE,
                Keyword::ROLE,
                Keyword::SECONDARY,
            ])
        } else {
            None };

        let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
            // Snowflake: `USE SECONDARY ROLES { ALL | NONE | role, ... }`.
            self.parse_secondary_roles()?
        } else {
            // All remaining forms name a single object.
            let obj_name = self.parse_object_name(false)?;
            match parsed_keyword {
                Some(Keyword::CATALOG) => Use::Catalog(obj_name),
                Some(Keyword::DATABASE) => Use::Database(obj_name),
                Some(Keyword::SCHEMA) => Use::Schema(obj_name),
                Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
                Some(Keyword::ROLE) => Use::Role(obj_name),
                _ => Use::Object(obj_name),
            }
        };

        Ok(Statement::Use(result))
    }
10187
10188 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
10189 self.expect_keyword(Keyword::ROLES)?;
10190 if self.parse_keyword(Keyword::NONE) {
10191 Ok(Use::SecondaryRoles(SecondaryRoles::None))
10192 } else if self.parse_keyword(Keyword::ALL) {
10193 Ok(Use::SecondaryRoles(SecondaryRoles::All))
10194 } else {
10195 let roles = self.parse_comma_separated(|parser| parser.parse_identifier(false))?;
10196 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
10197 }
10198 }
10199
    /// Parses a table factor followed by any number of join clauses:
    /// CROSS JOIN/APPLY, OUTER APPLY, Snowflake ASOF JOIN, and the keyworded
    /// INNER/LEFT/RIGHT/FULL/SEMI/ANTI joins (optionally NATURAL). The loop
    /// ends when the next token does not start a join.
    pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
        let relation = self.parse_table_factor()?;
        let mut joins = vec![];
        loop {
            // ClickHouse: an optional GLOBAL prefix on any join.
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin
                } else if self.parse_keyword(Keyword::APPLY) {
                    // MSSQL CROSS APPLY.
                    JoinOperator::CrossApply
                } else {
                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
                };
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // MSSQL OUTER APPLY.
                self.expect_keyword(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // Snowflake: `ASOF JOIN t MATCH_CONDITION (expr) [ON ...]`.
                self.expect_keyword(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // Each arm yields the JoinOperator variant *constructor*,
                // applied to the join constraint after the relation is parsed.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        // Optional INNER, mandatory JOIN.
                        let _ = self.parse_keyword(Keyword::INNER); self.expect_keyword(Keyword::JOIN)?;
                        JoinOperator::Inner
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        // Consume LEFT/RIGHT, then dispatch on what follows.
                        let _ = self.next_token(); let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                // Bare `LEFT JOIN` / `RIGHT JOIN` imply OUTER.
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        let _ = self.next_token(); self.expect_keyword(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        let _ = self.next_token(); self.expect_keyword(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        // `FULL [OUTER] JOIN`.
                        let _ = self.next_token(); let _ = self.parse_keyword(Keyword::OUTER); self.expect_keyword(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        // A bare OUTER here (not after LEFT/RIGHT/FULL) is invalid.
                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
                    }
                    _ if natural => {
                        // NATURAL must be followed by a join keyword.
                        return self.expected("a join type after NATURAL", self.peek_token());
                    }
                    _ => break,
                };
                let relation = self.parse_table_factor()?;
                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(TableWithJoins { relation, joins })
    }
10341
    /// Parses a single table factor: the unit appearing in a `FROM` clause
    /// before any joins — a named table, derived table (subquery), `UNNEST`,
    /// `JSON_TABLE`/`OPENJSON`, parenthesized join, table function, or a
    /// dialect-specific bare `VALUES` clause.
    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
        if self.parse_keyword(Keyword::LATERAL) {
            // LATERAL must be followed by a subquery or a table function call.
            if self.consume_token(&Token::LParen) {
                self.parse_derived_table_factor(Lateral)
            } else {
                // LATERAL table function: `LATERAL name(args) [alias]`.
                let name = self.parse_object_name(false)?;
                self.expect_token(&Token::LParen)?;
                let args = self.parse_optional_args()?;
                let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
                Ok(TableFactor::Function {
                    lateral: true,
                    name,
                    args,
                    alias,
                })
            }
        } else if self.parse_keyword(Keyword::TABLE) {
            // `TABLE(<expr>)` table-function form.
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
            Ok(TableFactor::TableFunction { expr, alias })
        } else if self.consume_token(&Token::LParen) {
            // A `(` may start either a derived table (subquery) or a nested
            // join. Try the derived-table parse first; on success also consume
            // any trailing PIVOT/UNPIVOT operators applied to it.
            if let Some(mut table) =
                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
            {
                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
                {
                    table = match kw {
                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                        _ => unreachable!(),
                    }
                }
                return Ok(table);
            }

            // Not a subquery: parse the parenthesized contents as a table
            // reference possibly followed by joins.
            let mut table_and_joins = self.parse_table_and_joins()?;

            #[allow(clippy::if_same_then_else)]
            if !table_and_joins.joins.is_empty() {
                // `(a NATURAL JOIN b) [alias]` — a genuine nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if let TableFactor::NestedJoin {
                table_with_joins: _,
                alias: _,
            } = &table_and_joins.relation
            {
                // `((a NATURAL JOIN b)) [alias]` — redundant parentheses around
                // a nested join; wrap again so the alias can be attached.
                self.expect_token(&Token::RParen)?;
                let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
                // These dialects tolerate redundant parentheses around a lone
                // table factor, e.g. `(mytable) alias`; an outer alias, if
                // present, is pushed down onto the inner relation.
                self.expect_token(&Token::RParen)?;

                if let Some(outer_alias) =
                    self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?
                {
                    match &mut table_and_joins.relation {
                        TableFactor::Derived { alias, .. }
                        | TableFactor::Table { alias, .. }
                        | TableFactor::Function { alias, .. }
                        | TableFactor::UNNEST { alias, .. }
                        | TableFactor::JsonTable { alias, .. }
                        | TableFactor::OpenJsonTable { alias, .. }
                        | TableFactor::TableFunction { alias, .. }
                        | TableFactor::Pivot { alias, .. }
                        | TableFactor::Unpivot { alias, .. }
                        | TableFactor::MatchRecognize { alias, .. }
                        | TableFactor::NestedJoin { alias, .. } => {
                            // `(mytable AS inner) AS outer` is ambiguous — reject.
                            if let Some(inner_alias) = alias {
                                return Err(ParserError::ParserError(format!(
                                    "duplicate alias {inner_alias}"
                                )));
                            }
                            alias.replace(outer_alias);
                        }
                    };
                }
                Ok(table_and_joins.relation)
            } else {
                // Other dialects: a parenthesized single table is not valid here.
                self.expected("joined table", self.peek_token())
            }
        } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
            && matches!(
                self.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::VALUES,
                        ..
                    }),
                    Token::LParen
                ]
            )
        {
            // Bare `VALUES (...)` in FROM position; wrap it in a derived table
            // built around an otherwise-empty Query.
            self.expect_keyword(Keyword::VALUES)?;

            let values = SetExpr::Values(self.parse_values(false)?);
            let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
            Ok(TableFactor::Derived {
                lateral: false,
                subquery: Box::new(Query {
                    with: None,
                    body: Box::new(values),
                    order_by: None,
                    limit: None,
                    limit_by: vec![],
                    offset: None,
                    fetch: None,
                    locks: vec![],
                    for_clause: None,
                    settings: None,
                    format_clause: None,
                }),
                alias,
            })
        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::UNNEST)
        {
            // `UNNEST(expr[, ...]) [WITH ORDINALITY] [alias] [WITH OFFSET [alias]]`.
            self.expect_token(&Token::LParen)?;
            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
            let alias = match self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS) {
                Ok(Some(alias)) => Some(alias),
                Ok(None) => None,
                Err(e) => return Err(e),
            };

            // WITH OFFSET is optional; a failed expect here is simply "absent".
            let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
                Ok(()) => true,
                Err(_) => false,
            };

            let with_offset_alias = if with_offset {
                match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
                    Ok(Some(alias)) => Some(alias),
                    Ok(None) => None,
                    Err(e) => return Err(e),
                }
            } else {
                None
            };

            Ok(TableFactor::UNNEST {
                alias,
                array_exprs,
                with_offset,
                with_offset_alias,
                with_ordinality,
            })
        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
            // `JSON_TABLE(json_expr, json_path COLUMNS (...)) [alias]`.
            let json_expr = self.parse_expr()?;
            self.expect_token(&Token::Comma)?;
            let json_path = self.parse_value()?;
            self.expect_keyword(Keyword::COLUMNS)?;
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
            self.expect_token(&Token::RParen)?;
            self.expect_token(&Token::RParen)?;
            let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
            Ok(TableFactor::JsonTable {
                json_expr,
                json_path,
                columns,
                alias,
            })
        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
            // Rewind the consumed `(` so the helper can re-expect it.
            self.prev_token();
            self.parse_open_json_table_factor()
        } else {
            // Plain table name, possibly followed by a PartiQL path, a MySQL
            // PARTITION list, a version clause, table-function arguments,
            // hints, and trailing PIVOT/UNPIVOT/MATCH_RECOGNIZE operators.
            let name = self.parse_object_name(true)?;

            let json_path = match self.peek_token().token {
                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
                _ => None,
            };

            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::PARTITION)
            {
                self.parse_parenthesized_identifiers()?
            } else {
                vec![]
            };

            let version = self.parse_table_version()?;

            // `name(args)` — a table-valued function call.
            let args = if self.consume_token(&Token::LParen) {
                Some(self.parse_table_function_args()?)
            } else {
                None
            };

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);

            let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;

            // MSSQL-style `WITH (hint, ...)`; a bare `WITH` not followed by
            // `(` is rewound and left for the caller to interpret.
            let mut with_hints = vec![];
            if self.parse_keyword(Keyword::WITH) {
                if self.consume_token(&Token::LParen) {
                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
                    self.expect_token(&Token::RParen)?;
                } else {
                    self.prev_token();
                }
            };

            let mut table = TableFactor::Table {
                name,
                alias,
                args,
                with_hints,
                version,
                partitions,
                with_ordinality,
                json_path,
            };

            // Any number of chained PIVOT/UNPIVOT operators may follow.
            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
                table = match kw {
                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                    _ => unreachable!(),
                }
            }

            if self.dialect.supports_match_recognize()
                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
            {
                table = self.parse_match_recognize(table)?;
            }

            Ok(table)
        }
    }
10639
10640 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
10643 self.expect_token(&Token::LParen)?;
10644 let json_expr = self.parse_expr()?;
10645 let json_path = if self.consume_token(&Token::Comma) {
10646 Some(self.parse_value()?)
10647 } else {
10648 None
10649 };
10650 self.expect_token(&Token::RParen)?;
10651 let columns = if self.parse_keyword(Keyword::WITH) {
10652 self.expect_token(&Token::LParen)?;
10653 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
10654 self.expect_token(&Token::RParen)?;
10655 columns
10656 } else {
10657 Vec::new()
10658 };
10659 let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
10660 Ok(TableFactor::OpenJsonTable {
10661 json_expr,
10662 json_path,
10663 columns,
10664 alias,
10665 })
10666 }
10667
    /// Parses the body of a `MATCH_RECOGNIZE (...)` clause applied to `table`
    /// (row-pattern matching). The `MATCH_RECOGNIZE` keyword has already been
    /// consumed by the caller.
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Optional `PARTITION BY expr, ...`.
        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        // Optional `ORDER BY expr, ...`.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Optional `MEASURES expr [AS] alias, ...` — `AS` is optional here.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier(false)?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // Optional `ONE ROW PER MATCH` or `ALL ROWS PER MATCH` with an
        // optional empty-match mode suffix.
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // Optional `AFTER MATCH SKIP ...` — once the prefix has been consumed,
        // one of the four skip forms is required.
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier(false)?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier(false)?))
                } else {
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        // Mandatory `PATTERN ( ... )`.
        self.expect_keyword(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        // Mandatory `DEFINE symbol AS expr, ...`.
        self.expect_keyword(Keyword::DEFINE)?;

        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier(false)?;
            p.expect_keyword(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
10772
    /// Parses a primary (non-quantified) MATCH_RECOGNIZE pattern element:
    /// `^`, `$`, a `{- symbol -}` exclusion, `PERMUTE(...)`, a parenthesized
    /// group, or a bare symbol name.
    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        match self.next_token().token {
            // `^` anchors the pattern to the start of the partition.
            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
            // `$` anchors to the end; the tokenizer surfaces it as a placeholder.
            Token::Placeholder(s) if s == "$" => {
                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
            }
            // `{- symbol -}`: match the symbol but exclude its rows from output.
            Token::LBrace => {
                self.expect_token(&Token::Minus)?;
                let symbol = self
                    .parse_identifier(false)
                    .map(MatchRecognizeSymbol::Named)?;
                self.expect_token(&Token::Minus)?;
                self.expect_token(&Token::RBrace)?;
                Ok(MatchRecognizePattern::Exclude(symbol))
            }
            // `PERMUTE(a, b, ...)` — PERMUTE is not a reserved keyword, so it
            // arrives as an unquoted word and is matched by string value.
            Token::Word(Word {
                value,
                quote_style: None,
                ..
            }) if value == "PERMUTE" => {
                self.expect_token(&Token::LParen)?;
                let symbols = self.parse_comma_separated(|p| {
                    p.parse_identifier(false).map(MatchRecognizeSymbol::Named)
                })?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Permute(symbols))
            }
            // `( pattern )` grouping.
            Token::LParen => {
                let pattern = self.parse_pattern()?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
            }
            // Anything else: rewind and try to read it as a symbol identifier.
            _ => {
                self.prev_token();
                self.parse_identifier(false)
                    .map(MatchRecognizeSymbol::Named)
                    .map(MatchRecognizePattern::Symbol)
            }
        }
    }
10813
    /// Parses a base pattern followed by any number of repetition quantifiers:
    /// `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, or `{n,m}`. Each quantifier wraps
    /// the accumulated pattern in another `Repetition` node.
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // `?` is tokenized as a placeholder.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    let token = self.next_token();
                    match token.token {
                        // `{,m}` — upper bound only.
                        Token::Comma => {
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        // `{n,m}` or `{n,}` — a comma after `n` decides which.
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        // `{n}` — exact repetition count.
                        Token::Number(n, _) => {
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                // Not a quantifier: rewind and stop accumulating.
                _ => {
                    self.prev_token();
                    break;
                }
            };
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
10868
10869 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
10870 let mut patterns = vec![self.parse_repetition_pattern()?];
10871 while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
10872 patterns.push(self.parse_repetition_pattern()?);
10873 }
10874 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
10875 Ok([pattern]) => Ok(pattern),
10876 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
10877 }
10878 }
10879
10880 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
10881 let pattern = self.parse_concat_pattern()?;
10882 if self.consume_token(&Token::Pipe) {
10883 match self.parse_pattern()? {
10884 MatchRecognizePattern::Alternation(mut patterns) => {
10886 patterns.insert(0, pattern);
10887 Ok(MatchRecognizePattern::Alternation(patterns))
10888 }
10889 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
10890 }
10891 } else {
10892 Ok(pattern)
10893 }
10894 }
10895
10896 pub fn parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
10900 if dialect_of!(self is BigQueryDialect | MsSqlDialect)
10901 && self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
10902 {
10903 let expr = self.parse_expr()?;
10904 Ok(Some(TableVersion::ForSystemTimeAsOf(expr)))
10905 } else {
10906 Ok(None)
10907 }
10908 }
10909
    /// Parses one column definition inside `JSON_TABLE(... COLUMNS (...))`:
    /// a `NESTED [PATH] '...' COLUMNS (...)` group, a `name FOR ORDINALITY`
    /// counter column, or a typed column with a `PATH` and optional
    /// `ON EMPTY` / `ON ERROR` handlers.
    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
        if self.parse_keyword(Keyword::NESTED) {
            // The PATH keyword is optional in `NESTED [PATH] '...'`.
            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
            let path = self.parse_value()?;
            self.expect_keyword(Keyword::COLUMNS)?;
            let columns = self.parse_parenthesized(|p| {
                p.parse_comma_separated(Self::parse_json_table_column_def)
            })?;
            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
                path,
                columns,
            }));
        }
        let name = self.parse_identifier(false)?;
        if self.parse_keyword(Keyword::FOR) {
            self.expect_keyword(Keyword::ORDINALITY)?;
            return Ok(JsonTableColumn::ForOrdinality(name));
        }
        let r#type = self.parse_data_type()?;
        // `EXISTS` turns the column into an existence check on the path.
        let exists = self.parse_keyword(Keyword::EXISTS);
        self.expect_keyword(Keyword::PATH)?;
        let path = self.parse_value()?;
        // Zero or more handlers, each followed by `EMPTY` or `ERROR` to say
        // which condition it applies to; a later handler for the same
        // condition overwrites an earlier one.
        let mut on_empty = None;
        let mut on_error = None;
        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
            if self.parse_keyword(Keyword::EMPTY) {
                on_empty = Some(error_handling);
            } else {
                self.expect_keyword(Keyword::ERROR)?;
                on_error = Some(error_handling);
            }
        }
        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
            name,
            r#type,
            path,
            exists,
            on_empty,
            on_error,
        }))
    }
10953
10954 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
10962 let name = self.parse_identifier(false)?;
10963 let r#type = self.parse_data_type()?;
10964 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
10965 self.next_token();
10966 Some(path)
10967 } else {
10968 None
10969 };
10970 let as_json = self.parse_keyword(Keyword::AS);
10971 if as_json {
10972 self.expect_keyword(Keyword::JSON)?;
10973 }
10974 Ok(OpenJsonTableColumn {
10975 name,
10976 r#type,
10977 path,
10978 as_json,
10979 })
10980 }
10981
10982 fn parse_json_table_column_error_handling(
10983 &mut self,
10984 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
10985 let res = if self.parse_keyword(Keyword::NULL) {
10986 JsonTableColumnErrorHandling::Null
10987 } else if self.parse_keyword(Keyword::ERROR) {
10988 JsonTableColumnErrorHandling::Error
10989 } else if self.parse_keyword(Keyword::DEFAULT) {
10990 JsonTableColumnErrorHandling::Default(self.parse_value()?)
10991 } else {
10992 return Ok(None);
10993 };
10994 self.expect_keyword(Keyword::ON)?;
10995 Ok(Some(res))
10996 }
10997
10998 pub fn parse_derived_table_factor(
10999 &mut self,
11000 lateral: IsLateral,
11001 ) -> Result<TableFactor, ParserError> {
11002 let subquery = self.parse_query()?;
11003 self.expect_token(&Token::RParen)?;
11004 let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
11005 Ok(TableFactor::Derived {
11006 lateral: match lateral {
11007 Lateral => true,
11008 NotLateral => false,
11009 },
11010 subquery,
11011 alias,
11012 })
11013 }
11014
11015 fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
11016 let function_name = match self.next_token().token {
11017 Token::Word(w) => Ok(w.value),
11018 _ => self.expected("a function identifier", self.peek_token()),
11019 }?;
11020 let expr = self.parse_function(ObjectName(vec![Ident::new(function_name)]))?;
11021 let alias = if self.parse_keyword(Keyword::AS) {
11022 Some(self.parse_identifier(false)?)
11023 } else {
11024 None
11025 };
11026
11027 Ok(ExprWithAlias { expr, alias })
11028 }
11029 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
11052 let expr = self.parse_expr()?;
11053 let alias = if self.parse_keyword(Keyword::AS) {
11054 Some(self.parse_identifier(false)?)
11055 } else {
11056 None
11057 };
11058
11059 Ok(ExprWithAlias { expr, alias })
11060 }
11061
    /// Parses the body of a `PIVOT` operator applied to `table`:
    /// `PIVOT(agg_fns FOR value_column IN (source)) [alias]`, with an optional
    /// `DEFAULT ON NULL (expr)` clause. The `PIVOT` keyword itself has already
    /// been consumed by the caller.
    pub fn parse_pivot_table_factor(
        &mut self,
        table: TableFactor,
    ) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;
        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
        self.expect_keyword(Keyword::FOR)?;
        let value_column = self.parse_object_name(false)?.0;
        self.expect_keyword(Keyword::IN)?;

        // The IN list is `ANY [ORDER BY ...]`, a subquery, or an explicit
        // list of (possibly aliased) expressions.
        self.expect_token(&Token::LParen)?;
        let value_source = if self.parse_keyword(Keyword::ANY) {
            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                self.parse_comma_separated(Parser::parse_order_by_expr)?
            } else {
                vec![]
            };
            PivotValueSource::Any(order_by)
        } else if self.peek_sub_query() {
            PivotValueSource::Subquery(self.parse_query()?)
        } else {
            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
        };
        self.expect_token(&Token::RParen)?;

        // `DEFAULT ON NULL (expr)` supplies a replacement for NULL results.
        let default_on_null =
            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
                self.expect_token(&Token::LParen)?;
                let expr = self.parse_expr()?;
                self.expect_token(&Token::RParen)?;
                Some(expr)
            } else {
                None
            };

        self.expect_token(&Token::RParen)?;
        let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
        Ok(TableFactor::Pivot {
            table: Box::new(table),
            aggregate_functions,
            value_column,
            value_source,
            default_on_null,
            alias,
        })
    }
11108
    /// Parses the body of an `UNPIVOT` operator applied to `table`:
    /// `UNPIVOT(value FOR name IN (col, ...)) [alias]`. The `UNPIVOT` keyword
    /// itself has already been consumed by the caller.
    pub fn parse_unpivot_table_factor(
        &mut self,
        table: TableFactor,
    ) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;
        let value = self.parse_identifier(false)?;
        self.expect_keyword(Keyword::FOR)?;
        let name = self.parse_identifier(false)?;
        self.expect_keyword(Keyword::IN)?;
        let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
        self.expect_token(&Token::RParen)?;
        let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
        Ok(TableFactor::Unpivot {
            table: Box::new(table),
            value,
            name,
            columns,
            alias,
        })
    }
11129
11130 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
11131 if natural {
11132 Ok(JoinConstraint::Natural)
11133 } else if self.parse_keyword(Keyword::ON) {
11134 let constraint = self.parse_expr()?;
11135 Ok(JoinConstraint::On(constraint))
11136 } else if self.parse_keyword(Keyword::USING) {
11137 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
11138 Ok(JoinConstraint::Using(columns))
11139 } else {
11140 Ok(JoinConstraint::None)
11141 }
11143 }
11144
11145 pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
11147 let (privileges, objects) = self.parse_grant_revoke_privileges_objects()?;
11148
11149 self.expect_keyword(Keyword::TO)?;
11150 let grantees = self.parse_comma_separated(|p| p.parse_identifier(false))?;
11151
11152 let with_grant_option =
11153 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
11154
11155 let granted_by = self
11156 .parse_keywords(&[Keyword::GRANTED, Keyword::BY])
11157 .then(|| self.parse_identifier(false).unwrap());
11158
11159 Ok(Statement::Grant {
11160 privileges,
11161 objects,
11162 grantees,
11163 with_grant_option,
11164 granted_by,
11165 })
11166 }
11167
    /// Parses the privilege list and target objects shared by `GRANT` and
    /// `REVOKE`: either `ALL [PRIVILEGES]` or a list of actions, followed by
    /// `ON <objects>`.
    pub fn parse_grant_revoke_privileges_objects(
        &mut self,
    ) -> Result<(Privileges, GrantObjects), ParserError> {
        let privileges = if self.parse_keyword(Keyword::ALL) {
            Privileges::All {
                with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
            }
        } else {
            // Map each parsed action keyword to its Action variant, keeping
            // unrecognized keywords on the Err side so they can all be
            // reported together below.
            let (actions, err): (Vec<_>, Vec<_>) = self
                .parse_actions_list()?
                .into_iter()
                .map(|(kw, columns)| match kw {
                    Keyword::DELETE => Ok(Action::Delete),
                    Keyword::INSERT => Ok(Action::Insert { columns }),
                    Keyword::REFERENCES => Ok(Action::References { columns }),
                    Keyword::SELECT => Ok(Action::Select { columns }),
                    Keyword::TRIGGER => Ok(Action::Trigger),
                    Keyword::TRUNCATE => Ok(Action::Truncate),
                    Keyword::UPDATE => Ok(Action::Update { columns }),
                    Keyword::USAGE => Ok(Action::Usage),
                    Keyword::CONNECT => Ok(Action::Connect),
                    Keyword::CREATE => Ok(Action::Create),
                    Keyword::EXECUTE => Ok(Action::Execute),
                    Keyword::TEMPORARY => Ok(Action::Temporary),
                    _ => Err(kw),
                })
                .partition(Result::is_ok);

            // Any leftover keyword means parse_actions_list accepted something
            // the mapping above does not know — an internal inconsistency.
            if !err.is_empty() {
                let errors: Vec<Keyword> = err.into_iter().filter_map(|x| x.err()).collect();
                return Err(ParserError::ParserError(format!(
                    "INTERNAL ERROR: GRANT/REVOKE unexpected keyword(s) - {errors:?}"
                )));
            }
            let act = actions.into_iter().filter_map(|x| x.ok()).collect();
            Privileges::Actions(act)
        };

        self.expect_keyword(Keyword::ON)?;

        // Targets: `ALL TABLES IN SCHEMA`, `ALL SEQUENCES IN SCHEMA`, or an
        // explicit SEQUENCE/SCHEMA/TABLE object list (TABLE is the default
        // when no object-type keyword is given).
        let objects = if self.parse_keywords(&[
            Keyword::ALL,
            Keyword::TABLES,
            Keyword::IN,
            Keyword::SCHEMA,
        ]) {
            GrantObjects::AllTablesInSchema {
                schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
            }
        } else if self.parse_keywords(&[
            Keyword::ALL,
            Keyword::SEQUENCES,
            Keyword::IN,
            Keyword::SCHEMA,
        ]) {
            GrantObjects::AllSequencesInSchema {
                schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
            }
        } else {
            let object_type =
                self.parse_one_of_keywords(&[Keyword::SEQUENCE, Keyword::SCHEMA, Keyword::TABLE]);
            let objects = self.parse_comma_separated(|p| p.parse_object_name(false));
            match object_type {
                Some(Keyword::SCHEMA) => GrantObjects::Schemas(objects?),
                Some(Keyword::SEQUENCE) => GrantObjects::Sequences(objects?),
                Some(Keyword::TABLE) | None => GrantObjects::Tables(objects?),
                // parse_one_of_keywords only returns keywords from its list.
                _ => unreachable!(),
            }
        };

        Ok((privileges, objects))
    }
11243
11244 pub fn parse_grant_permission(&mut self) -> Result<ParsedAction, ParserError> {
11245 if let Some(kw) = self.parse_one_of_keywords(&[
11246 Keyword::CONNECT,
11247 Keyword::CREATE,
11248 Keyword::DELETE,
11249 Keyword::EXECUTE,
11250 Keyword::INSERT,
11251 Keyword::REFERENCES,
11252 Keyword::SELECT,
11253 Keyword::TEMPORARY,
11254 Keyword::TRIGGER,
11255 Keyword::TRUNCATE,
11256 Keyword::UPDATE,
11257 Keyword::USAGE,
11258 ]) {
11259 let columns = match kw {
11260 Keyword::INSERT | Keyword::REFERENCES | Keyword::SELECT | Keyword::UPDATE => {
11261 let columns = self.parse_parenthesized_column_list(Optional, false)?;
11262 if columns.is_empty() {
11263 None
11264 } else {
11265 Some(columns)
11266 }
11267 }
11268 _ => None,
11269 };
11270 Ok((kw, columns))
11271 } else {
11272 self.expected("a privilege keyword", self.peek_token())?
11273 }
11274 }
11275
11276 pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
11278 let (privileges, objects) = self.parse_grant_revoke_privileges_objects()?;
11279
11280 self.expect_keyword(Keyword::FROM)?;
11281 let grantees = self.parse_comma_separated(|p| p.parse_identifier(false))?;
11282
11283 let granted_by = self
11284 .parse_keywords(&[Keyword::GRANTED, Keyword::BY])
11285 .then(|| self.parse_identifier(false).unwrap());
11286
11287 let loc = self.peek_token().span.start;
11288 let cascade = self.parse_keyword(Keyword::CASCADE);
11289 let restrict = self.parse_keyword(Keyword::RESTRICT);
11290 if cascade && restrict {
11291 return parser_err!("Cannot specify both CASCADE and RESTRICT in REVOKE", loc);
11292 }
11293
11294 Ok(Statement::Revoke {
11295 privileges,
11296 objects,
11297 grantees,
11298 granted_by,
11299 cascade,
11300 })
11301 }
11302
11303 pub fn parse_replace(&mut self) -> Result<Statement, ParserError> {
11305 if !dialect_of!(self is MySqlDialect | GenericDialect) {
11306 return parser_err!(
11307 "Unsupported statement REPLACE",
11308 self.peek_token().span.start
11309 );
11310 }
11311
11312 let mut insert = self.parse_insert()?;
11313 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
11314 *replace_into = true;
11315 }
11316
11317 Ok(insert)
11318 }
11319
11320 fn parse_insert_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
11324 Ok(Box::new(SetExpr::Insert(self.parse_insert()?)))
11325 }
11326
    /// Parses an `INSERT` statement (the `INSERT` keyword has already been
    /// consumed). Also handles Hive's `INSERT ... DIRECTORY` form, MySQL
    /// priority/IGNORE modifiers, `ON CONFLICT` / `ON DUPLICATE KEY UPDATE`,
    /// and an optional `RETURNING` clause.
    pub fn parse_insert(&mut self) -> Result<Statement, ParserError> {
        // SQLite-style `INSERT OR REPLACE/ROLLBACK/...` conflict clause.
        let or = self.parse_conflict_clause();
        // MySQL-only priority modifiers.
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // Only `REPLACE INTO` (see parse_replace) sets this to true.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        if self.parse_keyword(Keyword::DIRECTORY) {
            // Hive: `INSERT OVERWRITE [LOCAL] DIRECTORY 'path' [STORED AS fmt] query`.
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Optional TABLE keyword before the table name.
            let table = self.parse_keyword(Keyword::TABLE);
            let table_name = self.parse_object_name(false)?;

            // Postgres allows aliasing the insert target: `INSERT INTO t AS a`.
            let table_alias =
                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
                    Some(self.parse_identifier(false)?)
                } else {
                    None
                };

            let is_mysql = dialect_of!(self is MySqlDialect);

            // Column list, partition spec, and source query — all absent for
            // `DEFAULT VALUES`, and the column list is skipped when the next
            // tokens already start a `(SELECT ...)` subquery.
            let (columns, partitioned, after_columns, source) =
                if self.parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES]) {
                    (vec![], None, vec![], None)
                } else {
                    let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                        let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;

                        let partitioned = self.parse_insert_partition()?;
                        // Hive allows a second column list after the partition spec.
                        let after_columns = if dialect_of!(self is HiveDialect) {
                            self.parse_parenthesized_column_list(Optional, false)?
                        } else {
                            vec![]
                        };
                        (columns, partitioned, after_columns)
                    } else {
                        Default::default()
                    };

                    let source = Some(self.parse_query()?);

                    (columns, partitioned, after_columns, source)
                };

            // MySQL row/column aliases: `INSERT ... AS new_row (c1, c2)`.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // `ON CONFLICT ... DO NOTHING/UPDATE` (Postgres/SQLite style) or
            // `ON DUPLICATE KEY UPDATE ...` (MySQL style).
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token() == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword(Keyword::DO)?;
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        self.expect_keyword(Keyword::UPDATE)?;
                        self.expect_keyword(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    self.expect_keyword(Keyword::DUPLICATE)?;
                    self.expect_keyword(Keyword::KEY)?;
                    self.expect_keyword(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Statement::Insert(Insert {
                or,
                table_name,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                table,
                on,
                returning,
                replace_into,
                priority,
                insert_alias,
            }))
        }
    }
11491
11492 fn peek_subquery_start(&mut self) -> bool {
11495 let [maybe_lparen, maybe_select] = self.peek_tokens();
11496 Token::LParen == maybe_lparen
11497 && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
11498 }
11499
11500 fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
11501 if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
11502 Some(SqliteOnConflict::Replace)
11503 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
11504 Some(SqliteOnConflict::Rollback)
11505 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
11506 Some(SqliteOnConflict::Abort)
11507 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
11508 Some(SqliteOnConflict::Fail)
11509 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
11510 Some(SqliteOnConflict::Ignore)
11511 } else if self.parse_keyword(Keyword::REPLACE) {
11512 Some(SqliteOnConflict::Replace)
11513 } else {
11514 None
11515 }
11516 }
11517
11518 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
11519 if self.parse_keyword(Keyword::PARTITION) {
11520 self.expect_token(&Token::LParen)?;
11521 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
11522 self.expect_token(&Token::RParen)?;
11523 Ok(partition_cols)
11524 } else {
11525 Ok(None)
11526 }
11527 }
11528
11529 pub fn parse_load_data_table_format(
11530 &mut self,
11531 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
11532 if self.parse_keyword(Keyword::INPUTFORMAT) {
11533 let input_format = self.parse_expr()?;
11534 self.expect_keyword(Keyword::SERDE)?;
11535 let serde = self.parse_expr()?;
11536 Ok(Some(HiveLoadDataFormat {
11537 input_format,
11538 serde,
11539 }))
11540 } else {
11541 Ok(None)
11542 }
11543 }
11544
11545 fn parse_update_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
11549 Ok(Box::new(SetExpr::Update(self.parse_update()?)))
11550 }
11551
11552 pub fn parse_update(&mut self) -> Result<Statement, ParserError> {
11553 let or = self.parse_conflict_clause();
11554 let table = self.parse_table_and_joins()?;
11555 self.expect_keyword(Keyword::SET)?;
11556 let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
11557 let from = if self.parse_keyword(Keyword::FROM)
11558 && dialect_of!(self is GenericDialect | PostgreSqlDialect | DuckDbDialect | BigQueryDialect | SnowflakeDialect | RedshiftSqlDialect | MsSqlDialect | SQLiteDialect )
11559 {
11560 Some(self.parse_table_and_joins()?)
11561 } else {
11562 None
11563 };
11564 let selection = if self.parse_keyword(Keyword::WHERE) {
11565 Some(self.parse_expr()?)
11566 } else {
11567 None
11568 };
11569 let returning = if self.parse_keyword(Keyword::RETURNING) {
11570 Some(self.parse_comma_separated(Parser::parse_select_item)?)
11571 } else {
11572 None
11573 };
11574 Ok(Statement::Update {
11575 table,
11576 assignments,
11577 from,
11578 selection,
11579 returning,
11580 or,
11581 })
11582 }
11583
11584 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
11586 let target = self.parse_assignment_target()?;
11587 self.expect_token(&Token::Eq)?;
11588 let value = self.parse_expr()?;
11589 Ok(Assignment { target, value })
11590 }
11591
11592 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
11594 if self.consume_token(&Token::LParen) {
11595 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
11596 self.expect_token(&Token::RParen)?;
11597 Ok(AssignmentTarget::Tuple(columns))
11598 } else {
11599 let column = self.parse_object_name(false)?;
11600 Ok(AssignmentTarget::ColumnName(column))
11601 }
11602 }
11603
    /// Parses a single function-call argument, which may be named
    /// (`name => expr`, `name := expr`, ...) or unnamed.
    ///
    /// The named form is attempted speculatively via `maybe_parse`: if the
    /// name/operator prefix does not parse, the parser position is restored
    /// and the argument is re-parsed as an unnamed wildcard expression.
    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
            // This dialect allows a full expression as the argument name.
            self.maybe_parse(|p| {
                let name = p.parse_expr()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::ExprNamed {
                    name,
                    arg,
                    operator,
                })
            })?
        } else {
            // Otherwise the argument name must be a plain identifier.
            self.maybe_parse(|p| {
                let name = p.parse_identifier(false)?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::Named {
                    name,
                    arg,
                    operator,
                })
            })?
        };
        if let Some(arg) = arg {
            return Ok(arg);
        }
        // Not a named argument: fall back to an unnamed expression.
        Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
    }
11633
    /// Parses the operator separating a named function argument's name from
    /// its value: the `VALUE` keyword, `=>`, `=`, `:=`, or `:` — each token
    /// form gated on the corresponding dialect capability.
    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
        if self.parse_keyword(Keyword::VALUE) {
            return Ok(FunctionArgOperator::Value);
        }
        let tok = self.next_token();
        match tok.token {
            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
                Ok(FunctionArgOperator::RightArrow)
            }
            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
                Ok(FunctionArgOperator::Equals)
            }
            Token::Assignment
                if self
                    .dialect
                    .supports_named_fn_args_with_assignment_operator() =>
            {
                Ok(FunctionArgOperator::Assignment)
            }
            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
                Ok(FunctionArgOperator::Colon)
            }
            _ => {
                // Not an argument operator: push the token back so the
                // speculative caller (`maybe_parse`) can rewind cleanly.
                self.prev_token();
                self.expected("argument operator", tok)
            }
        }
    }
11662
11663 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
11664 if self.consume_token(&Token::RParen) {
11665 Ok(vec![])
11666 } else {
11667 let args = self.parse_comma_separated(Parser::parse_function_args)?;
11668 self.expect_token(&Token::RParen)?;
11669 Ok(args)
11670 }
11671 }
11672
    /// Parses the argument list of a table-valued function call, assuming the
    /// opening `(` was already consumed; consumes the closing `)`.
    ///
    /// A `SETTINGS ...` list, if present, terminates the argument list and is
    /// returned alongside the arguments.
    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
        // Empty argument list: `()`.
        if self.consume_token(&Token::RParen) {
            return Ok(TableFunctionArgs {
                args: vec![],
                settings: None,
            });
        }
        let mut args = vec![];
        let settings = loop {
            // A settings list may appear in place of the next argument and
            // ends the comma-separated arguments.
            if let Some(settings) = self.parse_settings()? {
                break Some(settings);
            }
            args.push(self.parse_function_args()?);
            if self.is_parse_comma_separated_end() {
                break None;
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(TableFunctionArgs { args, settings })
    }
11693
    /// Parses the contents of a function call's argument list, assuming the
    /// opening `(` was already consumed; consumes the closing `)`.
    ///
    /// Besides the comma-separated arguments (optionally preceded by
    /// `ALL`/`DISTINCT`), this collects the trailing clauses — null
    /// treatment, `ORDER BY`, `LIMIT`, `HAVING MIN/MAX`, `SEPARATOR`,
    /// `ON OVERFLOW`, JSON null clauses — in this fixed order, each gated on
    /// dialect support where applicable.
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // A JSON null clause may appear before any arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        // Empty argument list (possibly after a leading null clause).
        if self.consume_token(&Token::RParen) {
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                _ => unreachable!(),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?));
        }

        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // A JSON null clause may also appear after the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
11772
11773 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
11775 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
11776 Some(JsonNullClause::AbsentOnNull)
11777 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
11778 Some(JsonNullClause::NullOnNull)
11779 } else {
11780 None
11781 }
11782 }
11783
11784 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
11785 let loc = self.peek_token().span.start;
11786 match (
11787 self.parse_keyword(Keyword::ALL),
11788 self.parse_keyword(Keyword::DISTINCT),
11789 ) {
11790 (true, false) => Ok(Some(DuplicateTreatment::All)),
11791 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
11792 (false, false) => Ok(None),
11793 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
11794 }
11795 }
11796
    /// Parses a single projection item in a SELECT list: a (qualified)
    /// wildcard with optional modifiers, an expression with an optional
    /// alias, or an assignment-style `alias = expr` where supported.
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        match self.parse_wildcard_expr()? {
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                prefix,
                self.parse_wildcard_additional_options(token.0)?,
            )),
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // A bare, unquoted `from` parsed as an identifier indicates a
            // malformed select list; report a missing expression instead of
            // silently treating it as a column named "from".
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token().span.start
                )
            }
            // Dialects with `alias = expr` assignment-style aliasing.
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                // Guaranteed by the `matches!` guard above.
                let Expr::Identifier(alias) = *left else {
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // Plain expression, possibly followed by `[AS] alias`.
            expr => self
                .parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS)
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias { expr, alias },
                    None => SelectItem::UnnamedExpr(expr),
                }),
        }
    }
11839
    /// Parses the optional modifiers that may follow a wildcard select item —
    /// `ILIKE`, `EXCLUDE`, `EXCEPT`, `REPLACE`, `RENAME` — each gated on
    /// dialect support and attempted in that fixed order.
    pub fn parse_wildcard_additional_options(
        &mut self,
        wildcard_token: TokenWithSpan,
    ) -> Result<WildcardAdditionalOptions, ParserError> {
        let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
            self.parse_optional_select_item_ilike()?
        } else {
            None
        };
        // EXCLUDE is only attempted when no ILIKE clause was parsed.
        let opt_exclude = if opt_ilike.is_none()
            && dialect_of!(self is GenericDialect | DuckDbDialect | SnowflakeDialect)
        {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };
        let opt_except = if self.dialect.supports_select_wildcard_except() {
            self.parse_optional_select_item_except()?
        } else {
            None
        };
        let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
        {
            self.parse_optional_select_item_replace()?
        } else {
            None
        };
        let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
            self.parse_optional_select_item_rename()?
        } else {
            None
        };

        Ok(WildcardAdditionalOptions {
            wildcard_token: wildcard_token.into(),
            opt_ilike,
            opt_exclude,
            opt_except,
            opt_rename,
            opt_replace,
        })
    }
11885
11886 pub fn parse_optional_select_item_ilike(
11890 &mut self,
11891 ) -> Result<Option<IlikeSelectItem>, ParserError> {
11892 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
11893 let next_token = self.next_token();
11894 let pattern = match next_token.token {
11895 Token::SingleQuotedString(s) => s,
11896 _ => return self.expected("ilike pattern", next_token),
11897 };
11898 Some(IlikeSelectItem { pattern })
11899 } else {
11900 None
11901 };
11902 Ok(opt_ilike)
11903 }
11904
11905 pub fn parse_optional_select_item_exclude(
11909 &mut self,
11910 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
11911 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
11912 if self.consume_token(&Token::LParen) {
11913 let columns =
11914 self.parse_comma_separated(|parser| parser.parse_identifier(false))?;
11915 self.expect_token(&Token::RParen)?;
11916 Some(ExcludeSelectItem::Multiple(columns))
11917 } else {
11918 let column = self.parse_identifier(false)?;
11919 Some(ExcludeSelectItem::Single(column))
11920 }
11921 } else {
11922 None
11923 };
11924
11925 Ok(opt_exclude)
11926 }
11927
11928 pub fn parse_optional_select_item_except(
11932 &mut self,
11933 ) -> Result<Option<ExceptSelectItem>, ParserError> {
11934 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
11935 if self.peek_token().token == Token::LParen {
11936 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
11937 match &idents[..] {
11938 [] => {
11939 return self.expected(
11940 "at least one column should be parsed by the expect clause",
11941 self.peek_token(),
11942 )?;
11943 }
11944 [first, idents @ ..] => Some(ExceptSelectItem {
11945 first_element: first.clone(),
11946 additional_elements: idents.to_vec(),
11947 }),
11948 }
11949 } else {
11950 let ident = self.parse_identifier(false)?;
11952 Some(ExceptSelectItem {
11953 first_element: ident,
11954 additional_elements: vec![],
11955 })
11956 }
11957 } else {
11958 None
11959 };
11960
11961 Ok(opt_except)
11962 }
11963
11964 pub fn parse_optional_select_item_rename(
11966 &mut self,
11967 ) -> Result<Option<RenameSelectItem>, ParserError> {
11968 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
11969 if self.consume_token(&Token::LParen) {
11970 let idents =
11971 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
11972 self.expect_token(&Token::RParen)?;
11973 Some(RenameSelectItem::Multiple(idents))
11974 } else {
11975 let ident = self.parse_identifier_with_alias()?;
11976 Some(RenameSelectItem::Single(ident))
11977 }
11978 } else {
11979 None
11980 };
11981
11982 Ok(opt_rename)
11983 }
11984
11985 pub fn parse_optional_select_item_replace(
11987 &mut self,
11988 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
11989 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
11990 if self.consume_token(&Token::LParen) {
11991 let items = self.parse_comma_separated(|parser| {
11992 Ok(Box::new(parser.parse_replace_elements()?))
11993 })?;
11994 self.expect_token(&Token::RParen)?;
11995 Some(ReplaceSelectItem { items })
11996 } else {
11997 let tok = self.next_token();
11998 return self.expected("( after REPLACE but", tok);
11999 }
12000 } else {
12001 None
12002 };
12003
12004 Ok(opt_replace)
12005 }
12006 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
12007 let expr = self.parse_expr()?;
12008 let as_keyword = self.parse_keyword(Keyword::AS);
12009 let ident = self.parse_identifier(false)?;
12010 Ok(ReplaceSelectElement {
12011 expr,
12012 column_name: ident,
12013 as_keyword,
12014 })
12015 }
12016
12017 pub fn parse_asc_desc(&mut self) -> Option<bool> {
12020 if self.parse_keyword(Keyword::ASC) {
12021 Some(true)
12022 } else if self.parse_keyword(Keyword::DESC) {
12023 Some(false)
12024 } else {
12025 None
12026 }
12027 }
12028
12029 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
12031 let expr = self.parse_expr()?;
12032
12033 let asc = self.parse_asc_desc();
12034
12035 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
12036 Some(true)
12037 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
12038 Some(false)
12039 } else {
12040 None
12041 };
12042
12043 let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
12044 && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
12045 {
12046 Some(self.parse_with_fill()?)
12047 } else {
12048 None
12049 };
12050
12051 Ok(OrderByExpr {
12052 expr,
12053 asc,
12054 nulls_first,
12055 with_fill,
12056 })
12057 }
12058
12059 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
12062 let from = if self.parse_keyword(Keyword::FROM) {
12063 Some(self.parse_expr()?)
12064 } else {
12065 None
12066 };
12067
12068 let to = if self.parse_keyword(Keyword::TO) {
12069 Some(self.parse_expr()?)
12070 } else {
12071 None
12072 };
12073
12074 let step = if self.parse_keyword(Keyword::STEP) {
12075 Some(self.parse_expr()?)
12076 } else {
12077 None
12078 };
12079
12080 Ok(WithFill { from, to, step })
12081 }
12082
12083 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
12086 if !self.parse_keyword(Keyword::INTERPOLATE) {
12087 return Ok(None);
12088 }
12089
12090 if self.consume_token(&Token::LParen) {
12091 let interpolations =
12092 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
12093 self.expect_token(&Token::RParen)?;
12094 return Ok(Some(Interpolate {
12096 exprs: Some(interpolations),
12097 }));
12098 }
12099
12100 Ok(Some(Interpolate { exprs: None }))
12102 }
12103
12104 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
12106 let column = self.parse_identifier(false)?;
12107 let expr = if self.parse_keyword(Keyword::AS) {
12108 Some(self.parse_expr()?)
12109 } else {
12110 None
12111 };
12112 Ok(InterpolateExpr { column, expr })
12113 }
12114
12115 pub fn parse_top(&mut self) -> Result<Top, ParserError> {
12118 let quantity = if self.consume_token(&Token::LParen) {
12119 let quantity = self.parse_expr()?;
12120 self.expect_token(&Token::RParen)?;
12121 Some(TopQuantity::Expr(quantity))
12122 } else {
12123 let next_token = self.next_token();
12124 let quantity = match next_token.token {
12125 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
12126 _ => self.expected("literal int", next_token)?,
12127 };
12128 Some(TopQuantity::Constant(quantity))
12129 };
12130
12131 let percent = self.parse_keyword(Keyword::PERCENT);
12132
12133 let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
12134
12135 Ok(Top {
12136 with_ties,
12137 percent,
12138 quantity,
12139 })
12140 }
12141
12142 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
12144 if self.parse_keyword(Keyword::ALL) {
12145 Ok(None)
12146 } else {
12147 Ok(Some(self.parse_expr()?))
12148 }
12149 }
12150
12151 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
12153 let value = self.parse_expr()?;
12154 let rows = if self.parse_keyword(Keyword::ROW) {
12155 OffsetRows::Row
12156 } else if self.parse_keyword(Keyword::ROWS) {
12157 OffsetRows::Rows
12158 } else {
12159 OffsetRows::None
12160 };
12161 Ok(Offset { value, rows })
12162 }
12163
    /// Parses a `FETCH {FIRST | NEXT} [<quantity> [PERCENT]] {ROW | ROWS}
    /// {ONLY | WITH TIES}` clause (the `FETCH` keyword has already been
    /// consumed by the caller).
    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
        self.expect_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT])?;
        // `ROW`/`ROWS` immediately after FIRST/NEXT means no explicit
        // quantity was given.
        let (quantity, percent) = if self
            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
            .is_some()
        {
            (None, false)
        } else {
            let quantity = Expr::Value(self.parse_value()?);
            let percent = self.parse_keyword(Keyword::PERCENT);
            self.expect_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])?;
            (Some(quantity), percent)
        };
        // The clause must end with either ONLY or WITH TIES.
        let with_ties = if self.parse_keyword(Keyword::ONLY) {
            false
        } else if self.parse_keywords(&[Keyword::WITH, Keyword::TIES]) {
            true
        } else {
            return self.expected("one of ONLY or WITH TIES", self.peek_token());
        };
        Ok(Fetch {
            with_ties,
            percent,
            quantity,
        })
    }
12191
12192 pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
12194 let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
12195 Keyword::UPDATE => LockType::Update,
12196 Keyword::SHARE => LockType::Share,
12197 _ => unreachable!(),
12198 };
12199 let of = if self.parse_keyword(Keyword::OF) {
12200 Some(self.parse_object_name(false)?)
12201 } else {
12202 None
12203 };
12204 let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
12205 Some(NonBlock::Nowait)
12206 } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
12207 Some(NonBlock::SkipLocked)
12208 } else {
12209 None
12210 };
12211 Ok(LockClause {
12212 lock_type,
12213 of,
12214 nonblock,
12215 })
12216 }
12217
12218 pub fn parse_values(&mut self, allow_empty: bool) -> Result<Values, ParserError> {
12219 let mut explicit_row = false;
12220
12221 let rows = self.parse_comma_separated(|parser| {
12222 if parser.parse_keyword(Keyword::ROW) {
12223 explicit_row = true;
12224 }
12225
12226 parser.expect_token(&Token::LParen)?;
12227 if allow_empty && parser.peek_token().token == Token::RParen {
12228 parser.next_token();
12229 Ok(vec![])
12230 } else {
12231 let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
12232 parser.expect_token(&Token::RParen)?;
12233 Ok(exprs)
12234 }
12235 })?;
12236 Ok(Values { explicit_row, rows })
12237 }
12238
12239 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
12240 self.expect_keyword(Keyword::TRANSACTION)?;
12241 Ok(Statement::StartTransaction {
12242 modes: self.parse_transaction_modes()?,
12243 begin: false,
12244 transaction: Some(BeginTransactionKind::Transaction),
12245 modifier: None,
12246 })
12247 }
12248
    /// Parses `BEGIN [DEFERRED | IMMEDIATE | EXCLUSIVE] [TRANSACTION | WORK]
    /// [<modes>]` (the `BEGIN` keyword has already been consumed),
    /// represented as `Statement::StartTransaction` with `begin: true`.
    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
        // The transaction modifier is only recognized by dialects that
        // opt in via `supports_start_transaction_modifier`.
        let modifier = if !self.dialect.supports_start_transaction_modifier() {
            None
        } else if self.parse_keyword(Keyword::DEFERRED) {
            Some(TransactionModifier::Deferred)
        } else if self.parse_keyword(Keyword::IMMEDIATE) {
            Some(TransactionModifier::Immediate)
        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
            Some(TransactionModifier::Exclusive)
        } else {
            None
        };
        // Record which optional keyword followed BEGIN, for round-tripping.
        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
            _ => None,
        };
        Ok(Statement::StartTransaction {
            modes: self.parse_transaction_modes()?,
            begin: true,
            transaction,
            modifier,
        })
    }
12273
12274 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
12275 Ok(Statement::Commit {
12276 chain: self.parse_commit_rollback_chain()?,
12277 })
12278 }
12279
    /// Parses zero or more transaction modes (`ISOLATION LEVEL ...`,
    /// `READ ONLY`, `READ WRITE`).
    ///
    /// Modes may be comma-separated; once a comma has been consumed the next
    /// mode becomes mandatory, otherwise the list simply ends at the first
    /// non-mode token.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else {
                    self.expected("isolation level", self.peek_token())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                self.expected("transaction mode", self.peek_token())?
            } else {
                break;
            };
            modes.push(mode);
            // A trailing comma makes the next mode mandatory.
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
12315
12316 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
12317 Ok(Statement::Commit {
12318 chain: self.parse_commit_rollback_chain()?,
12319 })
12320 }
12321
12322 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
12323 let chain = self.parse_commit_rollback_chain()?;
12324 let savepoint = self.parse_rollback_savepoint()?;
12325
12326 Ok(Statement::Rollback { chain, savepoint })
12327 }
12328
12329 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
12330 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
12331 if self.parse_keyword(Keyword::AND) {
12332 let chain = !self.parse_keyword(Keyword::NO);
12333 self.expect_keyword(Keyword::CHAIN)?;
12334 Ok(chain)
12335 } else {
12336 Ok(false)
12337 }
12338 }
12339
12340 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
12341 if self.parse_keyword(Keyword::TO) {
12342 let _ = self.parse_keyword(Keyword::SAVEPOINT);
12343 let savepoint = self.parse_identifier(false)?;
12344
12345 Ok(Some(savepoint))
12346 } else {
12347 Ok(None)
12348 }
12349 }
12350
    /// Parses a `DEALLOCATE [PREPARE] <name>` statement (the `DEALLOCATE`
    /// keyword has already been consumed). The optional `PREPARE` keyword is
    /// recorded so the statement can be round-tripped faithfully.
    pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
        let prepare = self.parse_keyword(Keyword::PREPARE);
        let name = self.parse_identifier(false)?;
        Ok(Statement::Deallocate { name, prepare })
    }
12356
    /// Parses an `EXECUTE <name> [(<params>) | <params>] [USING <expr>, ...]`
    /// statement (the `EXECUTE` keyword has already been consumed).
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        let has_parentheses = self.consume_token(&Token::LParen);

        // Determine where the parameter list ends: at `)` when parenthesized;
        // otherwise at EOF, a `USING` keyword, or `;`.
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if w.keyword == Keyword::USING => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        // Optional `USING <expr>, ...` argument list.
        let mut using = vec![];
        if self.parse_keyword(Keyword::USING) {
            using.push(self.parse_expr()?);

            while self.consume_token(&Token::Comma) {
                using.push(self.parse_expr()?);
            }
        };

        Ok(Statement::Execute {
            name,
            parameters,
            has_parentheses,
            using,
        })
    }
12391
12392 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
12393 let name = self.parse_identifier(false)?;
12394
12395 let mut data_types = vec![];
12396 if self.consume_token(&Token::LParen) {
12397 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
12398 self.expect_token(&Token::RParen)?;
12399 }
12400
12401 self.expect_keyword(Keyword::AS)?;
12402 let statement = Box::new(self.parse_statement()?);
12403 Ok(Statement::Prepare {
12404 name,
12405 data_types,
12406 statement,
12407 })
12408 }
12409
12410 pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
12411 self.expect_token(&Token::LParen)?;
12412 let query = self.parse_query()?;
12413 self.expect_token(&Token::RParen)?;
12414
12415 self.expect_keyword(Keyword::TO)?;
12416 let to = self.parse_identifier(false)?;
12417
12418 let with_options = self.parse_options(Keyword::WITH)?;
12419
12420 Ok(Statement::Unload {
12421 query,
12422 to,
12423 with: with_options,
12424 })
12425 }
12426
    /// Parses the `WHEN [NOT] MATCHED [BY SOURCE|TARGET] [AND <pred>] THEN
    /// <action>` clauses of a MERGE statement, until EOF or `;`.
    ///
    /// Validates action/kind combinations: UPDATE and DELETE are rejected in
    /// NOT MATCHED (by target) clauses, and INSERT is only allowed there.
    pub fn parse_merge_clauses(&mut self) -> Result<Vec<MergeClause>, ParserError> {
        let mut clauses = vec![];
        loop {
            if self.peek_token() == Token::EOF || self.peek_token() == Token::SemiColon {
                break;
            }
            self.expect_keyword(Keyword::WHEN)?;

            let mut clause_kind = MergeClauseKind::Matched;
            if self.parse_keyword(Keyword::NOT) {
                clause_kind = MergeClauseKind::NotMatched;
            }
            self.expect_keyword(Keyword::MATCHED)?;

            // `NOT MATCHED` may be refined with `BY SOURCE` / `BY TARGET`.
            if matches!(clause_kind, MergeClauseKind::NotMatched)
                && self.parse_keywords(&[Keyword::BY, Keyword::SOURCE])
            {
                clause_kind = MergeClauseKind::NotMatchedBySource;
            } else if matches!(clause_kind, MergeClauseKind::NotMatched)
                && self.parse_keywords(&[Keyword::BY, Keyword::TARGET])
            {
                clause_kind = MergeClauseKind::NotMatchedByTarget;
            }

            // Optional extra predicate: `AND <expr>`.
            let predicate = if self.parse_keyword(Keyword::AND) {
                Some(self.parse_expr()?)
            } else {
                None
            };

            self.expect_keyword(Keyword::THEN)?;

            let merge_clause = match self.parse_one_of_keywords(&[
                Keyword::UPDATE,
                Keyword::INSERT,
                Keyword::DELETE,
            ]) {
                Some(Keyword::UPDATE) => {
                    if matches!(
                        clause_kind,
                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
                    ) {
                        return Err(ParserError::ParserError(format!(
                            "UPDATE is not allowed in a {clause_kind} merge clause"
                        )));
                    }
                    self.expect_keyword(Keyword::SET)?;
                    MergeAction::Update {
                        assignments: self.parse_comma_separated(Parser::parse_assignment)?,
                    }
                }
                Some(Keyword::DELETE) => {
                    if matches!(
                        clause_kind,
                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
                    ) {
                        return Err(ParserError::ParserError(format!(
                            "DELETE is not allowed in a {clause_kind} merge clause"
                        )));
                    }
                    MergeAction::Delete
                }
                Some(Keyword::INSERT) => {
                    if !matches!(
                        clause_kind,
                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
                    ) {
                        return Err(ParserError::ParserError(format!(
                            "INSERT is not allowed in a {clause_kind} merge clause"
                        )));
                    }
                    let is_mysql = dialect_of!(self is MySqlDialect);

                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
                    // `INSERT ROW` shorthand vs. an explicit VALUES list.
                    let kind = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.parse_keyword(Keyword::ROW)
                    {
                        MergeInsertKind::Row
                    } else {
                        self.expect_keyword(Keyword::VALUES)?;
                        let values = self.parse_values(is_mysql)?;
                        MergeInsertKind::Values(values)
                    };
                    MergeAction::Insert(MergeInsertExpr { columns, kind })
                }
                _ => {
                    return Err(ParserError::ParserError(
                        "expected UPDATE, DELETE or INSERT in merge clause".to_string(),
                    ));
                }
            };
            clauses.push(MergeClause {
                clause_kind,
                predicate,
                action: merge_clause,
            });
        }
        Ok(clauses)
    }
12526
12527 pub fn parse_merge(&mut self) -> Result<Statement, ParserError> {
12528 let into = self.parse_keyword(Keyword::INTO);
12529
12530 let table = self.parse_table_factor()?;
12531
12532 self.expect_keyword(Keyword::USING)?;
12533 let source = self.parse_table_factor()?;
12534 self.expect_keyword(Keyword::ON)?;
12535 let on = self.parse_expr()?;
12536 let clauses = self.parse_merge_clauses()?;
12537
12538 Ok(Statement::Merge {
12539 into,
12540 table,
12541 source,
12542 on: Box::new(on),
12543 clauses,
12544 })
12545 }
12546
12547 fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
12548 match self.parse_value()? {
12549 v @ Value::SingleQuotedString(_) => Ok(v),
12550 v @ Value::DoubleQuotedString(_) => Ok(v),
12551 v @ Value::Number(_, _) => Ok(v),
12552 v @ Value::Placeholder(_) => Ok(v),
12553 _ => {
12554 self.prev_token();
12555 self.expected("number or string or ? placeholder", self.peek_token())
12556 }
12557 }
12558 }
12559
12560 pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
12562 let name = self.parse_object_name(false)?;
12563 if self.consume_token(&Token::LParen) {
12564 let value = self.parse_pragma_value()?;
12565 self.expect_token(&Token::RParen)?;
12566 Ok(Statement::Pragma {
12567 name,
12568 value: Some(value),
12569 is_eq: false,
12570 })
12571 } else if self.consume_token(&Token::Eq) {
12572 Ok(Statement::Pragma {
12573 name,
12574 value: Some(self.parse_pragma_value()?),
12575 is_eq: true,
12576 })
12577 } else {
12578 Ok(Statement::Pragma {
12579 name,
12580 value: None,
12581 is_eq: false,
12582 })
12583 }
12584 }
12585
    /// Parse an `INSTALL <extension_name>` statement (extension
    /// installation, e.g. as supported by DuckDB).
    pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
        let extension_name = self.parse_identifier(false)?;

        Ok(Statement::Install { extension_name })
    }
12592
12593 pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
12595 if self.dialect.supports_load_extension() {
12596 let extension_name = self.parse_identifier(false)?;
12597 Ok(Statement::Load { extension_name })
12598 } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
12599 let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
12600 self.expect_keyword(Keyword::INPATH)?;
12601 let inpath = self.parse_literal_string()?;
12602 let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
12603 self.expect_keyword(Keyword::INTO)?;
12604 self.expect_keyword(Keyword::TABLE)?;
12605 let table_name = self.parse_object_name(false)?;
12606 let partitioned = self.parse_insert_partition()?;
12607 let table_format = self.parse_load_data_table_format()?;
12608 Ok(Statement::LoadData {
12609 local,
12610 inpath,
12611 overwrite,
12612 table_name,
12613 partitioned,
12614 table_format,
12615 })
12616 } else {
12617 self.expected(
12618 "`DATA` or an extension name after `LOAD`",
12619 self.peek_token(),
12620 )
12621 }
12622 }
12623
12624 pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
12629 self.expect_keyword(Keyword::TABLE)?;
12630 let name = self.parse_object_name(false)?;
12631 let on_cluster = self.parse_optional_on_cluster()?;
12632
12633 let partition = if self.parse_keyword(Keyword::PARTITION) {
12634 if self.parse_keyword(Keyword::ID) {
12635 Some(Partition::Identifier(self.parse_identifier(false)?))
12636 } else {
12637 Some(Partition::Expr(self.parse_expr()?))
12638 }
12639 } else {
12640 None
12641 };
12642
12643 let include_final = self.parse_keyword(Keyword::FINAL);
12644 let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
12645 if self.parse_keyword(Keyword::BY) {
12646 Some(Deduplicate::ByExpression(self.parse_expr()?))
12647 } else {
12648 Some(Deduplicate::All)
12649 }
12650 } else {
12651 None
12652 };
12653
12654 Ok(Statement::OptimizeTable {
12655 name,
12656 on_cluster,
12657 partition,
12658 include_final,
12659 deduplicate,
12660 })
12661 }
12662
12663 pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
12669 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
12671 let name = self.parse_object_name(false)?;
12673 let mut data_type: Option<DataType> = None;
12675 if self.parse_keywords(&[Keyword::AS]) {
12676 data_type = Some(self.parse_data_type()?)
12677 }
12678 let sequence_options = self.parse_create_sequence_options()?;
12679 let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
12681 if self.parse_keywords(&[Keyword::NONE]) {
12682 Some(ObjectName(vec![Ident::new("NONE")]))
12683 } else {
12684 Some(self.parse_object_name(false)?)
12685 }
12686 } else {
12687 None
12688 };
12689 Ok(Statement::CreateSequence {
12690 temporary,
12691 if_not_exists,
12692 name,
12693 data_type,
12694 sequence_options,
12695 owned_by,
12696 })
12697 }
12698
12699 fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
12700 let mut sequence_options = vec![];
12701 if self.parse_keywords(&[Keyword::INCREMENT]) {
12703 if self.parse_keywords(&[Keyword::BY]) {
12704 sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
12705 } else {
12706 sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
12707 }
12708 }
12709 if self.parse_keyword(Keyword::MINVALUE) {
12711 sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
12712 } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
12713 sequence_options.push(SequenceOptions::MinValue(None));
12714 }
12715 if self.parse_keywords(&[Keyword::MAXVALUE]) {
12717 sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
12718 } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
12719 sequence_options.push(SequenceOptions::MaxValue(None));
12720 }
12721
12722 if self.parse_keywords(&[Keyword::START]) {
12724 if self.parse_keywords(&[Keyword::WITH]) {
12725 sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
12726 } else {
12727 sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
12728 }
12729 }
12730 if self.parse_keywords(&[Keyword::CACHE]) {
12732 sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
12733 }
12734 if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
12736 sequence_options.push(SequenceOptions::Cycle(true));
12737 } else if self.parse_keywords(&[Keyword::CYCLE]) {
12738 sequence_options.push(SequenceOptions::Cycle(false));
12739 }
12740
12741 Ok(sequence_options)
12742 }
12743
    /// Return the parser's current position: the index of the next token to
    /// be consumed within the token stream.
    pub fn index(&self) -> usize {
        self.index
    }
12748
12749 pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
12750 let ident = self.parse_identifier(false)?;
12751 self.expect_keyword(Keyword::AS)?;
12752
12753 let window_expr = if self.consume_token(&Token::LParen) {
12754 NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
12755 } else if self.dialect.supports_window_clause_named_window_reference() {
12756 NamedWindowExpr::NamedWindow(self.parse_identifier(false)?)
12757 } else {
12758 return self.expected("(", self.peek_token());
12759 };
12760
12761 Ok(NamedWindowDefinition(ident, window_expr))
12762 }
12763
12764 pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
12765 let name = self.parse_object_name(false)?;
12766 let params = self.parse_optional_procedure_parameters()?;
12767 self.expect_keyword(Keyword::AS)?;
12768 self.expect_keyword(Keyword::BEGIN)?;
12769 let statements = self.parse_statements()?;
12770 self.expect_keyword(Keyword::END)?;
12771 Ok(Statement::CreateProcedure {
12772 name,
12773 or_alter,
12774 params,
12775 body: statements,
12776 })
12777 }
12778
12779 pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
12780 let window_name = match self.peek_token().token {
12781 Token::Word(word) if word.keyword == Keyword::NoKeyword => {
12782 self.parse_optional_indent()?
12783 }
12784 _ => None,
12785 };
12786
12787 let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
12788 self.parse_comma_separated(Parser::parse_expr)?
12789 } else {
12790 vec![]
12791 };
12792 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12793 self.parse_comma_separated(Parser::parse_order_by_expr)?
12794 } else {
12795 vec![]
12796 };
12797
12798 let window_frame = if !self.consume_token(&Token::RParen) {
12799 let window_frame = self.parse_window_frame()?;
12800 self.expect_token(&Token::RParen)?;
12801 Some(window_frame)
12802 } else {
12803 None
12804 };
12805 Ok(WindowSpec {
12806 window_name,
12807 partition_by,
12808 order_by,
12809 window_frame,
12810 })
12811 }
12812
12813 pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
12814 let name = self.parse_object_name(false)?;
12815 self.expect_keyword(Keyword::AS)?;
12816
12817 let mut attributes = vec![];
12818 if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
12819 return Ok(Statement::CreateType {
12820 name,
12821 representation: UserDefinedTypeRepresentation::Composite { attributes },
12822 });
12823 }
12824
12825 loop {
12826 let attr_name = self.parse_identifier(false)?;
12827 let attr_data_type = self.parse_data_type()?;
12828 let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
12829 Some(self.parse_object_name(false)?)
12830 } else {
12831 None
12832 };
12833 attributes.push(UserDefinedTypeCompositeAttributeDef {
12834 name: attr_name,
12835 data_type: attr_data_type,
12836 collation: attr_collation,
12837 });
12838 let comma = self.consume_token(&Token::Comma);
12839 if self.consume_token(&Token::RParen) {
12840 break;
12842 } else if !comma {
12843 return self.expected("',' or ')' after attribute definition", self.peek_token());
12844 }
12845 }
12846
12847 Ok(Statement::CreateType {
12848 name,
12849 representation: UserDefinedTypeRepresentation::Composite { attributes },
12850 })
12851 }
12852
12853 fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
12854 self.expect_token(&Token::LParen)?;
12855 let partitions = self.parse_comma_separated(|p| p.parse_identifier(false))?;
12856 self.expect_token(&Token::RParen)?;
12857 Ok(partitions)
12858 }
12859
12860 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
12861 if dialect_of!(self is MySqlDialect | GenericDialect) {
12862 if self.parse_keyword(Keyword::FIRST) {
12863 Ok(Some(MySQLColumnPosition::First))
12864 } else if self.parse_keyword(Keyword::AFTER) {
12865 let ident = self.parse_identifier(false)?;
12866 Ok(Some(MySQLColumnPosition::After(ident)))
12867 } else {
12868 Ok(None)
12869 }
12870 } else {
12871 Ok(None)
12872 }
12873 }
12874
    /// Consume the parser and return the underlying token stream
    /// (tokens together with their source spans).
    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
        self.tokens
    }
12879
12880 fn peek_sub_query(&mut self) -> bool {
12882 if self
12883 .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
12884 .is_some()
12885 {
12886 self.prev_token();
12887 return true;
12888 }
12889 false
12890 }
12891
12892 fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
12893 let show_in;
12894 let mut filter_position = None;
12895 if self.dialect.supports_show_like_before_in() {
12896 if let Some(filter) = self.parse_show_statement_filter()? {
12897 filter_position = Some(ShowStatementFilterPosition::Infix(filter));
12898 }
12899 show_in = self.maybe_parse_show_stmt_in()?;
12900 } else {
12901 show_in = self.maybe_parse_show_stmt_in()?;
12902 if let Some(filter) = self.parse_show_statement_filter()? {
12903 filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
12904 }
12905 }
12906 let starts_with = self.maybe_parse_show_stmt_starts_with()?;
12907 let limit = self.maybe_parse_show_stmt_limit()?;
12908 let from = self.maybe_parse_show_stmt_from()?;
12909 Ok(ShowStatementOptions {
12910 filter_position,
12911 show_in,
12912 starts_with,
12913 limit,
12914 limit_from: from,
12915 })
12916 }
12917
12918 fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
12919 let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
12920 Some(Keyword::FROM) => ShowStatementInClause::FROM,
12921 Some(Keyword::IN) => ShowStatementInClause::IN,
12922 None => return Ok(None),
12923 _ => return self.expected("FROM or IN", self.peek_token()),
12924 };
12925
12926 let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
12927 Keyword::ACCOUNT,
12928 Keyword::DATABASE,
12929 Keyword::SCHEMA,
12930 Keyword::TABLE,
12931 Keyword::VIEW,
12932 ]) {
12933 Some(Keyword::DATABASE)
12935 if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
12936 | self.peek_keyword(Keyword::LIMIT) =>
12937 {
12938 (Some(ShowStatementInParentType::Database), None)
12939 }
12940 Some(Keyword::SCHEMA)
12941 if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
12942 | self.peek_keyword(Keyword::LIMIT) =>
12943 {
12944 (Some(ShowStatementInParentType::Schema), None)
12945 }
12946 Some(parent_kw) => {
12947 let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
12951 match parent_kw {
12952 Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
12953 Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
12954 Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
12955 Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
12956 Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
12957 _ => {
12958 return self.expected(
12959 "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
12960 self.peek_token(),
12961 )
12962 }
12963 }
12964 }
12965 None => {
12966 let mut parent_name = self.parse_object_name(false)?;
12969 if self
12970 .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
12971 .is_some()
12972 {
12973 parent_name.0.insert(0, self.parse_identifier(false)?);
12974 }
12975 (None, Some(parent_name))
12976 }
12977 };
12978
12979 Ok(Some(ShowStatementIn {
12980 clause,
12981 parent_type,
12982 parent_name,
12983 }))
12984 }
12985
12986 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
12987 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
12988 Ok(Some(self.parse_value()?))
12989 } else {
12990 Ok(None)
12991 }
12992 }
12993
12994 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
12995 if self.parse_keyword(Keyword::LIMIT) {
12996 Ok(self.parse_limit()?)
12997 } else {
12998 Ok(None)
12999 }
13000 }
13001
13002 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
13003 if self.parse_keyword(Keyword::FROM) {
13004 Ok(Some(self.parse_value()?))
13005 } else {
13006 Ok(None)
13007 }
13008 }
13009}
13010
impl Word {
    /// Convert this tokenizer `Word` into an AST [`Ident`], attaching the
    /// given `span` as the identifier's source location. The word's value
    /// and quote style are copied unchanged.
    pub fn to_ident(&self, span: Span) -> Ident {
        Ident {
            value: self.value.clone(),
            quote_style: self.quote_style,
            span,
        }
    }
}
13020
13021#[cfg(test)]
13022mod tests {
13023 use crate::test_utils::{all_dialects, TestedDialects};
13024
13025 use super::*;
13026
    /// Exercises `prev_token`/`next_token` bookkeeping, including backing up
    /// over — and re-reading — the trailing EOF token.
    #[test]
    fn test_prev_index() {
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            // next_token at the end keeps returning EOF
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            parser.prev_token();
        });
    }
13047
    /// `peek_tokens` returns a fixed-size lookahead window without advancing
    /// the parser; positions past the end of input are padded with EOF.
    #[test]
    fn test_peek_tokens() {
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            // Advance past "SELECT foo AS bar".
            for _ in 0..4 {
                parser.next_token();
            }

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }
13092
    /// Round-trip tests for `Parser::parse_data_type`: each input must parse
    /// to the expected `DataType` AND display back to the exact input string.
    #[cfg(test)]
    mod test_parse_data_type {
        use crate::ast::{
            CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
        };
        use crate::dialect::{AnsiDialect, GenericDialect};
        use crate::test_utils::TestedDialects;

        // Asserts both directions: parse($input) == $expected_type, and
        // $expected_type displays back to exactly $input.
        macro_rules! test_parse_data_type {
            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let data_type = parser.parse_data_type().unwrap();
                    assert_eq!($expected_type, data_type);
                    assert_eq!($input.to_string(), data_type.to_string());
                });
            }};
        }

        #[test]
        fn test_ansii_character_string_types() {
            // ANSI character string types: CHARACTER / CHAR / VARYING forms,
            // with optional length and CHARACTERS/OCTETS length units.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));

            test_parse_data_type!(
                dialect,
                "CHARACTER(20)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 CHARACTERS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 OCTETS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));

            test_parse_data_type!(
                dialect,
                "CHAR(20)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 CHARACTERS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 OCTETS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 CHARACTERS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 OCTETS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 CHARACTERS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 OCTETS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "VARCHAR(20)",
                DataType::Varchar(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );
        }

        #[test]
        fn test_ansii_character_large_object_types() {
            // Large-object character types: CHARACTER/CHAR LARGE OBJECT and
            // CLOB, each with an optional length.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT",
                DataType::CharacterLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT(20)",
                DataType::CharacterLargeObject(Some(20))
            );

            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT",
                DataType::CharLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT(20)",
                DataType::CharLargeObject(Some(20))
            );

            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
        }

        #[test]
        fn test_parse_custom_types() {
            // Unknown type names fall back to DataType::Custom, optionally
            // with string-typed arguments.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "GEOMETRY",
                DataType::Custom(ObjectName(vec!["GEOMETRY".into()]), vec![])
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT)",
                DataType::Custom(
                    ObjectName(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string()]
                )
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT, 4326)",
                DataType::Custom(
                    ObjectName(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string(), "4326".to_string()]
                )
            );
        }

        #[test]
        fn test_ansii_exact_numeric_types() {
            // Exact numerics: NUMERIC / DECIMAL / DEC with no args, precision
            // only, or precision and scale.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "NUMERIC(2)",
                DataType::Numeric(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "NUMERIC(2,10)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DECIMAL(2)",
                DataType::Decimal(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(2,10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DEC(2)",
                DataType::Dec(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DEC(2,10)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
            );
        }

        #[test]
        fn test_ansii_date_type() {
            // Date/time types: DATE, TIME and TIMESTAMP with optional
            // precision and WITH/WITHOUT TIME ZONE variants.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "DATE", DataType::Date);

            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));

            test_parse_data_type!(
                dialect,
                "TIME(6)",
                DataType::Time(Some(6), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITH TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITH TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITHOUT TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITHOUT TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP",
                DataType::Timestamp(None, TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22)",
                DataType::Timestamp(Some(22), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22) WITH TIME ZONE",
                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(33) WITHOUT TIME ZONE",
                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
            );
        }
    }
13415
    /// Round-trip tests for `parse_schema_name`: plain name, unnamed
    /// AUTHORIZATION, and name + AUTHORIZATION forms.
    #[test]
    fn test_parse_schema_name() {
        // Asserts both the parsed value and that it displays back to the
        // original input.
        macro_rules! test_parse_schema_name {
            ($input:expr, $expected_name:expr $(,)?) => {{
                all_dialects().run_parser_method(&*$input, |parser| {
                    let schema_name = parser.parse_schema_name().unwrap();
                    assert_eq!(schema_name, $expected_name);
                    assert_eq!(schema_name.to_string(), $input.to_string());
                });
            }};
        }

        let dummy_name = ObjectName(vec![Ident::new("dummy_name")]);
        let dummy_authorization = Ident::new("dummy_authorization");

        test_parse_schema_name!(
            format!("{dummy_name}"),
            SchemaName::Simple(dummy_name.clone())
        );

        test_parse_schema_name!(
            format!("AUTHORIZATION {dummy_authorization}"),
            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
        );
        test_parse_schema_name!(
            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
        );
    }
13448
    /// MySQL `INDEX`/`KEY` table constraints: optional name, optional
    /// `USING BTREE|HASH`, and the column list — all must round-trip
    /// through Display.
    #[test]
    fn mysql_parse_index_table_constraint() {
        macro_rules! test_parse_table_constraint {
            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
                    assert_eq!(constraint, $expected);
                    assert_eq!(constraint.to_string(), $input.to_string());
                });
            }};
        }

        let dialect =
            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);

        test_parse_table_constraint!(
            dialect,
            "INDEX (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: None,
                index_type: None,
                columns: vec![Ident::new("c1")],
            }
        );

        // KEY is a display-level synonym for INDEX.
        test_parse_table_constraint!(
            dialect,
            "KEY (c1)",
            TableConstraint::Index {
                display_as_key: true,
                name: None,
                index_type: None,
                columns: vec![Ident::new("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX 'index' (c1, c2)",
            TableConstraint::Index {
                display_as_key: false,
                name: Some(Ident::with_quote('\'', "index")),
                index_type: None,
                columns: vec![Ident::new("c1"), Ident::new("c2")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX USING BTREE (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: None,
                index_type: Some(IndexType::BTree),
                columns: vec![Ident::new("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX USING HASH (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: None,
                index_type: Some(IndexType::Hash),
                columns: vec![Ident::new("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX idx_name USING BTREE (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: Some(Ident::new("idx_name")),
                index_type: Some(IndexType::BTree),
                columns: vec![Ident::new("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX idx_name USING HASH (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: Some(Ident::new("idx_name")),
                index_type: Some(IndexType::Hash),
                columns: vec![Ident::new("c1")],
            }
        );
    }
13543
    /// Tokenizer errors surface as ParserError::TokenizerError and include
    /// the line/column of the offending input.
    #[test]
    fn test_tokenizer_error_loc() {
        let sql = "foo '";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::TokenizerError(
                "Unterminated string literal at Line: 1, Column: 5".to_string()
            ))
        );
    }
13555
    /// Parser errors include the location (line/column) of the unexpected
    /// token in their message.
    #[test]
    fn test_parser_error_loc() {
        let sql = "SELECT this is a syntax error";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::ParserError(
                "Expected: [NOT] NULL or TRUE|FALSE or [NOT] DISTINCT FROM after IS, found: a at Line: 1, Column: 16"
                    .to_string()
            ))
        );
    }
13568
    /// `EXPLAIN EXPLAIN ...` is rejected: EXPLAIN must be the root of the
    /// plan.
    #[test]
    fn test_nested_explain_error() {
        let sql = "EXPLAIN EXPLAIN SELECT 1";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::ParserError(
                "Explain must be root of the plan".to_string()
            ))
        );
    }
13580
    /// `parse_multipart_identifier` handles quoted parts containing dots,
    /// parens and escaped quotes, and tolerates whitespace around the
    /// separating periods.
    #[test]
    fn test_parse_multipart_identifier_positive() {
        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);

        // Quoted middle part with embedded dot, parens and doubled quote.
        let expected = vec![
            Ident {
                value: "CATALOG".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
            Ident {
                value: "F(o)o. \"bar".to_string(),
                quote_style: Some('"'),
                span: Span::empty(),
            },
            Ident {
                value: "table".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
        ];
        dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
            let actual = parser.parse_multipart_identifier().unwrap();
            assert_eq!(expected, actual);
        });

        // Whitespace around the period separator is ignored.
        let expected = vec![
            Ident {
                value: "CATALOG".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
            Ident {
                value: "table".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
        ];
        dialect.run_parser_method("CATALOG . table", |parser| {
            let actual = parser.parse_multipart_identifier().unwrap();
            assert_eq!(expected, actual);
        });
    }
13626
    /// Malformed multipart identifiers (empty input, stray `*`, trailing
    /// period) produce the expected error messages.
    #[test]
    fn test_parse_multipart_identifier_negative() {
        macro_rules! test_parse_multipart_identifier_error {
            ($input:expr, $expected_err:expr $(,)?) => {{
                all_dialects().run_parser_method(&*$input, |parser| {
                    let actual_err = parser.parse_multipart_identifier().unwrap_err();
                    assert_eq!(actual_err.to_string(), $expected_err);
                });
            }};
        }

        test_parse_multipart_identifier_error!(
            "",
            "sql parser error: Empty input when parsing identifier",
        );

        test_parse_multipart_identifier_error!(
            "*schema.table",
            "sql parser error: Unexpected token in identifier: *",
        );

        test_parse_multipart_identifier_error!(
            "schema.table*",
            "sql parser error: Unexpected token in identifier: *",
        );

        test_parse_multipart_identifier_error!(
            "schema.table.",
            "sql parser error: Trailing period in identifier",
        );

        test_parse_multipart_identifier_error!(
            "schema.*",
            "sql parser error: Unexpected token following period in identifier: *",
        );
    }
13663
13664 #[test]
13665 fn test_mysql_partition_selection() {
13666 let sql = "SELECT * FROM employees PARTITION (p0, p2)";
13667 let expected = vec!["p0", "p2"];
13668
13669 let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
13670 assert_eq!(ast.len(), 1);
13671 if let Statement::Query(v) = &ast[0] {
13672 if let SetExpr::Select(select) = &*v.body {
13673 assert_eq!(select.from.len(), 1);
13674 let from: &TableWithJoins = &select.from[0];
13675 let table_factor = &from.relation;
13676 if let TableFactor::Table { partitions, .. } = table_factor {
13677 let actual: Vec<&str> = partitions
13678 .iter()
13679 .map(|ident| ident.value.as_str())
13680 .collect();
13681 assert_eq!(expected, actual);
13682 }
13683 }
13684 } else {
13685 panic!("fail to parse mysql partition selection");
13686 }
13687 }
13688
13689 #[test]
13690 fn test_replace_into_placeholders() {
13691 let sql = "REPLACE INTO t (a) VALUES (&a)";
13692
13693 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
13694 }
13695
13696 #[test]
13697 fn test_replace_into_set() {
13698 let sql = "REPLACE INTO t SET a='1'";
13702
13703 assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
13704 }
13705
13706 #[test]
13707 fn test_replace_into_set_placeholder() {
13708 let sql = "REPLACE INTO t SET ?";
13709
13710 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
13711 }
13712
13713 #[test]
13714 fn test_replace_incomplete() {
13715 let sql = r#"REPLACE"#;
13716
13717 assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
13718 }
13719}