1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27
28use log::debug;
29
30use recursion::RecursionCounter;
31use IsLateral::*;
32use IsOptional::*;
33
34use crate::ast::helpers::stmt_create_table::{CreateTableBuilder, CreateTableConfiguration};
35use crate::ast::Statement::CreatePolicy;
36use crate::ast::*;
37use crate::dialect::*;
38use crate::keywords::{Keyword, ALL_KEYWORDS};
39use crate::tokenizer::*;
40
41mod alter;
42
/// Errors produced while parsing a SQL statement.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// Tokenization failed before parsing began; payload is the tokenizer's
    /// message (see the `From<TokenizerError>` impl below).
    TokenizerError(String),
    /// A syntax error detected by the parser itself.
    ParserError(String),
    /// The configured nesting-depth budget was exhausted (see `RecursionCounter`).
    RecursionLimitExceeded,
}
49
// A (keyword, optional column list) pair.
// NOTE(review): the code consuming this alias is outside this chunk — presumably
// privilege actions for GRANT/REVOKE; confirm against its uses.
type ParsedAction = (Keyword, Option<Vec<Ident>>);
52
// Builds an `Err(ParserError::ParserError(..))` whose message is `$MSG`
// immediately followed by the display of the source location `$loc`.
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
59
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Shared budget of remaining recursion depth. The counter and every
    /// guard it hands out point at the same `Rc<Cell<usize>>`, so spending
    /// and refunding depth stay in sync.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter allowing `remaining_depth` nested calls.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Spends one unit of depth. Returns a guard that refunds the unit
        /// when dropped, or `RecursionLimitExceeded` once the budget is gone.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            match self.remaining_depth.get().checked_sub(1) {
                None => Err(ParserError::RecursionLimitExceeded),
                Some(decremented) => {
                    self.remaining_depth.set(decremented);
                    Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
                }
            }
        }
    }

    /// Guard that restores one unit of recursion depth on drop.
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        fn drop(&mut self) {
            let refunded = self.remaining_depth.get() + 1;
            self.remaining_depth.set(refunded);
        }
    }
}
123
#[cfg(not(feature = "std"))]
mod recursion {
    /// No-op stand-in for the `std` recursion counter: in `no_std` builds
    /// depth tracking is disabled and `try_decrease` never fails.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        /// Accepts (and ignores) the requested depth limit.
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        /// Always succeeds; the returned guard does nothing.
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    /// Zero-sized guard matching the `std` API surface.
    pub struct DepthGuard {}
}
144
/// Whether a grammar element may be omitted (used by helper parse routines).
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}
150
/// Whether a derived table was introduced with the `LATERAL` keyword.
pub enum IsLateral {
    Lateral,
    NotLateral,
}
155
/// Result of parsing a select-item position: a plain expression, a
/// `alias.*` qualified wildcard, or a bare `*`.
pub enum WildcardExpr {
    Expr(Expr),
    QualifiedWildcard(ObjectName),
    Wildcard,
}
161
162impl From<TokenizerError> for ParserError {
163 fn from(e: TokenizerError) -> Self {
164 ParserError::TokenizerError(e.to_string())
165 }
166}
167
168impl fmt::Display for ParserError {
169 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
170 write!(
171 f,
172 "sql parser error: {}",
173 match self {
174 ParserError::TokenizerError(s) => s,
175 ParserError::ParserError(s) => s,
176 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
177 }
178 )
179 }
180}
181
// `ParserError` joins the standard error trait hierarchy when `std` is available.
#[cfg(feature = "std")]
impl std::error::Error for ParserError {}
184
/// Default recursion-depth budget used by `Parser::new`; override with
/// `Parser::with_recursion_limit`.
const DEFAULT_REMAINING_DEPTH: usize = 50;
187
// Newtype flag recording whether a trailing closing bracket was matched.
// NOTE(review): the code consuming this flag is outside this chunk — confirm
// its exact role against the type-parsing routines that return it.
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}
207
/// Tunable parsing behaviour, applied via `Parser::with_options`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Accept trailing commas in comma-separated lists. `Parser::new`
    /// defaults this from `Dialect::supports_trailing_commas`.
    pub trailing_commas: bool,
    /// Unescape string literals during tokenization (see `try_with_sql`).
    pub unescape: bool,
}

impl Default for ParserOptions {
    fn default() -> Self {
        ParserOptions {
            trailing_commas: false,
            unescape: true,
        }
    }
}

impl ParserOptions {
    /// Creates options with the default settings.
    pub fn new() -> Self {
        Self::default()
    }

    /// Builder-style setter for `trailing_commas`.
    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
        self.trailing_commas = trailing_commas;
        self
    }

    /// Builder-style setter for `unescape`.
    pub fn with_unescape(mut self, unescape: bool) -> Self {
        self.unescape = unescape;
        self
    }
}
255
/// Internal mode flag that alters expression parsing in certain contexts.
#[derive(Copy, Clone)]
enum ParserState {
    /// Ordinary parsing.
    Normal,
    /// Inside a `CONNECT BY` clause, where `PRIOR` is accepted as a prefix
    /// operator (see `parse_prefix`).
    ConnectBy,
}
265
/// A SQL parser over a pre-tokenized statement stream.
pub struct Parser<'a> {
    /// Tokens (with source locations) being parsed.
    tokens: Vec<TokenWithLocation>,
    /// Index of the first token not yet consumed.
    index: usize,
    /// Current parsing mode (see `ParserState`).
    state: ParserState,
    /// Dialect driving dialect-specific grammar decisions.
    dialect: &'a dyn Dialect,
    /// Behavioural options (trailing commas, unescaping).
    options: ParserOptions,
    /// Guards against unbounded recursion on deeply nested input.
    recursion_counter: RecursionCounter,
}
281
282impl<'a> Parser<'a> {
283 pub fn new(dialect: &'a dyn Dialect) -> Self {
299 Self {
300 tokens: vec![],
301 index: 0,
302 state: ParserState::Normal,
303 dialect,
304 recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
305 options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
306 }
307 }
308
    /// Replaces the recursion-depth budget (default: `DEFAULT_REMAINING_DEPTH`).
    /// Consumes and returns `self` for builder-style chaining.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }
332
    /// Replaces the parser options wholesale. Consumes and returns `self`
    /// for builder-style chaining.
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }
359
    /// Loads a token stream (with source locations) and rewinds to the start.
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithLocation>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }
366
367 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
369 let tokens_with_locations: Vec<TokenWithLocation> = tokens
371 .into_iter()
372 .map(|token| TokenWithLocation {
373 token,
374 location: Location { line: 0, column: 0 },
375 })
376 .collect();
377 self.with_tokens_with_locations(tokens_with_locations)
378 }
379
    /// Tokenizes `sql` using this parser's dialect and `unescape` option,
    /// then loads the resulting tokens. Tokenizer failures are converted to
    /// `ParserError::TokenizerError` via the `?`/`From` conversion.
    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
        debug!("Parsing sql '{}'...", sql);
        let tokens = Tokenizer::new(self.dialect, sql)
            .with_unescape(self.options.unescape)
            .tokenize_with_location()?;
        Ok(self.with_tokens_with_locations(tokens))
    }
393
394 pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
410 let mut stmts = Vec::new();
411 let mut expecting_statement_delimiter = false;
412 loop {
413 while self.consume_token(&Token::SemiColon) {
415 expecting_statement_delimiter = false;
416 }
417
418 match self.peek_token().token {
419 Token::EOF => break,
420
421 Token::Word(word) => {
423 if expecting_statement_delimiter && word.keyword == Keyword::END {
424 break;
425 }
426 }
427 _ => {}
428 }
429
430 if expecting_statement_delimiter {
431 return self.expected("end of statement", self.peek_token());
432 }
433
434 let statement = self.parse_statement()?;
435 stmts.push(statement);
436 expecting_statement_delimiter = true;
437 }
438 Ok(stmts)
439 }
440
441 pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
457 Parser::new(dialect).try_with_sql(sql)?.parse_statements()
458 }
459
    /// Parses a single top-level statement, dispatching on its first token.
    /// The dialect gets first refusal via `Dialect::parse_statement`. Does
    /// not consume a trailing semicolon (handled by `parse_statements`).
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        // Spend one unit of recursion depth; refunded when `_guard` drops.
        let _guard = self.recursion_counter.try_decrease()?;

        // Dialect hook: may fully take over statement parsing.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze(),
                // Queries: rewind so parse_query sees the leading keyword.
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES => {
                    self.prev_token();
                    self.parse_query().map(Statement::Query)
                }
                Keyword::TRUNCATE => self.parse_truncate(),
                // ATTACH has a DuckDB-specific form; other dialects use the
                // SQLite-style `ATTACH DATABASE <expr> AS <name>`.
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck(),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                Keyword::DELETE => self.parse_delete(),
                Keyword::INSERT => self.parse_insert(),
                Keyword::REPLACE => self.parse_replace(),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant(),
                Keyword::REVOKE => self.parse_revoke(),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(),
                // LISTEN/NOTIFY are gated on dialect capability flags.
                Keyword::LISTEN if self.dialect.supports_listen() => self.parse_listen(),
                Keyword::NOTIFY if self.dialect.supports_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => self.parse_unload(),
                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_install()
                }
                Keyword::LOAD if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_load()
                }
                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    self.parse_optimize_table()
                }
                _ => self.expected("an SQL statement", next_token),
            },
            // A statement may also begin with a parenthesized query.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Statement::Query)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
563
564 pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
565 let mut channel = None;
566 let mut tables: Vec<ObjectName> = vec![];
567 let mut read_lock = false;
568 let mut export = false;
569
570 if !dialect_of!(self is MySqlDialect | GenericDialect) {
571 return parser_err!("Unsupported statement FLUSH", self.peek_token().location);
572 }
573
574 let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
575 Some(FlushLocation::NoWriteToBinlog)
576 } else if self.parse_keyword(Keyword::LOCAL) {
577 Some(FlushLocation::Local)
578 } else {
579 None
580 };
581
582 let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
583 FlushType::BinaryLogs
584 } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
585 FlushType::EngineLogs
586 } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
587 FlushType::ErrorLogs
588 } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
589 FlushType::GeneralLogs
590 } else if self.parse_keywords(&[Keyword::HOSTS]) {
591 FlushType::Hosts
592 } else if self.parse_keyword(Keyword::PRIVILEGES) {
593 FlushType::Privileges
594 } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
595 FlushType::OptimizerCosts
596 } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
597 if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
598 channel = Some(self.parse_object_name(false).unwrap().to_string());
599 }
600 FlushType::RelayLogs
601 } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
602 FlushType::SlowLogs
603 } else if self.parse_keyword(Keyword::STATUS) {
604 FlushType::Status
605 } else if self.parse_keyword(Keyword::USER_RESOURCES) {
606 FlushType::UserResources
607 } else if self.parse_keywords(&[Keyword::LOGS]) {
608 FlushType::Logs
609 } else if self.parse_keywords(&[Keyword::TABLES]) {
610 loop {
611 let next_token = self.next_token();
612 match &next_token.token {
613 Token::Word(w) => match w.keyword {
614 Keyword::WITH => {
615 read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
616 }
617 Keyword::FOR => {
618 export = self.parse_keyword(Keyword::EXPORT);
619 }
620 Keyword::NoKeyword => {
621 self.prev_token();
622 tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
623 }
624 _ => {}
625 },
626 _ => {
627 break;
628 }
629 }
630 }
631
632 FlushType::Tables
633 } else {
634 return self.expected(
635 "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
636 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
637 self.peek_token(),
638 );
639 };
640
641 Ok(Statement::Flush {
642 object_type,
643 location,
644 channel,
645 read_lock,
646 export,
647 tables,
648 })
649 }
650
651 pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
652 let repair = self.parse_keyword(Keyword::REPAIR);
653 self.expect_keyword(Keyword::TABLE)?;
654 let table_name = self.parse_object_name(false)?;
655 let partition_action = self
656 .maybe_parse(|parser| {
657 let pa = match parser.parse_one_of_keywords(&[
658 Keyword::ADD,
659 Keyword::DROP,
660 Keyword::SYNC,
661 ]) {
662 Some(Keyword::ADD) => Some(AddDropSync::ADD),
663 Some(Keyword::DROP) => Some(AddDropSync::DROP),
664 Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
665 _ => None,
666 };
667 parser.expect_keyword(Keyword::PARTITIONS)?;
668 Ok(pa)
669 })?
670 .unwrap_or_default();
671 Ok(Statement::Msck {
672 repair,
673 table_name,
674 partition_action,
675 })
676 }
677
    /// Parses `TRUNCATE [TABLE] [ONLY] <name, ...> [PARTITION (...)]`
    /// with Postgres-only `RESTART/CONTINUE IDENTITY` and `CASCADE/RESTRICT`
    /// options, plus an optional `ON CLUSTER` clause.
    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);
        let only = self.parse_keyword(Keyword::ONLY);

        // One or more comma-separated table names.
        let table_names = self
            .parse_comma_separated(|p| p.parse_object_name(false))?
            .into_iter()
            .map(|n| TruncateTableTarget { name: n })
            .collect();

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        // Identity/cascade options are only recognized for Postgres-like
        // dialects; elsewhere they stay `None`.
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = if self.parse_keyword(Keyword::CASCADE) {
                Some(TruncateCascadeOption::Cascade)
            } else if self.parse_keyword(Keyword::RESTRICT) {
                Some(TruncateCascadeOption::Restrict)
            } else {
                None
            };
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Statement::Truncate {
            table_names,
            partitions,
            table,
            only,
            identity,
            cascade,
            on_cluster,
        })
    }
728
729 pub fn parse_attach_duckdb_database_options(
730 &mut self,
731 ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
732 if !self.consume_token(&Token::LParen) {
733 return Ok(vec![]);
734 }
735
736 let mut options = vec![];
737 loop {
738 if self.parse_keyword(Keyword::READ_ONLY) {
739 let boolean = if self.parse_keyword(Keyword::TRUE) {
740 Some(true)
741 } else if self.parse_keyword(Keyword::FALSE) {
742 Some(false)
743 } else {
744 None
745 };
746 options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
747 } else if self.parse_keyword(Keyword::TYPE) {
748 let ident = self.parse_identifier(false)?;
749 options.push(AttachDuckDBDatabaseOption::Type(ident));
750 } else {
751 return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
752 };
753
754 if self.consume_token(&Token::RParen) {
755 return Ok(options);
756 } else if self.consume_token(&Token::Comma) {
757 continue;
758 } else {
759 return self.expected("expected one of: ')', ','", self.peek_token());
760 }
761 }
762 }
763
764 pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
765 let database = self.parse_keyword(Keyword::DATABASE);
766 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
767 let database_path = self.parse_identifier(false)?;
768 let database_alias = if self.parse_keyword(Keyword::AS) {
769 Some(self.parse_identifier(false)?)
770 } else {
771 None
772 };
773
774 let attach_options = self.parse_attach_duckdb_database_options()?;
775 Ok(Statement::AttachDuckDBDatabase {
776 if_not_exists,
777 database,
778 database_path,
779 database_alias,
780 attach_options,
781 })
782 }
783
784 pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
785 let database = self.parse_keyword(Keyword::DATABASE);
786 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
787 let database_alias = self.parse_identifier(false)?;
788 Ok(Statement::DetachDuckDBDatabase {
789 if_exists,
790 database,
791 database_alias,
792 })
793 }
794
    /// Parses SQLite-style `ATTACH [DATABASE] <expr> AS <schema>`.
    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword(Keyword::AS)?;
        let schema_name = self.parse_identifier(false)?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }
806
    /// Parses Hive/Spark-style
    /// `ANALYZE TABLE <name> [PARTITION (...)] [FOR COLUMNS [c, ...]]
    ///  [CACHE METADATA] [NOSCAN] [COMPUTE STATISTICS]`,
    /// accepting the clauses in any order until none match.
    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword(Keyword::COLUMNS)?;

                    // The column list itself is optional; `maybe_parse`
                    // rewinds if no identifier list follows.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier(false))
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                // Any other token ends the clause list.
                _ => break,
            }
        }

        Ok(Statement::Analyze {
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }
862
    /// Parses an expression that may be a wildcard: `*`, `ident.*`,
    /// `a.b.*`, or — failing that — rewinds and parses a normal expression.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember the position so we can backtrack to a plain expression.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            // A (possibly quoted) identifier followed by `.` may begin a
            // qualified wildcard such as `tbl.*` or `schema.tbl.*`.
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token().token == Token::Period {
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.to_ident(),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        _ => unreachable!(), // matched by the outer pattern
                    }];

                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.to_ident()),
                            Token::SingleQuotedString(s) => {
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            // Terminating `*`: this is a qualified wildcard.
                            Token::Mul => {
                                return Ok(Expr::QualifiedWildcard(ObjectName(id_parts)));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            // A bare `*` is an unqualified wildcard.
            Token::Mul => {
                return Ok(Expr::Wildcard);
            }
            _ => (),
        };

        // Not a wildcard: rewind and parse as an ordinary expression.
        self.index = index;
        self.parse_expr()
    }
905
    /// Parses a full expression starting at the dialect's lowest ("unknown")
    /// precedence level.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }
910
911 pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
913 let _guard = self.recursion_counter.try_decrease()?;
914 debug!("parsing expr");
915 let mut expr = self.parse_prefix()?;
916 debug!("prefix: {:?}", expr);
917 loop {
918 let next_precedence = self.get_next_precedence()?;
919 debug!("next precedence: {:?}", next_precedence);
920
921 if precedence >= next_precedence {
922 break;
923 }
924
925 expr = self.parse_infix(expr, next_precedence)?;
926 }
927 Ok(expr)
928 }
929
930 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
931 let condition = self.parse_expr()?;
932 let message = if self.parse_keyword(Keyword::AS) {
933 Some(self.parse_expr()?)
934 } else {
935 None
936 };
937
938 Ok(Statement::Assert { condition, message })
939 }
940
941 pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
942 let name = self.parse_identifier(false)?;
943 Ok(Statement::Savepoint { name })
944 }
945
946 pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
947 let _ = self.parse_keyword(Keyword::SAVEPOINT);
948 let name = self.parse_identifier(false)?;
949
950 Ok(Statement::ReleaseSavepoint { name })
951 }
952
    /// Parses `LISTEN <channel>` (only reached when the dialect reports
    /// `supports_listen`, see `parse_statement`).
    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
        let channel = self.parse_identifier(false)?;
        Ok(Statement::LISTEN { channel })
    }
957
958 pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
959 let channel = self.parse_identifier(false)?;
960 let payload = if self.consume_token(&Token::Comma) {
961 Some(self.parse_literal_string()?)
962 } else {
963 None
964 };
965 Ok(Statement::NOTIFY { channel, payload })
966 }
967
    /// Parses the prefix (leading) portion of an expression: literals,
    /// identifiers and compound identifiers, function calls, unary operators,
    /// parenthesized expressions/subqueries/tuples, and special forms such as
    /// CASE/CAST/EXISTS. Infix continuation is handled by `parse_subexpr`.
    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
        // Dialect hook: may fully take over prefix parsing.
        if let Some(prefix) = self.dialect.parse_prefix(self) {
            return prefix;
        }

        // First try a typed string literal such as `DATE '...'` (INTERVAL is
        // handled specially). `maybe_parse` rewinds on failure. Custom data
        // types are rejected here with a throwaway error so that ordinary
        // identifiers are not mistaken for type names.
        let loc = self.peek_token().location;
        let opt_expr = self.maybe_parse(|parser| {
            match parser.parse_data_type()? {
                DataType::Interval => parser.parse_interval(),
                DataType::Custom(..) => parser_err!("dummy", loc),
                data_type => Ok(Expr::TypedString {
                    data_type,
                    value: parser.parse_literal_string()?,
                }),
            }
        })?;

        if let Some(expr) = opt_expr {
            return Ok(expr);
        }

        let next_token = self.next_token();
        let expr = match next_token.token {
            Token::Word(w) => match w.keyword {
                // Boolean / NULL literals re-parse as values.
                Keyword::TRUE | Keyword::FALSE | Keyword::NULL => {
                    self.prev_token();
                    Ok(Expr::Value(self.parse_value()?))
                }
                // Postgres: niladic "functions" callable without parentheses.
                Keyword::CURRENT_CATALOG
                | Keyword::CURRENT_USER
                | Keyword::SESSION_USER
                | Keyword::USER
                    if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
                {
                    Ok(Expr::Function(Function {
                        name: ObjectName(vec![w.to_ident()]),
                        parameters: FunctionArguments::None,
                        args: FunctionArguments::None,
                        null_treatment: None,
                        filter: None,
                        over: None,
                        within_group: vec![],
                    }))
                }
                Keyword::CURRENT_TIMESTAMP
                | Keyword::CURRENT_TIME
                | Keyword::CURRENT_DATE
                | Keyword::LOCALTIME
                | Keyword::LOCALTIMESTAMP => {
                    self.parse_time_functions(ObjectName(vec![w.to_ident()]))
                }
                Keyword::CASE => self.parse_case_expr(),
                Keyword::CONVERT => self.parse_convert_expr(false),
                Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => self.parse_convert_expr(true),
                Keyword::CAST => self.parse_cast_expr(CastKind::Cast),
                Keyword::TRY_CAST => self.parse_cast_expr(CastKind::TryCast),
                Keyword::SAFE_CAST => self.parse_cast_expr(CastKind::SafeCast),
                // EXISTS subquery — on Databricks, only when followed by
                // SELECT/WITH (the condition below mirrors that exactly).
                Keyword::EXISTS
                    if !dialect_of!(self is DatabricksDialect)
                        || matches!(
                            self.peek_nth_token(1).token,
                            Token::Word(Word {
                                keyword: Keyword::SELECT | Keyword::WITH,
                                ..
                            })
                        ) =>
                {
                    self.parse_exists_expr(false)
                }
                Keyword::EXTRACT => self.parse_extract_expr(),
                Keyword::CEIL => self.parse_ceil_floor_expr(true),
                Keyword::FLOOR => self.parse_ceil_floor_expr(false),
                Keyword::POSITION if self.peek_token().token == Token::LParen => {
                    self.parse_position_expr(w.to_ident())
                }
                Keyword::SUBSTRING => self.parse_substring_expr(),
                Keyword::OVERLAY => self.parse_overlay_expr(),
                Keyword::TRIM => self.parse_trim_expr(),
                Keyword::INTERVAL => self.parse_interval(),
                // `ARRAY[...]` literal.
                Keyword::ARRAY if self.peek_token() == Token::LBracket => {
                    self.expect_token(&Token::LBracket)?;
                    self.parse_array_expr(true)
                }
                // `ARRAY(subquery)` — not for dialects where ARRAY(...) has
                // other meanings.
                Keyword::ARRAY
                    if self.peek_token() == Token::LParen
                        && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
                {
                    self.expect_token(&Token::LParen)?;
                    let query = self.parse_query()?;
                    self.expect_token(&Token::RParen)?;
                    Ok(Expr::Function(Function {
                        name: ObjectName(vec![w.to_ident()]),
                        parameters: FunctionArguments::None,
                        args: FunctionArguments::Subquery(query),
                        filter: None,
                        null_treatment: None,
                        over: None,
                        within_group: vec![],
                    }))
                }
                Keyword::NOT => self.parse_not(),
                Keyword::MATCH if dialect_of!(self is MySqlDialect | GenericDialect) => {
                    self.parse_match_against()
                }
                Keyword::STRUCT if dialect_of!(self is BigQueryDialect | GenericDialect) => {
                    self.prev_token();
                    self.parse_bigquery_struct_literal()
                }
                // `PRIOR expr` is only legal inside CONNECT BY.
                Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                    let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                    Ok(Expr::Prior(Box::new(expr)))
                }
                Keyword::MAP if self.peek_token() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                    self.parse_duckdb_map_literal()
                }
                // Not a recognized keyword: identifier, compound identifier,
                // function call, introduced string, or lambda parameter.
                _ => match self.peek_token().token {
                    Token::LParen | Token::Period => {
                        let mut id_parts: Vec<Ident> = vec![w.to_ident()];
                        let mut ends_with_wildcard = false;
                        while self.consume_token(&Token::Period) {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Word(w) => id_parts.push(w.to_ident()),
                                Token::Mul => {
                                    // `a.b.*` is only accepted as a wildcard
                                    // expression on Postgres.
                                    if dialect_of!(self is PostgreSqlDialect) {
                                        ends_with_wildcard = true;
                                        break;
                                    } else {
                                        return self
                                            .expected("an identifier after '.'", next_token);
                                    }
                                }
                                Token::SingleQuotedString(s) => {
                                    id_parts.push(Ident::with_quote('\'', s))
                                }
                                _ => {
                                    return self
                                        .expected("an identifier or a '*' after '.'", next_token);
                                }
                            }
                        }

                        if ends_with_wildcard {
                            Ok(Expr::QualifiedWildcard(ObjectName(id_parts)))
                        } else if self.consume_token(&Token::LParen) {
                            // Snowflake/MSSQL `(+)` outer-join marker.
                            if dialect_of!(self is SnowflakeDialect | MsSqlDialect)
                                && self.consume_tokens(&[Token::Plus, Token::RParen])
                            {
                                Ok(Expr::OuterJoin(Box::new(
                                    match <[Ident; 1]>::try_from(id_parts) {
                                        Ok([ident]) => Expr::Identifier(ident),
                                        Err(parts) => Expr::CompoundIdentifier(parts),
                                    },
                                )))
                            } else {
                                self.prev_token();
                                self.parse_function(ObjectName(id_parts))
                            }
                        } else {
                            Ok(Expr::CompoundIdentifier(id_parts))
                        }
                    }
                    // `_introducer'string'` (e.g. charset introducers) — only
                    // when the word begins with an underscore.
                    Token::SingleQuotedString(_)
                    | Token::DoubleQuotedString(_)
                    | Token::HexStringLiteral(_)
                        if w.value.starts_with('_') =>
                    {
                        Ok(Expr::IntroducedString {
                            introducer: w.value,
                            value: self.parse_introduced_string_value()?,
                        })
                    }
                    // Single-parameter lambda: `x -> body`.
                    Token::Arrow if self.dialect.supports_lambda_functions() => {
                        self.expect_token(&Token::Arrow)?;
                        return Ok(Expr::Lambda(LambdaFunction {
                            params: OneOrManyWithParens::One(w.to_ident()),
                            body: Box::new(self.parse_expr()?),
                        }));
                    }
                    _ => Ok(Expr::Identifier(w.to_ident())),
                },
            },
            Token::LBracket => self.parse_array_expr(false),
            // Unary plus/minus.
            tok @ Token::Minus | tok @ Token::Plus => {
                let op = if tok == Token::Plus {
                    UnaryOperator::Plus
                } else {
                    UnaryOperator::Minus
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
                    ),
                })
            }
            // Postgres-only prefix operators.
            tok @ Token::DoubleExclamationMark
            | tok @ Token::PGSquareRoot
            | tok @ Token::PGCubeRoot
            | tok @ Token::AtSign
            | tok @ Token::Tilde
                if dialect_of!(self is PostgreSqlDialect) =>
            {
                let op = match tok {
                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
                    Token::AtSign => UnaryOperator::PGAbs,
                    Token::Tilde => UnaryOperator::PGBitwiseNot,
                    _ => unreachable!(), // covered by the outer pattern
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
                    ),
                })
            }
            Token::EscapedStringLiteral(_) if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::UnicodeStringLiteral(_) => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            // All remaining literal token kinds re-parse as values.
            Token::Number(_, _)
            | Token::SingleQuotedString(_)
            | Token::DoubleQuotedString(_)
            | Token::TripleSingleQuotedString(_)
            | Token::TripleDoubleQuotedString(_)
            | Token::DollarQuotedString(_)
            | Token::SingleQuotedByteStringLiteral(_)
            | Token::DoubleQuotedByteStringLiteral(_)
            | Token::TripleSingleQuotedByteStringLiteral(_)
            | Token::TripleDoubleQuotedByteStringLiteral(_)
            | Token::SingleQuotedRawStringLiteral(_)
            | Token::DoubleQuotedRawStringLiteral(_)
            | Token::TripleSingleQuotedRawStringLiteral(_)
            | Token::TripleDoubleQuotedRawStringLiteral(_)
            | Token::NationalStringLiteral(_)
            | Token::HexStringLiteral(_) => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            // `(` — subquery, lambda parameter list, nested expression, or
            // tuple; optionally followed by `.field` composite access.
            Token::LParen => {
                let expr = if let Some(expr) = self.try_parse_expr_sub_query()? {
                    expr
                } else if let Some(lambda) = self.try_parse_lambda()? {
                    return Ok(lambda);
                } else {
                    let exprs = self.parse_comma_separated(Parser::parse_expr)?;
                    match exprs.len() {
                        0 => unreachable!(), // parse_comma_separated yields >= 1
                        1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
                        _ => Expr::Tuple(exprs),
                    }
                };
                self.expect_token(&Token::RParen)?;
                if !self.consume_token(&Token::Period) {
                    Ok(expr)
                } else {
                    let tok = self.next_token();
                    let key = match tok.token {
                        Token::Word(word) => word.to_ident(),
                        _ => {
                            return parser_err!(
                                format!("Expected identifier, found: {tok}"),
                                tok.location
                            )
                        }
                    };
                    Ok(Expr::CompositeAccess {
                        expr: Box::new(expr),
                        key,
                    })
                }
            }
            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::LBrace if self.dialect.supports_dictionary_syntax() => {
                self.prev_token();
                self.parse_duckdb_struct_literal()
            }
            _ => self.expected("an expression", next_token),
        }?;

        // Optional trailing COLLATE clause applies to whatever was parsed.
        if self.parse_keyword(Keyword::COLLATE) {
            Ok(Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            })
        } else {
            Ok(expr)
        }
    }
1300
1301 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
1302 self.expect_token(&Token::LParen)?;
1303 let options = self.parse_comma_separated(Self::parse_utility_option)?;
1304 self.expect_token(&Token::RParen)?;
1305
1306 Ok(options)
1307 }
1308
1309 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
1310 let name = self.parse_identifier(false)?;
1311
1312 let next_token = self.peek_token();
1313 if next_token == Token::Comma || next_token == Token::RParen {
1314 return Ok(UtilityOption { name, arg: None });
1315 }
1316 let arg = self.parse_expr()?;
1317
1318 Ok(UtilityOption {
1319 name,
1320 arg: Some(arg),
1321 })
1322 }
1323
1324 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
1325 if !self.peek_sub_query() {
1326 return Ok(None);
1327 }
1328
1329 Ok(Some(Expr::Subquery(self.parse_query()?)))
1330 }
1331
    /// Attempt to parse a multi-parameter lambda `(a, b, ...) -> body`.
    ///
    /// The closure expects `)` first, so the opening `(` must already have
    /// been consumed by the caller. Returns `Ok(None)` when the dialect has
    /// no lambda support or when the speculative parse fails (`maybe_parse`
    /// rewinds the token stream on failure).
    fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
        if !self.dialect.supports_lambda_functions() {
            return Ok(None);
        }
        self.maybe_parse(|p| {
            let params = p.parse_comma_separated(|p| p.parse_identifier(false))?;
            p.expect_token(&Token::RParen)?;
            p.expect_token(&Token::Arrow)?;
            let expr = p.parse_expr()?;
            Ok(Expr::Lambda(LambdaFunction {
                params: OneOrManyWithParens::Many(params),
                body: Box::new(expr),
            }))
        })
    }
1347
    /// Parse a function call whose name has already been consumed: the
    /// parenthesized argument list plus optional trailing clauses
    /// (`WITHIN GROUP`, `FILTER`, null treatment, `OVER`).
    pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Snowflake allows a bare subquery as the sole "argument".
        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::Function(Function {
                name,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            }));
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // ClickHouse-style parameterized functions: `fn(params)(args)` —
        // if a second list follows, the first one was the parameters.
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        // `WITHIN GROUP (ORDER BY ...)` for ordered-set aggregates.
        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        // `FILTER (WHERE <expr>)`, only for dialects that support it.
        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // Only look for a trailing IGNORE/RESPECT NULLS clause when none was
        // already captured inside the argument list itself.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        // `OVER (...)` inline window spec, or `OVER name` for a named window.
        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                Some(WindowType::NamedWindow(self.parse_identifier(false)?))
            }
        } else {
            None
        };

        Ok(Expr::Function(Function {
            name,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        }))
    }
1433
1434 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
1436 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
1437 Some(keyword) => {
1438 self.expect_keyword(Keyword::NULLS)?;
1439
1440 Ok(match keyword {
1441 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
1442 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
1443 _ => None,
1444 })
1445 }
1446 None => Ok(None),
1447 }
1448 }
1449
1450 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
1451 let args = if self.consume_token(&Token::LParen) {
1452 FunctionArguments::List(self.parse_function_argument_list()?)
1453 } else {
1454 FunctionArguments::None
1455 };
1456 Ok(Expr::Function(Function {
1457 name,
1458 parameters: FunctionArguments::None,
1459 args,
1460 filter: None,
1461 over: None,
1462 null_treatment: None,
1463 within_group: vec![],
1464 }))
1465 }
1466
    /// Parse the units keyword of a window frame clause:
    /// `ROWS`, `RANGE`, or `GROUPS`.
    pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::ROWS => Ok(WindowFrameUnits::Rows),
                Keyword::RANGE => Ok(WindowFrameUnits::Range),
                Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
                // `expected` always returns Err; `?` propagates it here.
                _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
            },
            _ => self.expected("ROWS, RANGE, GROUPS", next_token),
        }
    }
1479
1480 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
1481 let units = self.parse_window_frame_units()?;
1482 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
1483 let start_bound = self.parse_window_frame_bound()?;
1484 self.expect_keyword(Keyword::AND)?;
1485 let end_bound = Some(self.parse_window_frame_bound()?);
1486 (start_bound, end_bound)
1487 } else {
1488 (self.parse_window_frame_bound()?, None)
1489 };
1490 Ok(WindowFrame {
1491 units,
1492 start_bound,
1493 end_bound,
1494 })
1495 }
1496
    /// Parse a window frame bound: `CURRENT ROW`,
    /// `UNBOUNDED PRECEDING/FOLLOWING`, or `<expr> PRECEDING/FOLLOWING`.
    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
            Ok(WindowFrameBound::CurrentRow)
        } else {
            // A `None` offset encodes UNBOUNDED.
            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
                None
            } else {
                Some(Box::new(match self.peek_token().token {
                    // A quoted string here is an interval literal,
                    // e.g. `RANGE '1 day' PRECEDING`.
                    Token::SingleQuotedString(_) => self.parse_interval()?,
                    _ => self.parse_expr()?,
                }))
            };
            if self.parse_keyword(Keyword::PRECEDING) {
                Ok(WindowFrameBound::Preceding(rows))
            } else if self.parse_keyword(Keyword::FOLLOWING) {
                Ok(WindowFrameBound::Following(rows))
            } else {
                self.expected("PRECEDING or FOLLOWING", self.peek_token())
            }
        }
    }
1519
    /// Parse one element of a GROUP BY list. For dialects with extended
    /// GROUP BY support this recognizes `GROUPING SETS (...)`, `CUBE (...)`,
    /// `ROLLUP (...)`, and the empty grouping set `()`; otherwise it falls
    /// back to a plain expression.
    fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
        if self.dialect.supports_group_by_expr() {
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                // Grouping sets keep singletons parenthesized
                // (lift_singleton = false) and allow empty tuples.
                let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
                self.expect_token(&Token::RParen)?;
                Ok(Expr::GroupingSets(result))
            } else if self.parse_keyword(Keyword::CUBE) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
                self.expect_token(&Token::RParen)?;
                Ok(Expr::Cube(result))
            } else if self.parse_keyword(Keyword::ROLLUP) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
                self.expect_token(&Token::RParen)?;
                Ok(Expr::Rollup(result))
            } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
                // `GROUP BY ()`: the empty grouping set.
                Ok(Expr::Tuple(vec![]))
            } else {
                self.parse_expr()
            }
        } else {
            self.parse_expr()
        }
    }
1551
1552 fn parse_tuple(
1556 &mut self,
1557 lift_singleton: bool,
1558 allow_empty: bool,
1559 ) -> Result<Vec<Expr>, ParserError> {
1560 if lift_singleton {
1561 if self.consume_token(&Token::LParen) {
1562 let result = if allow_empty && self.consume_token(&Token::RParen) {
1563 vec![]
1564 } else {
1565 let result = self.parse_comma_separated(Parser::parse_expr)?;
1566 self.expect_token(&Token::RParen)?;
1567 result
1568 };
1569 Ok(result)
1570 } else {
1571 Ok(vec![self.parse_expr()?])
1572 }
1573 } else {
1574 self.expect_token(&Token::LParen)?;
1575 let result = if allow_empty && self.consume_token(&Token::RParen) {
1576 vec![]
1577 } else {
1578 let result = self.parse_comma_separated(Parser::parse_expr)?;
1579 self.expect_token(&Token::RParen)?;
1580 result
1581 };
1582 Ok(result)
1583 }
1584 }
1585
    /// Parse a CASE expression after the CASE keyword, handling both the
    /// "simple" form (`CASE <operand> WHEN ...`) and the "searched" form
    /// (`CASE WHEN <condition> ...`).
    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
        let mut operand = None;
        // If WHEN is not first, this is the simple form with an operand.
        if !self.parse_keyword(Keyword::WHEN) {
            operand = Some(Box::new(self.parse_expr()?));
            self.expect_keyword(Keyword::WHEN)?;
        }
        let mut conditions = vec![];
        let mut results = vec![];
        // One iteration per WHEN ... THEN ... arm.
        loop {
            conditions.push(self.parse_expr()?);
            self.expect_keyword(Keyword::THEN)?;
            results.push(self.parse_expr()?);
            if !self.parse_keyword(Keyword::WHEN) {
                break;
            }
        }
        let else_result = if self.parse_keyword(Keyword::ELSE) {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };
        self.expect_keyword(Keyword::END)?;
        Ok(Expr::Case {
            operand,
            conditions,
            results,
            else_result,
        })
    }
1615
1616 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
1617 if self.parse_keyword(Keyword::FORMAT) {
1618 let value = self.parse_value()?;
1619 match self.parse_optional_time_zone()? {
1620 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
1621 None => Ok(Some(CastFormat::Value(value))),
1622 }
1623 } else {
1624 Ok(None)
1625 }
1626 }
1627
1628 pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
1629 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
1630 self.parse_value().map(Some)
1631 } else {
1632 Ok(None)
1633 }
1634 }
1635
    /// Parse the MSSQL form of CONVERT:
    /// `CONVERT(<data_type>, <expr> [, <style> ...])`, where the target type
    /// comes before the value.
    fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let data_type = self.parse_data_type()?;
        self.expect_token(&Token::Comma)?;
        let expr = self.parse_expr()?;
        // Optional style arguments, e.g. CONVERT(VARCHAR, date, 112).
        let styles = if self.consume_token(&Token::Comma) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            Default::default()
        };
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Convert {
            is_try,
            expr: Box::new(expr),
            data_type: Some(data_type),
            charset: None,
            target_before_value: true,
            styles,
        })
    }
1657
    /// Parse a CONVERT/TRY_CONVERT expression after its keyword.
    /// Dispatches to the MSSQL type-first form when the dialect requires it;
    /// otherwise handles `CONVERT(expr USING charset)` and
    /// `CONVERT(expr, data_type [CHARACTER SET charset])`.
    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
        if self.dialect.convert_type_before_value() {
            return self.parse_mssql_convert(is_try);
        }
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        // MySQL-style transcoding form: CONVERT(expr USING charset).
        if self.parse_keyword(Keyword::USING) {
            let charset = self.parse_object_name(false)?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::Convert {
                is_try,
                expr: Box::new(expr),
                data_type: None,
                charset: Some(charset),
                target_before_value: false,
                styles: vec![],
            });
        }
        self.expect_token(&Token::Comma)?;
        let data_type = self.parse_data_type()?;
        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Convert {
            is_try,
            expr: Box::new(expr),
            data_type: Some(data_type),
            charset,
            target_before_value: false,
            styles: vec![],
        })
    }
1697
1698 pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
1700 self.expect_token(&Token::LParen)?;
1701 let expr = self.parse_expr()?;
1702 self.expect_keyword(Keyword::AS)?;
1703 let data_type = self.parse_data_type()?;
1704 let format = self.parse_optional_cast_format()?;
1705 self.expect_token(&Token::RParen)?;
1706 Ok(Expr::Cast {
1707 kind,
1708 expr: Box::new(expr),
1709 data_type,
1710 format,
1711 })
1712 }
1713
1714 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
1716 self.expect_token(&Token::LParen)?;
1717 let exists_node = Expr::Exists {
1718 negated,
1719 subquery: self.parse_query()?,
1720 };
1721 self.expect_token(&Token::RParen)?;
1722 Ok(exists_node)
1723 }
1724
    /// Parse the body of `EXTRACT(<field> FROM <expr>)` (standard syntax) or
    /// `EXTRACT(<field>, <expr>)` (Snowflake/generic comma syntax).
    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let field = self.parse_date_time_field()?;

        let syntax = if self.parse_keyword(Keyword::FROM) {
            ExtractSyntax::From
        } else if self.consume_token(&Token::Comma)
            && dialect_of!(self is SnowflakeDialect | GenericDialect)
        {
            // NOTE(review): the comma is consumed before the dialect check,
            // so other dialects reach the error below with the comma already
            // eaten — confirm this ordering is intended.
            ExtractSyntax::Comma
        } else {
            return Err(ParserError::ParserError(
                "Expected 'FROM' or ','".to_string(),
            ));
        };

        let expr = self.parse_expr()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Extract {
            field,
            expr: Box::new(expr),
            syntax,
        })
    }
1749
    /// Parse the body of `CEIL(...)` / `FLOOR(...)`, supporting
    /// `CEIL(expr)`, `CEIL(expr TO datetime_field)`, and `CEIL(expr, scale)`.
    pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        // `TO <field>` rounds to a date/time unit; `, <number>` gives a scale.
        let field = if self.parse_keyword(Keyword::TO) {
            CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
        } else if self.consume_token(&Token::Comma) {
            match self.parse_value()? {
                Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
                _ => {
                    return Err(ParserError::ParserError(
                        "Scale field can only be of number type".to_string(),
                    ))
                }
            }
        } else {
            // Plain CEIL(expr): no unit or scale.
            CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
        };
        self.expect_token(&Token::RParen)?;
        if is_ceil {
            Ok(Expr::Ceil {
                expr: Box::new(expr),
                field,
            })
        } else {
            Ok(Expr::Floor {
                expr: Box::new(expr),
                field,
            })
        }
    }
1783
    /// Parse a POSITION expression after the POSITION identifier.
    /// Speculatively tries the standard `POSITION(<expr> IN <expr>)` form;
    /// on failure rewinds and re-parses as an ordinary function call.
    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
        // Parse the needle below BETWEEN precedence so the `IN` keyword is
        // not swallowed as part of the sub-expression.
        let between_prec = self.dialect.prec_value(Precedence::Between);
        let position_expr = self.maybe_parse(|p| {
            p.expect_token(&Token::LParen)?;

            let expr = p.parse_subexpr(between_prec)?;
            p.expect_keyword(Keyword::IN)?;
            let from = p.parse_expr()?;
            p.expect_token(&Token::RParen)?;
            Ok(Expr::Position {
                expr: Box::new(expr),
                r#in: Box::new(from),
            })
        })?;
        match position_expr {
            Some(expr) => Ok(expr),
            // Fall back to `position(arg1, arg2, ...)` as a regular call.
            None => self.parse_function(ObjectName(vec![ident])),
        }
    }
1807
    /// Parse the body of a SUBSTRING call, accepting both the SQL-standard
    /// `SUBSTRING(expr FROM start FOR len)` and the comma form
    /// `SUBSTRING(expr, start, len)`.
    pub fn parse_substring_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        let mut from_expr = None;
        // `special` records that the comma form was used so the AST can be
        // rendered back in the same style.
        let special = self.consume_token(&Token::Comma);
        if special || self.parse_keyword(Keyword::FROM) {
            from_expr = Some(self.parse_expr()?);
        }

        let mut to_expr = None;
        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
            to_expr = Some(self.parse_expr()?);
        }
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Substring {
            expr: Box::new(expr),
            substring_from: from_expr.map(Box::new),
            substring_for: to_expr.map(Box::new),
            special,
        })
    }
1831
1832 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
1833 self.expect_token(&Token::LParen)?;
1835 let expr = self.parse_expr()?;
1836 self.expect_keyword(Keyword::PLACING)?;
1837 let what_expr = self.parse_expr()?;
1838 self.expect_keyword(Keyword::FROM)?;
1839 let from_expr = self.parse_expr()?;
1840 let mut for_expr = None;
1841 if self.parse_keyword(Keyword::FOR) {
1842 for_expr = Some(self.parse_expr()?);
1843 }
1844 self.expect_token(&Token::RParen)?;
1845
1846 Ok(Expr::Overlay {
1847 expr: Box::new(expr),
1848 overlay_what: Box::new(what_expr),
1849 overlay_from: Box::new(from_expr),
1850 overlay_for: for_expr.map(Box::new),
1851 })
1852 }
1853
    /// Parse the body of a TRIM call:
    /// `TRIM([BOTH|LEADING|TRAILING] [<what> FROM] <expr>)` or, in
    /// Snowflake/BigQuery/generic dialects, `TRIM(<expr>, <chars>, ...)`.
    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let mut trim_where = None;
        // Optional position keyword before the expression.
        if let Token::Word(word) = self.peek_token().token {
            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING]
                .iter()
                .any(|d| word.keyword == *d)
            {
                trim_where = Some(self.parse_trim_where()?);
            }
        }
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::FROM) {
            // The first expression was the characters to strip; the real
            // subject follows FROM.
            let trim_what = Box::new(expr);
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: Some(trim_what),
                trim_characters: None,
            })
        } else if self.consume_token(&Token::Comma)
            && dialect_of!(self is SnowflakeDialect | BigQueryDialect | GenericDialect)
        {
            // NOTE(review): the comma is consumed before the dialect check,
            // so on other dialects `TRIM(x, ...)` proceeds to expect `)` with
            // the comma already eaten — confirm this ordering is intended.
            let characters = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where: None,
                trim_what: None,
                trim_characters: Some(characters),
            })
        } else {
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: None,
                trim_characters: None,
            })
        }
    }
1902
1903 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
1904 let next_token = self.next_token();
1905 match &next_token.token {
1906 Token::Word(w) => match w.keyword {
1907 Keyword::BOTH => Ok(TrimWhereField::Both),
1908 Keyword::LEADING => Ok(TrimWhereField::Leading),
1909 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
1910 _ => self.expected("trim_where field", next_token)?,
1911 },
1912 _ => self.expected("trim_where field", next_token),
1913 }
1914 }
1915
1916 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
1919 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
1920 self.expect_token(&Token::RBracket)?;
1921 Ok(Expr::Array(Array { elem: exprs, named }))
1922 }
1923
    /// Parse the optional `ON OVERFLOW` clause of LISTAGG:
    /// `ON OVERFLOW ERROR` or
    /// `ON OVERFLOW TRUNCATE [<filler>] WITH|WITHOUT COUNT`.
    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Ok(Some(ListAggOnOverflow::Error))
            } else {
                self.expect_keyword(Keyword::TRUNCATE)?;
                // Optional filler literal; absent when WITH/WITHOUT follows
                // TRUNCATE directly.
                let filler = match self.peek_token().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::UnicodeStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token(),
                    )?,
                };
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
                }
                self.expect_keyword(Keyword::COUNT)?;
                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
            }
        } else {
            Ok(None)
        }
    }
1957
    /// Parse a date/time field keyword (YEAR, MONTH, ..., TIMEZONE_REGION)
    /// as used by EXTRACT, CEIL/FLOOR ... TO, and INTERVAL qualifiers.
    /// Dialects may additionally allow custom identifiers or single-quoted
    /// field names.
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::WEEK => {
                    // BigQuery allows WEEK(<weekday>) to pick the week start.
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier(false)?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                // Both the common misspelling and the correct spelling are
                // accepted as distinct variants.
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                // Dialect-specific escape hatch: any word as a custom field.
                _ if self.dialect.allow_extract_custom() => {
                    self.prev_token();
                    let custom = self.parse_identifier(false)?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            // Some dialects allow a quoted field name, e.g. EXTRACT('year' ...).
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                self.prev_token();
                let custom = self.parse_identifier(false)?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
2027
2028 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2029 match self.peek_token().token {
2030 Token::Word(w) => match w.keyword {
2031 Keyword::EXISTS => {
2032 let negated = true;
2033 let _ = self.parse_keyword(Keyword::EXISTS);
2034 self.parse_exists_expr(negated)
2035 }
2036 _ => Ok(Expr::UnaryOp {
2037 op: UnaryOperator::Not,
2038 expr: Box::new(
2039 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2040 ),
2041 }),
2042 },
2043 _ => Ok(Expr::UnaryOp {
2044 op: UnaryOperator::Not,
2045 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2046 }),
2047 }
2048 }
2049
    /// Parse MySQL's full-text search expression
    /// `MATCH (columns) AGAINST (<value> [<modifier>])`; the MATCH keyword
    /// has already been consumed.
    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
        let columns = self.parse_parenthesized_column_list(Mandatory, false)?;

        self.expect_keyword(Keyword::AGAINST)?;

        self.expect_token(&Token::LParen)?;

        // The value to search for.
        let match_value = self.parse_value()?;

        let in_natural_language_mode_keywords = &[
            Keyword::IN,
            Keyword::NATURAL,
            Keyword::LANGUAGE,
            Keyword::MODE,
        ];

        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];

        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];

        // Optional search modifier; IN NATURAL LANGUAGE MODE may itself be
        // followed by WITH QUERY EXPANSION.
        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
            if self.parse_keywords(with_query_expansion_keywords) {
                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
            } else {
                Some(SearchModifier::InNaturalLanguageMode)
            }
        } else if self.parse_keywords(in_boolean_mode_keywords) {
            Some(SearchModifier::InBooleanMode)
        } else if self.parse_keywords(with_query_expansion_keywords) {
            Some(SearchModifier::WithQueryExpansion)
        } else {
            None
        };

        self.expect_token(&Token::RParen)?;

        Ok(Expr::MatchAgainst {
            columns,
            match_value,
            opt_search_modifier,
        })
    }
2098
    /// Parse an INTERVAL literal after the INTERVAL keyword, e.g.
    /// `INTERVAL '1' DAY` or `INTERVAL '1-1' YEAR TO MONTH`.
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        // When a trailing unit is mandatory the value may be a full
        // expression; otherwise only a prefix expression is taken so a
        // following unit keyword is not consumed as part of the value.
        let value = if self.dialect.require_interval_qualifier() {
            self.parse_expr()?
        } else {
            self.parse_prefix()?
        };

        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token().location
            );
        } else {
            None
        };

        // SQL-standard precision clauses: SECOND takes `(p, fs)`; other
        // units take `(p)` and may be followed by `TO <unit> [(fs)]`.
        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
2181
    /// Peek (without consuming) whether the next token is a temporal-unit
    /// keyword usable as an INTERVAL qualifier (YEAR, DAY, SECOND, ...).
    pub fn next_token_is_temporal_unit(&mut self) -> bool {
        if let Token::Word(word) = self.peek_token().token {
            matches!(
                word.keyword,
                Keyword::YEAR
                    | Keyword::MONTH
                    | Keyword::WEEK
                    | Keyword::DAY
                    | Keyword::HOUR
                    | Keyword::MINUTE
                    | Keyword::SECOND
                    | Keyword::CENTURY
                    | Keyword::DECADE
                    | Keyword::DOW
                    | Keyword::DOY
                    | Keyword::EPOCH
                    | Keyword::ISODOW
                    | Keyword::ISOYEAR
                    | Keyword::JULIAN
                    | Keyword::MICROSECOND
                    | Keyword::MICROSECONDS
                    | Keyword::MILLENIUM
                    | Keyword::MILLENNIUM
                    | Keyword::MILLISECOND
                    | Keyword::MILLISECONDS
                    | Keyword::NANOSECOND
                    | Keyword::NANOSECONDS
                    | Keyword::QUARTER
                    | Keyword::TIMEZONE
                    | Keyword::TIMEZONE_HOUR
                    | Keyword::TIMEZONE_MINUTE
            )
        } else {
            false
        }
    }
2220
    /// Parse a BigQuery struct literal:
    /// typed `STRUCT<field type, ...>(value, ...)` or untyped
    /// `STRUCT(value, ...)`.
    fn parse_bigquery_struct_literal(&mut self) -> Result<Expr, ParserError> {
        let (fields, trailing_bracket) =
            self.parse_struct_type_def(Self::parse_struct_field_def)?;
        // A leftover `>` (second half of a `>>` token) means the type list
        // closed one more angle bracket than it opened.
        if trailing_bracket.0 {
            return parser_err!("unmatched > in STRUCT literal", self.peek_token().location);
        }

        self.expect_token(&Token::LParen)?;
        // Named `AS` values are only allowed when the struct is untyped.
        let values = self
            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Struct { values, fields })
    }
2243
    /// Parse one value of a struct literal, optionally named via `AS`:
    /// `expr [AS name]`. `typed_syntax` is true when the struct already has
    /// a typed field list, in which case `AS` is rejected.
    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::AS) {
            if typed_syntax {
                // Rewind so the error location points at the AS keyword.
                return parser_err!("Typed syntax does not allow AS", {
                    self.prev_token();
                    self.peek_token().location
                });
            }
            let field_name = self.parse_identifier(false)?;
            Ok(Expr::Named {
                expr: expr.into(),
                name: field_name,
            })
        } else {
            Ok(expr)
        }
    }
2275
    /// Parse a `STRUCT<...>` type definition, delegating each field to
    /// `elem_parser`. Returns the fields plus a flag telling whether the
    /// closing bracket was the second half of a `>>` token (which also
    /// closes an enclosing definition). `STRUCT` without `<` yields an
    /// empty field list.
    fn parse_struct_type_def<F>(
        &mut self,
        mut elem_parser: F,
    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
    {
        let start_token = self.peek_token();
        self.expect_keyword(Keyword::STRUCT)?;

        // No `<` after STRUCT: the untyped form, no field definitions.
        if Token::Lt != self.peek_token() {
            return Ok((Default::default(), false.into()));
        }
        self.next_token();

        let mut field_defs = vec![];
        let trailing_bracket = loop {
            let (def, trailing_bracket) = elem_parser(self)?;
            field_defs.push(def);
            if !self.consume_token(&Token::Comma) {
                break trailing_bracket;
            }

            // A field consumed a closing `>>` but a comma follows: that
            // extra `>` matched nothing in this definition.
            if trailing_bracket.0 {
                return parser_err!("unmatched > in STRUCT definition", start_token.location);
            }
        };

        Ok((
            field_defs,
            self.expect_closing_angle_bracket(trailing_bracket)?,
        ))
    }
2325
2326 fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
2328 self.expect_keyword(Keyword::STRUCT)?;
2329 self.expect_token(&Token::LParen)?;
2330 let struct_body = self.parse_comma_separated(|parser| {
2331 let field_name = parser.parse_identifier(false)?;
2332 let field_type = parser.parse_data_type()?;
2333
2334 Ok(StructField {
2335 field_name: Some(field_name),
2336 field_type,
2337 })
2338 });
2339 self.expect_token(&Token::RParen)?;
2340 struct_body
2341 }
2342
    /// Parse one field of a typed struct definition: `[name] type`.
    /// The field is anonymous unless two consecutive words are seen
    /// (a field name followed by a type word).
    fn parse_struct_field_def(
        &mut self,
    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
        // Two words in a row ⇒ the first one is the field name.
        let is_anonymous_field = !matches!(
            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
            (Token::Word(_), Token::Word(_))
        );

        let field_name = if is_anonymous_field {
            None
        } else {
            Some(self.parse_identifier(false)?)
        };

        // The helper also reports whether the type consumed a trailing `>>`.
        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;

        Ok((
            StructField {
                field_name,
                field_type,
            },
            trailing_bracket,
        ))
    }
2378
2379 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
2389 self.expect_keyword(Keyword::UNION)?;
2390
2391 self.expect_token(&Token::LParen)?;
2392
2393 let fields = self.parse_comma_separated(|p| {
2394 Ok(UnionField {
2395 field_name: p.parse_identifier(false)?,
2396 field_type: p.parse_data_type()?,
2397 })
2398 })?;
2399
2400 self.expect_token(&Token::RParen)?;
2401
2402 Ok(fields)
2403 }
2404
2405 fn parse_duckdb_struct_literal(&mut self) -> Result<Expr, ParserError> {
2415 self.expect_token(&Token::LBrace)?;
2416
2417 let fields = self.parse_comma_separated(Self::parse_duckdb_dictionary_field)?;
2418
2419 self.expect_token(&Token::RBrace)?;
2420
2421 Ok(Expr::Dictionary(fields))
2422 }
2423
2424 fn parse_duckdb_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
2434 let key = self.parse_identifier(false)?;
2435
2436 self.expect_token(&Token::Colon)?;
2437
2438 let expr = self.parse_expr()?;
2439
2440 Ok(DictionaryField {
2441 key,
2442 value: Box::new(expr),
2443 })
2444 }
2445
2446 fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
2456 self.expect_token(&Token::LBrace)?;
2457 let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
2458 self.expect_token(&Token::RBrace)?;
2459 Ok(Expr::Map(Map { entries: fields }))
2460 }
2461
2462 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
2472 let key = self.parse_expr()?;
2473
2474 self.expect_token(&Token::Colon)?;
2475
2476 let value = self.parse_expr()?;
2477
2478 Ok(MapEntry {
2479 key: Box::new(key),
2480 value: Box::new(value),
2481 })
2482 }
2483
2484 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
2494 self.expect_keyword(Keyword::MAP)?;
2495 self.expect_token(&Token::LParen)?;
2496 let key_data_type = self.parse_data_type()?;
2497 self.expect_token(&Token::Comma)?;
2498 let value_data_type = self.parse_data_type()?;
2499 self.expect_token(&Token::RParen)?;
2500
2501 Ok((key_data_type, value_data_type))
2502 }
2503
2504 fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
2514 self.expect_keyword(Keyword::TUPLE)?;
2515 self.expect_token(&Token::LParen)?;
2516 let mut field_defs = vec![];
2517 loop {
2518 let (def, _) = self.parse_struct_field_def()?;
2519 field_defs.push(def);
2520 if !self.consume_token(&Token::Comma) {
2521 break;
2522 }
2523 }
2524 self.expect_token(&Token::RParen)?;
2525
2526 Ok(field_defs)
2527 }
2528
    /// Consume the `>` closing a STRUCT type definition, unless a nested
    /// element already matched it as part of a `>>` token.
    /// The returned flag is `true` when this call consumed a `>>`, i.e. it
    /// also closed the enclosing (outer) definition.
    fn expect_closing_angle_bracket(
        &mut self,
        trailing_bracket: MatchedTrailingBracket,
    ) -> Result<MatchedTrailingBracket, ParserError> {
        let trailing_bracket = if !trailing_bracket.0 {
            match self.peek_token().token {
                Token::Gt => {
                    self.next_token();
                    false.into()
                }
                // `>>`: one bracket for this definition, one left over for
                // the caller.
                Token::ShiftRight => {
                    self.next_token();
                    true.into()
                }
                _ => return self.expected(">", self.peek_token()),
            }
        } else {
            // Already closed by a nested `>>`; nothing left to consume.
            false.into()
        };

        Ok(trailing_bracket)
    }
2555
    /// Parses an infix (binary or postfix) operator applied to the
    /// already-parsed left-hand expression `expr`, parsing the right-hand
    /// side at the given `precedence`.
    ///
    /// Handles, in order: dialect-specific overrides, regular binary
    /// operators (optionally quantified with ANY/ALL/SOME), keyword
    /// operators (IS, AT TIME ZONE, [NOT] IN/BETWEEN/LIKE/...), `::`
    /// casts, Postgres postfix `!`, and `[`/`:` subscript or JSON access.
    pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
        // Let the dialect take over infix parsing entirely if it wants to.
        if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
            return infix;
        }

        let mut tok = self.next_token();
        // Map the token to a regular binary operator where possible;
        // `None` falls through to keyword/postfix handling below. The
        // token is matched by `&mut` so custom-operator strings can be
        // moved out without cloning.
        let regular_binary_operator = match &mut tok.token {
            Token::Spaceship => Some(BinaryOperator::Spaceship),
            Token::DoubleEq => Some(BinaryOperator::Eq),
            Token::Eq => Some(BinaryOperator::Eq),
            Token::Neq => Some(BinaryOperator::NotEq),
            Token::Gt => Some(BinaryOperator::Gt),
            Token::GtEq => Some(BinaryOperator::GtEq),
            Token::Lt => Some(BinaryOperator::Lt),
            Token::LtEq => Some(BinaryOperator::LtEq),
            Token::Plus => Some(BinaryOperator::Plus),
            Token::Minus => Some(BinaryOperator::Minus),
            Token::Mul => Some(BinaryOperator::Multiply),
            Token::Mod => Some(BinaryOperator::Modulo),
            Token::StringConcat => Some(BinaryOperator::StringConcat),
            Token::Pipe => Some(BinaryOperator::BitwiseOr),
            Token::Caret => {
                // `^` is exponentiation in Postgres, bitwise XOR elsewhere.
                if dialect_of!(self is PostgreSqlDialect) {
                    Some(BinaryOperator::PGExp)
                } else {
                    Some(BinaryOperator::BitwiseXor)
                }
            }
            Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
            Token::Div => Some(BinaryOperator::Divide),
            Token::DuckIntDiv if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                Some(BinaryOperator::DuckIntegerDivide)
            }
            Token::ShiftLeft if dialect_of!(self is PostgreSqlDialect | DuckDbDialect | GenericDialect) => {
                Some(BinaryOperator::PGBitwiseShiftLeft)
            }
            Token::ShiftRight if dialect_of!(self is PostgreSqlDialect | DuckDbDialect | GenericDialect) => {
                Some(BinaryOperator::PGBitwiseShiftRight)
            }
            Token::Sharp if dialect_of!(self is PostgreSqlDialect) => {
                Some(BinaryOperator::PGBitwiseXor)
            }
            Token::Overlap if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Some(BinaryOperator::PGOverlap)
            }
            Token::CaretAt if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Some(BinaryOperator::PGStartsWith)
            }
            Token::Tilde => Some(BinaryOperator::PGRegexMatch),
            Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
            Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
            Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
            Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
            Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
            Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
            Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
            Token::Arrow => Some(BinaryOperator::Arrow),
            Token::LongArrow => Some(BinaryOperator::LongArrow),
            Token::HashArrow => Some(BinaryOperator::HashArrow),
            Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
            Token::AtArrow => Some(BinaryOperator::AtArrow),
            Token::ArrowAt => Some(BinaryOperator::ArrowAt),
            Token::HashMinus => Some(BinaryOperator::HashMinus),
            Token::AtQuestion => Some(BinaryOperator::AtQuestion),
            Token::AtAt => Some(BinaryOperator::AtAt),
            Token::Question => Some(BinaryOperator::Question),
            Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
            Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
            // `mem::take` moves the operator string out of the token
            // without cloning it.
            Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(core::mem::take(s))),

            Token::Word(w) => match w.keyword {
                Keyword::AND => Some(BinaryOperator::And),
                Keyword::OR => Some(BinaryOperator::Or),
                Keyword::XOR => Some(BinaryOperator::Xor),
                Keyword::OPERATOR if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                    self.expect_token(&Token::LParen)?;
                    // Collect the dotted operator name, e.g.
                    // `OPERATOR(schema.+)`.
                    let mut idents = vec![];
                    loop {
                        idents.push(self.next_token().to_string());
                        if !self.consume_token(&Token::Period) {
                            break;
                        }
                    }
                    self.expect_token(&Token::RParen)?;
                    Some(BinaryOperator::PGCustomBinaryOperator(idents))
                }
                _ => None,
            },
            _ => None,
        };

        if let Some(op) = regular_binary_operator {
            // Quantified comparison: `<op> ANY/ALL/SOME (...)`.
            if let Some(keyword) =
                self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
            {
                self.expect_token(&Token::LParen)?;
                let right = if self.peek_sub_query() {
                    // A subquery follows: rewind to the `(` so it is
                    // consumed as part of the subquery expression, which
                    // also handles its own closing paren.
                    self.prev_token();
                    self.parse_subexpr(precedence)?
                } else {
                    let right = self.parse_subexpr(precedence)?;
                    self.expect_token(&Token::RParen)?;
                    right
                };

                // Only plain comparison operators may be quantified.
                if !matches!(
                    op,
                    BinaryOperator::Gt
                        | BinaryOperator::Lt
                        | BinaryOperator::GtEq
                        | BinaryOperator::LtEq
                        | BinaryOperator::Eq
                        | BinaryOperator::NotEq
                ) {
                    return parser_err!(
                        format!(
                            "Expected one of [=, >, <, =>, =<, !=] as comparison operator, found: {op}"
                        ),
                        tok.location
                    );
                };

                Ok(match keyword {
                    Keyword::ALL => Expr::AllOp {
                        left: Box::new(expr),
                        compare_op: op,
                        right: Box::new(right),
                    },
                    Keyword::ANY | Keyword::SOME => Expr::AnyOp {
                        left: Box::new(expr),
                        compare_op: op,
                        right: Box::new(right),
                        is_some: keyword == Keyword::SOME,
                    },
                    // parse_one_of_keywords only returns the three
                    // keywords matched above.
                    _ => unreachable!(),
                })
            } else {
                Ok(Expr::BinaryOp {
                    left: Box::new(expr),
                    op,
                    right: Box::new(self.parse_subexpr(precedence)?),
                })
            }
        } else if let Token::Word(w) = &tok.token {
            match w.keyword {
                Keyword::IS => {
                    // IS [NOT] NULL / TRUE / FALSE / UNKNOWN / DISTINCT FROM.
                    if self.parse_keyword(Keyword::NULL) {
                        Ok(Expr::IsNull(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
                        Ok(Expr::IsNotNull(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::TRUE]) {
                        Ok(Expr::IsTrue(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
                        Ok(Expr::IsNotTrue(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::FALSE]) {
                        Ok(Expr::IsFalse(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
                        Ok(Expr::IsNotFalse(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
                        Ok(Expr::IsUnknown(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
                        Ok(Expr::IsNotUnknown(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
                        let expr2 = self.parse_expr()?;
                        Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
                    {
                        let expr2 = self.parse_expr()?;
                        Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
                    } else {
                        self.expected(
                            "[NOT] NULL or TRUE|FALSE or [NOT] DISTINCT FROM after IS",
                            self.peek_token(),
                        )
                    }
                }
                Keyword::AT => {
                    // `expr AT TIME ZONE <tz-expr>`.
                    self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                    Ok(Expr::AtTimeZone {
                        timestamp: Box::new(expr),
                        time_zone: Box::new(self.parse_subexpr(precedence)?),
                    })
                }
                Keyword::NOT
                | Keyword::IN
                | Keyword::BETWEEN
                | Keyword::LIKE
                | Keyword::ILIKE
                | Keyword::SIMILAR
                | Keyword::REGEXP
                | Keyword::RLIKE => {
                    // Rewind so an optional leading NOT can be re-parsed
                    // uniformly for all of these operators.
                    self.prev_token();
                    let negated = self.parse_keyword(Keyword::NOT);
                    let regexp = self.parse_keyword(Keyword::REGEXP);
                    let rlike = self.parse_keyword(Keyword::RLIKE);
                    if regexp || rlike {
                        Ok(Expr::RLike {
                            negated,
                            expr: Box::new(expr),
                            pattern: Box::new(
                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
                            ),
                            // Distinguishes REGEXP from RLIKE spelling.
                            regexp,
                        })
                    } else if self.parse_keyword(Keyword::IN) {
                        self.parse_in(expr, negated)
                    } else if self.parse_keyword(Keyword::BETWEEN) {
                        self.parse_between(expr, negated)
                    } else if self.parse_keyword(Keyword::LIKE) {
                        Ok(Expr::Like {
                            negated,
                            any: self.parse_keyword(Keyword::ANY),
                            expr: Box::new(expr),
                            pattern: Box::new(
                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
                            ),
                            escape_char: self.parse_escape_char()?,
                        })
                    } else if self.parse_keyword(Keyword::ILIKE) {
                        Ok(Expr::ILike {
                            negated,
                            any: self.parse_keyword(Keyword::ANY),
                            expr: Box::new(expr),
                            pattern: Box::new(
                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
                            ),
                            escape_char: self.parse_escape_char()?,
                        })
                    } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
                        Ok(Expr::SimilarTo {
                            negated,
                            expr: Box::new(expr),
                            pattern: Box::new(
                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
                            ),
                            escape_char: self.parse_escape_char()?,
                        })
                    } else {
                        self.expected("IN or BETWEEN after NOT", self.peek_token())
                    }
                }
                // No other keyword acts as an infix operator.
                _ => parser_err!(
                    format!("No infix parser for token {:?}", tok.token),
                    tok.location
                ),
            }
        } else if Token::DoubleColon == tok {
            // Postgres-style `expr::type` cast.
            Ok(Expr::Cast {
                kind: CastKind::DoubleColon,
                expr: Box::new(expr),
                data_type: self.parse_data_type()?,
                format: None,
            })
        } else if Token::ExclamationMark == tok {
            // Postgres postfix factorial, e.g. `5!`.
            Ok(Expr::UnaryOp {
                op: UnaryOperator::PGPostfixFactorial,
                expr: Box::new(expr),
            })
        } else if Token::LBracket == tok {
            if dialect_of!(self is PostgreSqlDialect | DuckDbDialect | GenericDialect) {
                self.parse_subscript(expr)
            } else if dialect_of!(self is SnowflakeDialect) {
                // Snowflake: `[` starts a JSON path; rewind so the path
                // parser sees the bracket itself.
                self.prev_token();
                self.parse_json_access(expr)
            } else {
                self.parse_map_access(expr)
            }
        } else if dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == tok {
            // `:` starts a variant access path; rewind as above.
            self.prev_token();
            self.parse_json_access(expr)
        } else {
            // Nothing accepted the token; report it verbatim.
            parser_err!(
                format!("No infix parser for token {:?}", tok.token),
                tok.location
            )
        }
    }
2847
2848 pub fn parse_escape_char(&mut self) -> Result<Option<String>, ParserError> {
2850 if self.parse_keyword(Keyword::ESCAPE) {
2851 Ok(Some(self.parse_literal_string()?))
2852 } else {
2853 Ok(None)
2854 }
2855 }
2856
    /// Parses the interior of a `[...]` subscript after the opening `[`
    /// has been consumed: either a plain index `[i]` or a slice
    /// `[lower:upper:stride]` where every component is optional.
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means the lower bound was omitted.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // `[expr]` is a plain index; `[:]` is a fully open slice.
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // If a lower bound was parsed, the separating `:` has not been
        // consumed yet (the leading-colon case consumed it above).
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `[lower:]` — upper bound omitted.
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_expr()?)
        };

        // `[lower:upper]` — no stride.
        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // `[lower:upper:stride]` — the stride itself may be omitted,
        // in which case the `]` was just consumed.
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
2929
2930 pub fn parse_subscript(&mut self, expr: Expr) -> Result<Expr, ParserError> {
2934 let subscript = self.parse_subscript_inner()?;
2935 Ok(Expr::Subscript {
2936 expr: Box::new(expr),
2937 subscript: Box::new(subscript),
2938 })
2939 }
2940
    /// Parses one object key of a variant/JSON path: an unquoted or
    /// double-quoted identifier, or a double-quoted string literal.
    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
        let token = self.next_token();
        match token.token {
            Token::Word(Word {
                value,
                // Only unquoted or `"`-quoted identifiers are valid keys.
                quote_style: quote_style @ (Some('"') | None),
                // Any keyword is acceptable as a key in this position.
                keyword: _,
            }) => Ok(JsonPathElem::Dot {
                key: value,
                quoted: quote_style.is_some(),
            }),

            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),

            _ => self.expected("variant object key name", token),
        }
    }
2964
    /// Parses a Snowflake-style JSON/variant access path following
    /// `expr`, e.g. `col:a.b[0]`, collecting elements until a token that
    /// cannot continue the path is reached.
    fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // The path starts with `:` (variant access).
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // Subsequent object keys are separated by `.`.
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // Bracketed access, e.g. `[0]` or `['key']`.
                Token::LBracket => {
                    let key = self.parse_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                // Anything else ends the path; push the token back.
                _ => {
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers only invoke this after seeing `:` or `[`, so at least
        // one element must have been collected.
        debug_assert!(!path.is_empty());
        Ok(Expr::JsonAccess {
            value: Box::new(expr),
            path: JsonPath { path },
        })
    }
2994
    /// Parses map-style access `expr[key]` (the first `[` has already
    /// been consumed), plus any chained `[key]` accesses and, in
    /// BigQuery, `.key` accesses.
    pub fn parse_map_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
        // First key: the opening `[` was consumed by the caller.
        let key = self.parse_expr()?;
        self.expect_token(&Token::RBracket)?;

        let mut keys = vec![MapAccessKey {
            key,
            syntax: MapAccessSyntax::Bracket,
        }];
        // Consume any further `[key]` or `.key` accesses.
        loop {
            let key = match self.peek_token().token {
                Token::LBracket => {
                    self.next_token(); // consume `[`
                    let key = self.parse_expr()?;
                    self.expect_token(&Token::RBracket)?;
                    MapAccessKey {
                        key,
                        syntax: MapAccessSyntax::Bracket,
                    }
                }
                // BigQuery allows `.`-separated keys, e.g. `obj.a.b`.
                Token::Period if dialect_of!(self is BigQueryDialect) => {
                    self.next_token(); // consume `.`
                    MapAccessKey {
                        key: self.parse_expr()?,
                        syntax: MapAccessSyntax::Period,
                    }
                }
                _ => break,
            };
            keys.push(key);
        }

        Ok(Expr::MapAccess {
            column: Box::new(expr),
            keys,
        })
    }
3034
    /// Parses the right-hand side of `[NOT] IN`: `UNNEST(...)`, a
    /// parenthesized subquery, or a parenthesized expression list.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // BigQuery: `expr IN UNNEST(array_expr)`.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        let in_op = if self.parse_keyword(Keyword::SELECT) || self.parse_keyword(Keyword::WITH) {
            // A subquery: rewind so parse_query sees SELECT/WITH itself.
            self.prev_token();
            Expr::InSubquery {
                expr: Box::new(expr),
                subquery: self.parse_query()?,
                negated,
            }
        } else {
            Expr::InList {
                expr: Box::new(expr),
                // Some dialects accept an empty `IN ()` list.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            }
        };
        // The closing paren of both the subquery and list forms.
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
3071
3072 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3074 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3077 self.expect_keyword(Keyword::AND)?;
3078 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3079 Ok(Expr::Between {
3080 expr: Box::new(expr),
3081 negated,
3082 low: Box::new(low),
3083 high: Box::new(high),
3084 })
3085 }
3086
3087 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3089 Ok(Expr::Cast {
3090 kind: CastKind::DoubleColon,
3091 expr: Box::new(expr),
3092 data_type: self.parse_data_type()?,
3093 format: None,
3094 })
3095 }
3096
    /// Returns the precedence of the next unprocessed token, delegating
    /// to the dialect's default precedence rules.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
3101
    /// Returns the next non-whitespace token without advancing.
    pub fn peek_token(&self) -> TokenWithLocation {
        self.peek_nth_token(0)
    }
3107
3108 pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
3131 self.peek_tokens_with_location()
3132 .map(|with_loc| with_loc.token)
3133 }
3134
    /// Returns the next `N` non-whitespace tokens (with locations)
    /// without advancing; positions past the end yield `Token::EOF`.
    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithLocation; N] {
        // A local cursor keeps `self` unmodified while scanning ahead.
        let mut index = self.index;
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            // Whitespace tokens are skipped, not counted.
            if let Some(TokenWithLocation {
                token: Token::Whitespace(_),
                location: _,
            }) = token
            {
                continue;
            }
            break token.cloned().unwrap_or(TokenWithLocation {
                token: Token::EOF,
                location: Location { line: 0, column: 0 },
            });
        })
    }
3157
    /// Returns the `n`-th upcoming non-whitespace token (0-based)
    /// without advancing; past the end of input returns `Token::EOF`.
    pub fn peek_nth_token(&self, mut n: usize) -> TokenWithLocation {
        let mut index = self.index;
        loop {
            index += 1;
            match self.tokens.get(index - 1) {
                // Whitespace never counts toward `n`.
                Some(TokenWithLocation {
                    token: Token::Whitespace(_),
                    location: _,
                }) => continue,
                non_whitespace => {
                    if n == 0 {
                        return non_whitespace.cloned().unwrap_or(TokenWithLocation {
                            token: Token::EOF,
                            location: Location { line: 0, column: 0 },
                        });
                    }
                    n -= 1;
                }
            }
        }
    }
3180
    /// Returns the immediately next token, including whitespace, without
    /// advancing.
    pub fn peek_token_no_skip(&self) -> TokenWithLocation {
        self.peek_nth_token_no_skip(0)
    }
3186
3187 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithLocation {
3189 self.tokens
3190 .get(self.index + n)
3191 .cloned()
3192 .unwrap_or(TokenWithLocation {
3193 token: Token::EOF,
3194 location: Location { line: 0, column: 0 },
3195 })
3196 }
3197
    /// Advances past any whitespace and returns the next meaningful
    /// token, or `Token::EOF` at the end of input.
    pub fn next_token(&mut self) -> TokenWithLocation {
        loop {
            self.index += 1;
            match self.tokens.get(self.index - 1) {
                // Whitespace is consumed but never returned.
                Some(TokenWithLocation {
                    token: Token::Whitespace(_),
                    location: _,
                }) => continue,
                token => {
                    return token
                        .cloned()
                        .unwrap_or_else(|| TokenWithLocation::wrap(Token::EOF))
                }
            }
        }
    }
3217
3218 pub fn next_token_no_skip(&mut self) -> Option<&TokenWithLocation> {
3220 self.index += 1;
3221 self.tokens.get(self.index - 1)
3222 }
3223
    /// Moves back to the previous meaningful token, skipping any
    /// intervening whitespace.
    ///
    /// Each call must be paired with a preceding `next_token`; rewinding
    /// past the start of the token stream panics.
    pub fn prev_token(&mut self) {
        loop {
            assert!(self.index > 0);
            self.index -= 1;
            if let Some(TokenWithLocation {
                token: Token::Whitespace(_),
                location: _,
            }) = self.tokens.get(self.index)
            {
                continue;
            }
            return;
        }
    }
3241
3242 pub fn expected<T>(&self, expected: &str, found: TokenWithLocation) -> Result<T, ParserError> {
3244 parser_err!(
3245 format!("Expected: {expected}, found: {found}"),
3246 found.location
3247 )
3248 }
3249
3250 #[must_use]
3253 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
3254 match self.peek_token().token {
3255 Token::Word(w) if expected == w.keyword => {
3256 self.next_token();
3257 true
3258 }
3259 _ => false,
3260 }
3261 }
3262
3263 pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
3271 match self.peek_token().token {
3272 Token::Word(w) if expected == w.keyword => {
3273 for (idx, token) in tokens.iter().enumerate() {
3274 if self.peek_nth_token(idx + 1).token != *token {
3275 return false;
3276 }
3277 }
3278 for _ in 0..(tokens.len() + 1) {
3280 self.next_token();
3281 }
3282 true
3283 }
3284 _ => false,
3285 }
3286 }
3287
3288 #[must_use]
3292 pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
3293 let index = self.index;
3294 for &keyword in keywords {
3295 if !self.parse_keyword(keyword) {
3296 self.index = index;
3299 return false;
3300 }
3301 }
3302 true
3303 }
3304
3305 #[must_use]
3309 pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
3310 match self.peek_token().token {
3311 Token::Word(w) => {
3312 keywords
3313 .iter()
3314 .find(|keyword| **keyword == w.keyword)
3315 .map(|keyword| {
3316 self.next_token();
3317 *keyword
3318 })
3319 }
3320 _ => None,
3321 }
3322 }
3323
3324 pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
3327 if let Some(keyword) = self.parse_one_of_keywords(keywords) {
3328 Ok(keyword)
3329 } else {
3330 let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
3331 self.expected(
3332 &format!("one of {}", keywords.join(" or ")),
3333 self.peek_token(),
3334 )
3335 }
3336 }
3337
3338 pub fn expect_keyword(&mut self, expected: Keyword) -> Result<(), ParserError> {
3341 if self.parse_keyword(expected) {
3342 Ok(())
3343 } else {
3344 self.expected(format!("{:?}", &expected).as_str(), self.peek_token())
3345 }
3346 }
3347
3348 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
3351 for &kw in expected {
3352 self.expect_keyword(kw)?;
3353 }
3354 Ok(())
3355 }
3356
3357 #[must_use]
3359 pub fn consume_token(&mut self, expected: &Token) -> bool {
3360 if self.peek_token() == *expected {
3361 self.next_token();
3362 true
3363 } else {
3364 false
3365 }
3366 }
3367
3368 #[must_use]
3372 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
3373 let index = self.index;
3374 for token in tokens {
3375 if !self.consume_token(token) {
3376 self.index = index;
3377 return false;
3378 }
3379 }
3380 true
3381 }
3382
3383 pub fn expect_token(&mut self, expected: &Token) -> Result<(), ParserError> {
3385 if self.consume_token(expected) {
3386 Ok(())
3387 } else {
3388 self.expected(&expected.to_string(), self.peek_token())
3389 }
3390 }
3391
3392 fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
3393 where
3394 <T as FromStr>::Err: Display,
3395 {
3396 s.parse::<T>().map_err(|e| {
3397 ParserError::ParserError(format!(
3398 "Could not parse '{s}' as {}: {e}{loc}",
3399 core::any::type_name::<T>()
3400 ))
3401 })
3402 }
3403
    /// Parses the comma-separated projection (select list) of a SELECT.
    ///
    /// Temporarily enables trailing-comma support when the dialect
    /// allows it in projections, restoring the previous setting
    /// afterwards regardless of success or failure.
    pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
        let old_value = self.options.trailing_commas;
        self.options.trailing_commas |= self.dialect.supports_projection_trailing_commas();

        let ret = self.parse_comma_separated(|p| p.parse_select_item());
        // Restore the caller's setting even when parsing failed.
        self.options.trailing_commas = old_value;

        ret
    }
3421
    /// Parses the comma-separated list of privileges in a GRANT/REVOKE
    /// statement, tolerating a trailing comma when that option is
    /// enabled.
    pub fn parse_actions_list(&mut self) -> Result<Vec<ParsedAction>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                // After a comma, any token that can legally follow the
                // list (ON, a closing delimiter, or end of input) means
                // the comma was trailing and the list is done.
                match self.peek_token().token {
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
3444
    /// Returns `true` when a comma-separated list has ended: either no
    /// comma follows the last element, or (with trailing commas enabled)
    /// the comma is followed by a token that cannot start another
    /// element. Consumes the comma when one is present.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        if !self.consume_token(&Token::Comma) {
            true
        } else if self.options.trailing_commas {
            let token = self.peek_token().token;
            match token {
                // A reserved keyword after the comma means the comma was
                // trailing, e.g. `SELECT a, FROM t`.
                Token::Word(ref kw)
                    if keywords::RESERVED_FOR_COLUMN_ALIAS.contains(&kw.keyword) =>
                {
                    true
                }
                // Closing delimiters and end of input also end the list.
                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
                    true
                }
                _ => false,
            }
        } else {
            false
        }
    }
3467
3468 pub fn parse_comma_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
3470 where
3471 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
3472 {
3473 let mut values = vec![];
3474 loop {
3475 values.push(f(self)?);
3476 if self.is_parse_comma_separated_end() {
3477 break;
3478 }
3479 }
3480 Ok(values)
3481 }
3482
3483 pub fn parse_keyword_separated<T, F>(
3485 &mut self,
3486 keyword: Keyword,
3487 mut f: F,
3488 ) -> Result<Vec<T>, ParserError>
3489 where
3490 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
3491 {
3492 let mut values = vec![];
3493 loop {
3494 values.push(f(self)?);
3495 if !self.parse_keyword(keyword) {
3496 break;
3497 }
3498 }
3499 Ok(values)
3500 }
3501
    /// Parses `( <f> )`: consumes the surrounding parentheses and
    /// returns the result of `f`.
    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.expect_token(&Token::LParen)?;
        let res = f(self)?;
        self.expect_token(&Token::RParen)?;
        Ok(res)
    }
3511
    /// Parses a possibly-empty comma-separated list terminated by
    /// `end_token`. The end token itself is peeked at, never consumed.
    pub fn parse_comma_separated0<T, F>(
        &mut self,
        f: F,
        end_token: Token,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        // An immediate end token means an empty list.
        if self.peek_token().token == end_token {
            return Ok(vec![]);
        }

        // With trailing commas enabled, `,<end>` is also an empty list;
        // only the comma is consumed.
        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
            let _ = self.consume_token(&Token::Comma);
            return Ok(vec![]);
        }

        self.parse_comma_separated(f)
    }
3533
    /// Runs `f`, returning `Some` on success; on failure rewinds to the
    /// starting position and returns `None`.
    pub fn maybe_parse<T, F>(&mut self, mut f: F) -> Result<Option<T>, ParserError>
    where
        F: FnMut(&mut Parser) -> Result<T, ParserError>,
    {
        let index = self.index;
        match f(self) {
            Ok(t) => Ok(Some(t)),
            // The recursion guard must propagate rather than be treated
            // as a recoverable parse failure, or deeply nested input
            // could retry its way past the limit.
            Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
            Err(_) => {
                self.index = index;
                Ok(None)
            }
        }
    }
3550
    /// Parses an optional `ALL` / `DISTINCT` / `DISTINCT ON (...)`
    /// clause. Returns `None` when no DISTINCT form is present and
    /// errors if both ALL and DISTINCT are given.
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Capture the location up front for the ALL+DISTINCT error.
        let loc = self.peek_token().location;
        let all = self.parse_keyword(Keyword::ALL);
        let distinct = self.parse_keyword(Keyword::DISTINCT);
        if !distinct {
            return Ok(None);
        }
        if all {
            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
        }
        let on = self.parse_keyword(Keyword::ON);
        if !on {
            return Ok(Some(Distinct::Distinct));
        }

        // `DISTINCT ON (...)`: an empty parenthesized list is allowed.
        self.expect_token(&Token::LParen)?;
        let col_names = if self.consume_token(&Token::RParen) {
            // Rewind so the shared expect below re-consumes the `)`.
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
3578
    /// Parses a `CREATE ...` statement (the CREATE keyword has already
    /// been consumed), dispatching on the object type that follows.
    ///
    /// Common modifiers (OR REPLACE, OR ALTER, LOCAL/GLOBAL, TRANSIENT,
    /// TEMP/TEMPORARY, PERSISTENT) are parsed first and threaded into
    /// the object-specific sub-parsers; the keyword tests below are
    /// order-sensitive.
    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
        // GLOBAL/LOCAL fold into a tri-state: Some(true), Some(false),
        // or None when neither was specified.
        let global: Option<bool> = if global {
            Some(true)
        } else if local {
            Some(false)
        } else {
            None
        };
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        // DuckDB-only modifier used by CREATE SECRET.
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
        if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_table(or_replace, temporary, global, transient)
        } else if self.parse_keyword(Keyword::MATERIALIZED) || self.parse_keyword(Keyword::VIEW) {
            // Rewind so the view parser sees MATERIALIZED/VIEW itself.
            self.prev_token();
            self.parse_create_view(or_replace, temporary)
        } else if self.parse_keyword(Keyword::POLICY) {
            self.parse_create_policy()
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            self.parse_create_external_table(or_replace)
        } else if self.parse_keyword(Keyword::FUNCTION) {
            self.parse_create_function(or_replace, temporary)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            self.parse_create_trigger(or_replace, false)
        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
            self.parse_create_trigger(or_replace, true)
        } else if self.parse_keyword(Keyword::MACRO) {
            self.parse_create_macro(or_replace, temporary)
        } else if self.parse_keyword(Keyword::SECRET) {
            self.parse_create_secret(or_replace, temporary, persistent)
        } else if or_replace {
            // OR REPLACE is only valid for the object types above.
            self.expected(
                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                self.peek_token(),
            )
        } else if self.parse_keyword(Keyword::EXTENSION) {
            self.parse_create_extension()
        } else if self.parse_keyword(Keyword::INDEX) {
            self.parse_create_index(false)
        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
            self.parse_create_index(true)
        } else if self.parse_keyword(Keyword::VIRTUAL) {
            self.parse_create_virtual_table()
        } else if self.parse_keyword(Keyword::SCHEMA) {
            self.parse_create_schema()
        } else if self.parse_keyword(Keyword::DATABASE) {
            self.parse_create_database()
        } else if self.parse_keyword(Keyword::ROLE) {
            self.parse_create_role()
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            self.parse_create_sequence(temporary)
        } else if self.parse_keyword(Keyword::TYPE) {
            self.parse_create_type()
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            self.parse_create_procedure(or_alter)
        } else {
            self.expected("an object type after CREATE", self.peek_token())
        }
    }
3646
    /// Parses DuckDB `CREATE SECRET` syntax: an optional name and/or
    /// `IN <storage>` specifier followed by a parenthesized
    /// `TYPE <t>[, key value ...]` option list.
    pub fn parse_create_secret(
        &mut self,
        or_replace: bool,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut storage_specifier = None;
        let mut name = None;
        // Anything before the `(` is the optional name and/or storage.
        if self.peek_token() != Token::LParen {
            if self.parse_keyword(Keyword::IN) {
                storage_specifier = self.parse_identifier(false).ok()
            } else {
                name = self.parse_identifier(false).ok();
            }

            // A name may still be followed by `IN <storage>`.
            if storage_specifier.is_none()
                && self.peek_token() != Token::LParen
                && self.parse_keyword(Keyword::IN)
            {
                storage_specifier = self.parse_identifier(false).ok();
            }
        }

        self.expect_token(&Token::LParen)?;
        self.expect_keyword(Keyword::TYPE)?;
        let secret_type = self.parse_identifier(false)?;

        // Remaining entries are `key value` pairs separated by commas.
        let mut options = Vec::new();
        if self.consume_token(&Token::Comma) {
            options.append(&mut self.parse_comma_separated(|p| {
                let key = p.parse_identifier(false)?;
                let value = p.parse_identifier(false)?;
                Ok(SecretOption { key, value })
            })?);
        }
        self.expect_token(&Token::RParen)?;

        // TEMPORARY and PERSISTENT are mutually exclusive modifiers.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
        };

        Ok(Statement::CreateSecret {
            or_replace,
            temporary: temp,
            if_not_exists,
            name,
            storage_specifier,
            secret_type,
            options,
        })
    }
3705
3706 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
3708 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
3709 if self.parse_keyword(Keyword::TABLE) {
3710 let table_name = self.parse_object_name(false)?;
3711 if self.peek_token().token != Token::EOF {
3712 if let Token::Word(word) = self.peek_token().token {
3713 if word.keyword == Keyword::OPTIONS {
3714 options = self.parse_options(Keyword::OPTIONS)?
3715 }
3716 };
3717
3718 if self.peek_token().token != Token::EOF {
3719 let (a, q) = self.parse_as_query()?;
3720 has_as = a;
3721 query = Some(q);
3722 }
3723
3724 Ok(Statement::Cache {
3725 table_flag,
3726 table_name,
3727 has_as,
3728 options,
3729 query,
3730 })
3731 } else {
3732 Ok(Statement::Cache {
3733 table_flag,
3734 table_name,
3735 has_as,
3736 options,
3737 query,
3738 })
3739 }
3740 } else {
3741 table_flag = Some(self.parse_object_name(false)?);
3742 if self.parse_keyword(Keyword::TABLE) {
3743 let table_name = self.parse_object_name(false)?;
3744 if self.peek_token() != Token::EOF {
3745 if let Token::Word(word) = self.peek_token().token {
3746 if word.keyword == Keyword::OPTIONS {
3747 options = self.parse_options(Keyword::OPTIONS)?
3748 }
3749 };
3750
3751 if self.peek_token() != Token::EOF {
3752 let (a, q) = self.parse_as_query()?;
3753 has_as = a;
3754 query = Some(q);
3755 }
3756
3757 Ok(Statement::Cache {
3758 table_flag,
3759 table_name,
3760 has_as,
3761 options,
3762 query,
3763 })
3764 } else {
3765 Ok(Statement::Cache {
3766 table_flag,
3767 table_name,
3768 has_as,
3769 options,
3770 query,
3771 })
3772 }
3773 } else {
3774 if self.peek_token() == Token::EOF {
3775 self.prev_token();
3776 }
3777 self.expected("a `TABLE` keyword", self.peek_token())
3778 }
3779 }
3780 }
3781
3782 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
3784 match self.peek_token().token {
3785 Token::Word(word) => match word.keyword {
3786 Keyword::AS => {
3787 self.next_token();
3788 Ok((true, self.parse_query()?))
3789 }
3790 _ => Ok((false, self.parse_query()?)),
3791 },
3792 _ => self.expected("a QUERY statement", self.peek_token()),
3793 }
3794 }
3795
3796 pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
3798 self.expect_keyword(Keyword::TABLE)?;
3799 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
3800 let table_name = self.parse_object_name(false)?;
3801 Ok(Statement::UNCache {
3802 table_name,
3803 if_exists,
3804 })
3805 }
3806
3807 pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
3809 self.expect_keyword(Keyword::TABLE)?;
3810 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
3811 let table_name = self.parse_object_name(false)?;
3812 self.expect_keyword(Keyword::USING)?;
3813 let module_name = self.parse_identifier(false)?;
3814 let module_args = self.parse_parenthesized_column_list(Optional, false)?;
3819 Ok(Statement::CreateVirtualTable {
3820 name: table_name,
3821 if_not_exists,
3822 module_name,
3823 module_args,
3824 })
3825 }
3826
3827 pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
3828 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
3829
3830 let schema_name = self.parse_schema_name()?;
3831
3832 Ok(Statement::CreateSchema {
3833 schema_name,
3834 if_not_exists,
3835 })
3836 }
3837
3838 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
3839 if self.parse_keyword(Keyword::AUTHORIZATION) {
3840 Ok(SchemaName::UnnamedAuthorization(
3841 self.parse_identifier(false)?,
3842 ))
3843 } else {
3844 let name = self.parse_object_name(false)?;
3845
3846 if self.parse_keyword(Keyword::AUTHORIZATION) {
3847 Ok(SchemaName::NamedAuthorization(
3848 name,
3849 self.parse_identifier(false)?,
3850 ))
3851 } else {
3852 Ok(SchemaName::Simple(name))
3853 }
3854 }
3855 }
3856
3857 pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
3858 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
3859 let db_name = self.parse_object_name(false)?;
3860 let mut location = None;
3861 let mut managed_location = None;
3862 loop {
3863 match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
3864 Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
3865 Some(Keyword::MANAGEDLOCATION) => {
3866 managed_location = Some(self.parse_literal_string()?)
3867 }
3868 _ => break,
3869 }
3870 }
3871 Ok(Statement::CreateDatabase {
3872 db_name,
3873 if_not_exists: ine,
3874 location,
3875 managed_location,
3876 })
3877 }
3878
3879 pub fn parse_optional_create_function_using(
3880 &mut self,
3881 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
3882 if !self.parse_keyword(Keyword::USING) {
3883 return Ok(None);
3884 };
3885 let keyword =
3886 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
3887
3888 let uri = self.parse_literal_string()?;
3889
3890 match keyword {
3891 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
3892 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
3893 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
3894 _ => self.expected(
3895 "JAR, FILE or ARCHIVE, got {:?}",
3896 TokenWithLocation::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
3897 ),
3898 }
3899 }
3900
3901 pub fn parse_create_function(
3902 &mut self,
3903 or_replace: bool,
3904 temporary: bool,
3905 ) -> Result<Statement, ParserError> {
3906 if dialect_of!(self is HiveDialect) {
3907 self.parse_hive_create_function(or_replace, temporary)
3908 } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
3909 self.parse_postgres_create_function(or_replace, temporary)
3910 } else if dialect_of!(self is DuckDbDialect) {
3911 self.parse_create_macro(or_replace, temporary)
3912 } else if dialect_of!(self is BigQueryDialect) {
3913 self.parse_bigquery_create_function(or_replace, temporary)
3914 } else {
3915 self.prev_token();
3916 self.expected("an object type after CREATE", self.peek_token())
3917 }
3918 }
3919
    /// Parse a PostgreSQL-style `CREATE FUNCTION` statement: the function
    /// name, a parenthesized argument list, an optional `RETURNS` clause,
    /// then any number of function attributes (`AS`, `LANGUAGE`, behavior,
    /// null-call handling, `PARALLEL`, `RETURN`) in arbitrary order, each
    /// allowed at most once.
    fn parse_postgres_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;
        self.expect_token(&Token::LParen)?;
        // An immediately-following `)` means an empty argument list. Peek by
        // consuming and rewinding so the unconditional RParen check below
        // still succeeds.
        let args = if self.consume_token(&Token::RParen) {
            self.prev_token();
            None
        } else {
            Some(self.parse_comma_separated(Parser::parse_function_arg)?)
        };

        self.expect_token(&Token::RParen)?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_data_type()?)
        } else {
            None
        };

        // Accumulates the optional attributes, which Postgres allows in any
        // order but each at most once.
        #[derive(Default)]
        struct Body {
            language: Option<Ident>,
            behavior: Option<FunctionBehavior>,
            function_body: Option<CreateFunctionBody>,
            called_on_null: Option<FunctionCalledOnNull>,
            parallel: Option<FunctionParallel>,
        }
        let mut body = Body::default();
        loop {
            // Reject a duplicate occurrence of the same attribute group.
            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
                if field.is_some() {
                    return Err(ParserError::ParserError(format!(
                        "{name} specified more than once",
                    )));
                }
                Ok(())
            }
            if self.parse_keyword(Keyword::AS) {
                ensure_not_set(&body.function_body, "AS")?;
                body.function_body = Some(CreateFunctionBody::AsBeforeOptions(
                    self.parse_create_function_body_string()?,
                ));
            } else if self.parse_keyword(Keyword::LANGUAGE) {
                ensure_not_set(&body.language, "LANGUAGE")?;
                body.language = Some(self.parse_identifier(false)?);
            } else if self.parse_keyword(Keyword::IMMUTABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Immutable);
            } else if self.parse_keyword(Keyword::STABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Stable);
            } else if self.parse_keyword(Keyword::VOLATILE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Volatile);
            } else if self.parse_keywords(&[
                Keyword::CALLED,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
            } else if self.parse_keywords(&[
                Keyword::RETURNS,
                Keyword::NULL,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
            } else if self.parse_keyword(Keyword::STRICT) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::Strict);
            } else if self.parse_keyword(Keyword::PARALLEL) {
                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
                if self.parse_keyword(Keyword::UNSAFE) {
                    body.parallel = Some(FunctionParallel::Unsafe);
                } else if self.parse_keyword(Keyword::RESTRICTED) {
                    body.parallel = Some(FunctionParallel::Restricted);
                } else if self.parse_keyword(Keyword::SAFE) {
                    body.parallel = Some(FunctionParallel::Safe);
                } else {
                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
                }
            } else if self.parse_keyword(Keyword::RETURN) {
                ensure_not_set(&body.function_body, "RETURN")?;
                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
            } else {
                // No attribute keyword matched: end of attribute list.
                break;
            }
        }

        Ok(Statement::CreateFunction {
            or_replace,
            temporary,
            name,
            args,
            return_type,
            behavior: body.behavior,
            called_on_null: body.called_on_null,
            parallel: body.parallel,
            language: body.language,
            function_body: body.function_body,
            // The remaining fields do not apply to the Postgres syntax.
            if_not_exists: false,
            using: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
        })
    }
4046
4047 fn parse_hive_create_function(
4051 &mut self,
4052 or_replace: bool,
4053 temporary: bool,
4054 ) -> Result<Statement, ParserError> {
4055 let name = self.parse_object_name(false)?;
4056 self.expect_keyword(Keyword::AS)?;
4057
4058 let as_ = self.parse_create_function_body_string()?;
4059 let using = self.parse_optional_create_function_using()?;
4060
4061 Ok(Statement::CreateFunction {
4062 or_replace,
4063 temporary,
4064 name,
4065 function_body: Some(CreateFunctionBody::AsBeforeOptions(as_)),
4066 using,
4067 if_not_exists: false,
4068 args: None,
4069 return_type: None,
4070 behavior: None,
4071 called_on_null: None,
4072 parallel: None,
4073 language: None,
4074 determinism_specifier: None,
4075 options: None,
4076 remote_connection: None,
4077 })
4078 }
4079
    /// Parse a BigQuery `CREATE FUNCTION` statement: name, typed parameter
    /// list, optional `RETURNS`, `[NOT] DETERMINISTIC`, `LANGUAGE`,
    /// `REMOTE WITH CONNECTION`, `OPTIONS(...)` and `AS` clauses.
    fn parse_bigquery_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;

        // BigQuery parameters are always `name type` — no mode or default.
        let parse_function_param =
            |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
                let name = parser.parse_identifier(false)?;
                let data_type = parser.parse_data_type()?;
                Ok(OperateFunctionArg {
                    mode: None,
                    name: Some(name),
                    data_type,
                    default_expr: None,
                })
            };
        self.expect_token(&Token::LParen)?;
        let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
        self.expect_token(&Token::RParen)?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_data_type()?)
        } else {
            None
        };

        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
            Some(FunctionDeterminismSpecifier::Deterministic)
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
            Some(FunctionDeterminismSpecifier::NotDeterministic)
        } else {
            None
        };

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier(false)?)
        } else {
            None
        };

        let remote_connection =
            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };

        // OPTIONS may appear either before or after the AS clause; try the
        // "before" position first.
        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

        // A remote function has no body; otherwise `AS <expr>` is required.
        let function_body = if remote_connection.is_none() {
            self.expect_keyword(Keyword::AS)?;
            let expr = self.parse_expr()?;
            if options.is_none() {
                // OPTIONS did not precede AS, so it may follow the body;
                // record which side it was found on.
                options = self.maybe_parse_options(Keyword::OPTIONS)?;
                Some(CreateFunctionBody::AsBeforeOptions(expr))
            } else {
                Some(CreateFunctionBody::AsAfterOptions(expr))
            }
        } else {
            None
        };

        Ok(Statement::CreateFunction {
            or_replace,
            temporary,
            if_not_exists,
            name,
            args: Some(args),
            return_type,
            function_body,
            language,
            determinism_specifier,
            options,
            remote_connection,
            // The remaining fields do not apply to the BigQuery syntax.
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
        })
    }
4168
4169 fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
4170 let mode = if self.parse_keyword(Keyword::IN) {
4171 Some(ArgMode::In)
4172 } else if self.parse_keyword(Keyword::OUT) {
4173 Some(ArgMode::Out)
4174 } else if self.parse_keyword(Keyword::INOUT) {
4175 Some(ArgMode::InOut)
4176 } else {
4177 None
4178 };
4179
4180 let mut name = None;
4182 let mut data_type = self.parse_data_type()?;
4183 if let DataType::Custom(n, _) = &data_type {
4184 name = Some(n.0[0].clone());
4186 data_type = self.parse_data_type()?;
4187 }
4188
4189 let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
4190 {
4191 Some(self.parse_expr()?)
4192 } else {
4193 None
4194 };
4195 Ok(OperateFunctionArg {
4196 mode,
4197 name,
4198 data_type,
4199 default_expr,
4200 })
4201 }
4202
4203 pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
4209 if !dialect_of!(self is PostgreSqlDialect | GenericDialect) {
4210 self.prev_token();
4211 return self.expected("an object type after DROP", self.peek_token());
4212 }
4213 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
4214 let trigger_name = self.parse_object_name(false)?;
4215 self.expect_keyword(Keyword::ON)?;
4216 let table_name = self.parse_object_name(false)?;
4217 let option = self
4218 .parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT])
4219 .map(|keyword| match keyword {
4220 Keyword::CASCADE => ReferentialAction::Cascade,
4221 Keyword::RESTRICT => ReferentialAction::Restrict,
4222 _ => unreachable!(),
4223 });
4224 Ok(Statement::DropTrigger {
4225 if_exists,
4226 trigger_name,
4227 table_name,
4228 option,
4229 })
4230 }
4231
    /// Parse a `CREATE [CONSTRAINT] TRIGGER` statement (PostgreSQL syntax):
    /// period, one or more `OR`-separated events, target table, optional
    /// `FROM`/constraint/`REFERENCING` clauses, the `FOR [EACH] ROW|STATEMENT`
    /// granularity, an optional `WHEN` condition, and the `EXECUTE` body.
    pub fn parse_create_trigger(
        &mut self,
        or_replace: bool,
        is_constraint: bool,
    ) -> Result<Statement, ParserError> {
        // Only supported for Postgres-compatible dialects; rewind so the
        // error message points at the token following CREATE.
        if !dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            self.prev_token();
            return self.expected("an object type after CREATE", self.peek_token());
        }

        let name = self.parse_object_name(false)?;
        // BEFORE | AFTER | INSTEAD OF
        let period = self.parse_trigger_period()?;

        // e.g. `INSERT OR UPDATE OF a, b OR DELETE`
        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
        self.expect_keyword(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        // Optional `FROM <referenced table>` (constraint triggers).
        // NOTE(review): a parse failure after FROM is swallowed by `.ok()`.
        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
            self.parse_object_name(true).ok()
        } else {
            None
        };

        // Deferrability characteristics (constraint triggers).
        let characteristics = self.parse_constraint_characteristics()?;

        // Zero or more `{OLD | NEW} TABLE [AS] name` transition relations.
        let mut referencing = vec![];
        if self.parse_keyword(Keyword::REFERENCING) {
            while let Some(refer) = self.parse_trigger_referencing()? {
                referencing.push(refer);
            }
        }

        // `FOR [EACH] { ROW | STATEMENT }` — EACH itself is optional.
        self.expect_keyword(Keyword::FOR)?;
        let include_each = self.parse_keyword(Keyword::EACH);
        let trigger_object =
            match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
                Keyword::ROW => TriggerObject::Row,
                Keyword::STATEMENT => TriggerObject::Statement,
                _ => unreachable!(),
            };

        // Optional `WHEN <condition>`.
        let condition = self
            .parse_keyword(Keyword::WHEN)
            .then(|| self.parse_expr())
            .transpose()?;

        self.expect_keyword(Keyword::EXECUTE)?;

        let exec_body = self.parse_trigger_exec_body()?;

        Ok(Statement::CreateTrigger {
            or_replace,
            is_constraint,
            name,
            period,
            events,
            table_name,
            referenced_table_name,
            referencing,
            trigger_object,
            include_each,
            condition,
            exec_body,
            characteristics,
        })
    }
4298
4299 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
4300 Ok(
4301 match self.expect_one_of_keywords(&[
4302 Keyword::BEFORE,
4303 Keyword::AFTER,
4304 Keyword::INSTEAD,
4305 ])? {
4306 Keyword::BEFORE => TriggerPeriod::Before,
4307 Keyword::AFTER => TriggerPeriod::After,
4308 Keyword::INSTEAD => self
4309 .expect_keyword(Keyword::OF)
4310 .map(|_| TriggerPeriod::InsteadOf)?,
4311 _ => unreachable!(),
4312 },
4313 )
4314 }
4315
4316 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
4317 Ok(
4318 match self.expect_one_of_keywords(&[
4319 Keyword::INSERT,
4320 Keyword::UPDATE,
4321 Keyword::DELETE,
4322 Keyword::TRUNCATE,
4323 ])? {
4324 Keyword::INSERT => TriggerEvent::Insert,
4325 Keyword::UPDATE => {
4326 if self.parse_keyword(Keyword::OF) {
4327 let cols = self.parse_comma_separated(|ident| {
4328 Parser::parse_identifier(ident, false)
4329 })?;
4330 TriggerEvent::Update(cols)
4331 } else {
4332 TriggerEvent::Update(vec![])
4333 }
4334 }
4335 Keyword::DELETE => TriggerEvent::Delete,
4336 Keyword::TRUNCATE => TriggerEvent::Truncate,
4337 _ => unreachable!(),
4338 },
4339 )
4340 }
4341
    /// Parse one item of a `CREATE TRIGGER ... REFERENCING` clause:
    /// `{ OLD | NEW } TABLE [AS] <transition relation name>`.
    ///
    /// Returns `Ok(None)` when the next tokens do not start a referencing
    /// item, which ends the caller's loop.
    pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
        // NOTE(review): if `OLD`/`NEW` matches but the following `TABLE`
        // keyword is absent, the `_` arm returns `None` with the `OLD`/`NEW`
        // token apparently left consumed — confirm whether a rewind is
        // intended in that case.
        let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
            Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
                TriggerReferencingType::OldTable
            }
            Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
                TriggerReferencingType::NewTable
            }
            _ => {
                return Ok(None);
            }
        };

        // Optional `AS` before the transition relation name.
        let is_as = self.parse_keyword(Keyword::AS);
        let transition_relation_name = self.parse_object_name(false)?;
        Ok(Some(TriggerReferencing {
            refer_type,
            is_as,
            transition_relation_name,
        }))
    }
4363
4364 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
4365 Ok(TriggerExecBody {
4366 exec_type: match self
4367 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
4368 {
4369 Keyword::FUNCTION => TriggerExecBodyType::Function,
4370 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
4371 _ => unreachable!(),
4372 },
4373 func_desc: self.parse_function_desc()?,
4374 })
4375 }
4376
4377 pub fn parse_create_macro(
4378 &mut self,
4379 or_replace: bool,
4380 temporary: bool,
4381 ) -> Result<Statement, ParserError> {
4382 if dialect_of!(self is DuckDbDialect | GenericDialect) {
4383 let name = self.parse_object_name(false)?;
4384 self.expect_token(&Token::LParen)?;
4385 let args = if self.consume_token(&Token::RParen) {
4386 self.prev_token();
4387 None
4388 } else {
4389 Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
4390 };
4391
4392 self.expect_token(&Token::RParen)?;
4393 self.expect_keyword(Keyword::AS)?;
4394
4395 Ok(Statement::CreateMacro {
4396 or_replace,
4397 temporary,
4398 name,
4399 args,
4400 definition: if self.parse_keyword(Keyword::TABLE) {
4401 MacroDefinition::Table(self.parse_query()?)
4402 } else {
4403 MacroDefinition::Expr(self.parse_expr()?)
4404 },
4405 })
4406 } else {
4407 self.prev_token();
4408 self.expected("an object type after CREATE", self.peek_token())
4409 }
4410 }
4411
4412 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
4413 let name = self.parse_identifier(false)?;
4414
4415 let default_expr =
4416 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
4417 Some(self.parse_expr()?)
4418 } else {
4419 None
4420 };
4421 Ok(MacroArg { name, default_expr })
4422 }
4423
4424 pub fn parse_create_external_table(
4425 &mut self,
4426 or_replace: bool,
4427 ) -> Result<Statement, ParserError> {
4428 self.expect_keyword(Keyword::TABLE)?;
4429 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4430 let table_name = self.parse_object_name(false)?;
4431 let (columns, constraints) = self.parse_columns()?;
4432
4433 let hive_distribution = self.parse_hive_distribution()?;
4434 let hive_formats = self.parse_hive_formats()?;
4435
4436 let file_format = if let Some(ff) = &hive_formats.storage {
4437 match ff {
4438 HiveIOFormat::FileFormat { format } => Some(*format),
4439 _ => None,
4440 }
4441 } else {
4442 None
4443 };
4444 let location = hive_formats.location.clone();
4445 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
4446 Ok(CreateTableBuilder::new(table_name)
4447 .columns(columns)
4448 .constraints(constraints)
4449 .hive_distribution(hive_distribution)
4450 .hive_formats(Some(hive_formats))
4451 .table_properties(table_properties)
4452 .or_replace(or_replace)
4453 .if_not_exists(if_not_exists)
4454 .external(true)
4455 .file_format(file_format)
4456 .location(location)
4457 .build())
4458 }
4459
4460 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
4461 let next_token = self.next_token();
4462 match &next_token.token {
4463 Token::Word(w) => match w.keyword {
4464 Keyword::AVRO => Ok(FileFormat::AVRO),
4465 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
4466 Keyword::ORC => Ok(FileFormat::ORC),
4467 Keyword::PARQUET => Ok(FileFormat::PARQUET),
4468 Keyword::RCFILE => Ok(FileFormat::RCFILE),
4469 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
4470 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
4471 _ => self.expected("fileformat", next_token),
4472 },
4473 _ => self.expected("fileformat", next_token),
4474 }
4475 }
4476
4477 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
4478 let next_token = self.next_token();
4479 match &next_token.token {
4480 Token::Word(w) => match w.keyword {
4481 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
4482 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
4483 Keyword::JSON => Ok(AnalyzeFormat::JSON),
4484 _ => self.expected("fileformat", next_token),
4485 },
4486 _ => self.expected("fileformat", next_token),
4487 }
4488 }
4489
    /// Parse a `CREATE [MATERIALIZED] VIEW` statement, including the
    /// dialect-gated `IF NOT EXISTS`, `WITH (...)`, `CLUSTER BY`,
    /// `OPTIONS(...)`, `TO`, `COMMENT =`, and `WITH NO SCHEMA BINDING`
    /// clauses around the mandatory `AS <query>`.
    pub fn parse_create_view(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword(Keyword::VIEW)?;
        // `IF NOT EXISTS` is only recognized for dialects that support it.
        let if_not_exists = dialect_of!(self is BigQueryDialect|SQLiteDialect|GenericDialect)
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        // BigQuery allows hyphens in unquoted table names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let columns = self.parse_view_columns()?;
        // `options` is filled by whichever option clause appears:
        // `WITH (...)` first, or a dialect-specific `OPTIONS(...)` below.
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        // BigQuery-style `OPTIONS(...)`; a non-empty list overrides any
        // `WITH (...)` options captured above.
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // ClickHouse `TO <table>` target for materialized views.
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // Snowflake `COMMENT = '<text>'` clause.
        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            let next_token = self.next_token();
            match next_token.token {
                Token::SingleQuotedString(str) => Some(str),
                _ => self.expected("string literal", next_token)?,
            }
        } else {
            None
        };

        self.expect_keyword(Keyword::AS)?;
        let query = self.parse_query()?;
        // Redshift `WITH NO SCHEMA BINDING` trailer after the query.
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        Ok(Statement::CreateView {
            name,
            columns,
            query,
            materialized,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            to,
        })
    }
4573
    /// Parse a `CREATE ROLE` statement: one or more role names followed by a
    /// dialect-specific set of options (`[WITH] LOGIN`, `PASSWORD`,
    /// `CONNECTION LIMIT`, `IN ROLE`, ...), each allowed at most once.
    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // `WITH` before the option list is optional; accept and discard it.
        let _ = self.parse_keyword(Keyword::WITH);
        // Which option keywords are recognized depends on the dialect.
        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // Option state: `None`/empty means "not specified"; each option may
        // appear at most once, enforced by the duplicate checks below.
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            // Location of the keyword just consumed, for error reporting;
            // falls back to 0:0 if the token index is somehow out of range.
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.location);
            // Each arm yields Result<(), _>; the trailing `?` propagates the
            // duplicate-option errors produced by `parser_err!`.
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        // `PASSWORD NULL` is distinct from a literal password.
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                Keyword::CONNECTION => {
                    // Only valid as `CONNECTION LIMIT <number>`.
                    self.expect_keyword(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                Keyword::VALID => {
                    // Only valid as `VALID UNTIL <timestamp>`.
                    self.expect_keyword(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                Keyword::IN => {
                    // `IN ROLE <list>` or `IN GROUP <list>`.
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier(false))?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier(false))?;
                            Ok(())
                        }
                    } else {
                        self.expected("ROLE or GROUP after IN", self.peek_token())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier(false))?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier(false))?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier(false))?;
                        Ok(())
                    }
                }
                // Keyword not handled above: stop consuming options.
                _ => break,
            }?
        }

        Ok(Statement::CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        })
    }
4797
4798 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
4799 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
4800 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
4801 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
4802 Some(Keyword::SESSION_USER) => Owner::SessionUser,
4803 Some(_) => unreachable!(),
4804 None => {
4805 match self.parse_identifier(false) {
4806 Ok(ident) => Owner::Ident(ident),
4807 Err(e) => {
4808 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
4809 }
4810 }
4811 },
4812 };
4813 Ok(owner)
4814 }
4815
    /// Parse a `CREATE POLICY` statement (PostgreSQL row-level security):
    /// `CREATE POLICY <name> ON <table> [AS ...] [FOR ...] [TO ...]
    /// [USING (...)] [WITH CHECK (...)]`. All trailing clauses are optional.
    pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier(false)?;
        self.expect_keyword(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        // Optional `AS { PERMISSIVE | RESTRICTIVE }`.
        let policy_type = if self.parse_keyword(Keyword::AS) {
            let keyword =
                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
            Some(match keyword {
                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
                _ => unreachable!(),
            })
        } else {
            None
        };

        // Optional `FOR { ALL | SELECT | INSERT | UPDATE | DELETE }`.
        let command = if self.parse_keyword(Keyword::FOR) {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::ALL,
                Keyword::SELECT,
                Keyword::INSERT,
                Keyword::UPDATE,
                Keyword::DELETE,
            ])?;
            Some(match keyword {
                Keyword::ALL => CreatePolicyCommand::All,
                Keyword::SELECT => CreatePolicyCommand::Select,
                Keyword::INSERT => CreatePolicyCommand::Insert,
                Keyword::UPDATE => CreatePolicyCommand::Update,
                Keyword::DELETE => CreatePolicyCommand::Delete,
                _ => unreachable!(),
            })
        } else {
            None
        };

        // Optional `TO <role>, ...` — roles may also be the special keywords
        // handled by `parse_owner`.
        let to = if self.parse_keyword(Keyword::TO) {
            Some(self.parse_comma_separated(|p| p.parse_owner())?)
        } else {
            None
        };

        // Optional `USING (<expr>)` visibility predicate.
        let using = if self.parse_keyword(Keyword::USING) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        // Optional `WITH CHECK (<expr>)` write predicate.
        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        Ok(CreatePolicy {
            name,
            table_name,
            policy_type,
            command,
            to,
            using,
            with_check,
        })
    }
4896
    /// Parses a `DROP` statement (the `DROP` keyword is already consumed).
    ///
    /// FUNCTION, POLICY, PROCEDURE, SECRET and TRIGGER are delegated to
    /// dedicated sub-parsers; every other object kind is parsed here into a
    /// plain `Statement::Drop`.
    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
        // Dialect-gated modifiers that may precede the object kind:
        // `DROP TEMPORARY ...` (MySQL/DuckDB/generic) and
        // `DROP PERSISTENT ...` (DuckDB secrets).
        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
            && self.parse_keyword(Keyword::TEMPORARY);
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();

        let object_type = if self.parse_keyword(Keyword::TABLE) {
            ObjectType::Table
        } else if self.parse_keyword(Keyword::VIEW) {
            ObjectType::View
        } else if self.parse_keyword(Keyword::INDEX) {
            ObjectType::Index
        } else if self.parse_keyword(Keyword::ROLE) {
            ObjectType::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            ObjectType::Schema
        } else if self.parse_keyword(Keyword::DATABASE) {
            ObjectType::Database
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            ObjectType::Sequence
        } else if self.parse_keyword(Keyword::STAGE) {
            ObjectType::Stage
        } else if self.parse_keyword(Keyword::TYPE) {
            ObjectType::Type
        } else if self.parse_keyword(Keyword::FUNCTION) {
            return self.parse_drop_function();
        } else if self.parse_keyword(Keyword::POLICY) {
            return self.parse_drop_policy();
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            return self.parse_drop_procedure();
        } else if self.parse_keyword(Keyword::SECRET) {
            return self.parse_drop_secret(temporary, persistent);
        } else if self.parse_keyword(Keyword::TRIGGER) {
            return self.parse_drop_trigger();
        } else {
            return self.expected(
                "TABLE, VIEW, INDEX, ROLE, SCHEMA, DATABASE, FUNCTION, PROCEDURE, STAGE, TRIGGER, SECRET, SEQUENCE, or TYPE after DROP",
                self.peek_token(),
            );
        };
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        // Multiple comma-separated object names may be dropped at once.
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // Capture the location before the trailing modifiers so error
        // messages below point at where they were written.
        let loc = self.peek_token().location;
        let cascade = self.parse_keyword(Keyword::CASCADE);
        let restrict = self.parse_keyword(Keyword::RESTRICT);
        let purge = self.parse_keyword(Keyword::PURGE);
        if cascade && restrict {
            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
        }
        if object_type == ObjectType::Role && (cascade || restrict || purge) {
            return parser_err!(
                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
                loc
            );
        }
        Ok(Statement::Drop {
            object_type,
            if_exists,
            names,
            cascade,
            restrict,
            purge,
            temporary,
        })
    }
4966
4967 fn parse_optional_referential_action(&mut self) -> Option<ReferentialAction> {
4968 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
4969 Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
4970 Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
4971 _ => None,
4972 }
4973 }
4974
4975 fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
4980 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
4981 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
4982 let option = self.parse_optional_referential_action();
4983 Ok(Statement::DropFunction {
4984 if_exists,
4985 func_desc,
4986 option,
4987 })
4988 }
4989
4990 fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
4996 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
4997 let name = self.parse_identifier(false)?;
4998 self.expect_keyword(Keyword::ON)?;
4999 let table_name = self.parse_object_name(false)?;
5000 let option = self.parse_optional_referential_action();
5001 Ok(Statement::DropPolicy {
5002 if_exists,
5003 name,
5004 table_name,
5005 option,
5006 })
5007 }
5008
5009 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
5014 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5015 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
5016 let option = self.parse_optional_referential_action();
5017 Ok(Statement::DropProcedure {
5018 if_exists,
5019 proc_desc,
5020 option,
5021 })
5022 }
5023
5024 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
5025 let name = self.parse_object_name(false)?;
5026
5027 let args = if self.consume_token(&Token::LParen) {
5028 if self.consume_token(&Token::RParen) {
5029 None
5030 } else {
5031 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
5032 self.expect_token(&Token::RParen)?;
5033 Some(args)
5034 }
5035 } else {
5036 None
5037 };
5038
5039 Ok(FunctionDesc { name, args })
5040 }
5041
5042 fn parse_drop_secret(
5044 &mut self,
5045 temporary: bool,
5046 persistent: bool,
5047 ) -> Result<Statement, ParserError> {
5048 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5049 let name = self.parse_identifier(false)?;
5050 let storage_specifier = if self.parse_keyword(Keyword::FROM) {
5051 self.parse_identifier(false).ok()
5052 } else {
5053 None
5054 };
5055 let temp = match (temporary, persistent) {
5056 (true, false) => Some(true),
5057 (false, true) => Some(false),
5058 (false, false) => None,
5059 _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
5060 };
5061
5062 Ok(Statement::DropSecret {
5063 if_exists,
5064 temporary: temp,
5065 name,
5066 storage_specifier,
5067 })
5068 }
5069
    /// Parses a `DECLARE` statement (the keyword is already consumed).
    ///
    /// BigQuery, Snowflake and MsSql have their own `DECLARE` grammars and
    /// are dispatched to dedicated sub-parsers; the fallback below implements
    /// the PostgreSQL-style cursor declaration:
    /// `DECLARE name [BINARY] [INSENSITIVE | ASENSITIVE] [[NO] SCROLL]
    ///  CURSOR [{WITH | WITHOUT} HOLD] FOR query`
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier(false)?;

        // Always Some(..) here so the AST records that the cursor form was
        // used (the flag itself is whether BINARY was present).
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        // NOTE(review): INSENSITIVE maps to Some(true) and ASENSITIVE to
        // Some(false) — the field appears to record "insensitivity";
        // confirm against the `Declare` AST docs.
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        self.expect_keyword(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // Optional `WITH HOLD` / `WITHOUT HOLD`.
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    // parse_one_of_keywords only returns WITH or WITHOUT here
                    _ => unreachable!(),
                }
            }
            None => None,
        };

        self.expect_keyword(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
5142
    /// Parses a BigQuery `DECLARE` statement:
    /// `DECLARE name [, ...] [<data type>] [DEFAULT <expr>]`.
    ///
    /// At least one of the data type or the `DEFAULT` clause must be present;
    /// when the data type is omitted, `DEFAULT` becomes mandatory.
    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
        let names = self.parse_comma_separated(|parser| Parser::parse_identifier(parser, false))?;

        // A `DEFAULT` word directly after the names means the data type was
        // omitted — don't try to parse one.
        let data_type = match self.peek_token().token {
            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
            _ => Some(self.parse_data_type()?),
        };

        let expr = if data_type.is_some() {
            // Type given: the `DEFAULT <expr>` clause is optional.
            if self.parse_keyword(Keyword::DEFAULT) {
                Some(self.parse_expr()?)
            } else {
                None
            }
        } else {
            // No type: `DEFAULT <expr>` is required.
            self.expect_keyword(Keyword::DEFAULT)?;
            Some(self.parse_expr()?)
        };

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names,
                data_type,
                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
                declare_type: None,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query: None,
            }],
        })
    }
5185
    /// Parses a Snowflake `DECLARE` block containing one or more
    /// semicolon-separated declarations. Supported forms per declaration:
    /// `name CURSOR FOR {query | expr}`, `name RESULTSET [:= expr]`,
    /// `name EXCEPTION [(code, 'message')]`, and the plain variable form
    /// `name [type] [{DEFAULT | :=} expr]`.
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier(false)?;
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword(Keyword::FOR)?;
                    // `CURSOR FOR SELECT ...` is a query; anything else is
                    // treated as an expression (e.g. a variable reference).
                    match self.peek_token().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    // Optional initializer; a bare `RESULTSET;` has none.
                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    // Optional `(code, 'message')` payload.
                    let assigned_expr = if self.peek_token().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: either an immediate initializer, or a
                    // data type followed by an optional initializer.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = self.peek_token().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            if self.consume_token(&Token::SemiColon) {
                match self.peek_token().token {
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        // Next word is not a SQL keyword: assume it starts
                        // another declaration in the same DECLARE block.
                        continue;
                    }
                    _ => {
                        // The semicolon ended the DECLARE block — put it back
                        // so the caller sees the statement terminator.
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
5301
5302 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
5314 let mut stmts = vec![];
5315
5316 loop {
5317 let name = {
5318 let ident = self.parse_identifier(false)?;
5319 if !ident.value.starts_with('@') {
5320 Err(ParserError::TokenizerError(
5321 "Invalid MsSql variable declaration.".to_string(),
5322 ))
5323 } else {
5324 Ok(ident)
5325 }
5326 }?;
5327
5328 let (declare_type, data_type) = match self.peek_token().token {
5329 Token::Word(w) => match w.keyword {
5330 Keyword::CURSOR => {
5331 self.next_token();
5332 (Some(DeclareType::Cursor), None)
5333 }
5334 Keyword::AS => {
5335 self.next_token();
5336 (None, Some(self.parse_data_type()?))
5337 }
5338 _ => (None, Some(self.parse_data_type()?)),
5339 },
5340 _ => (None, Some(self.parse_data_type()?)),
5341 };
5342
5343 let assignment = self.parse_mssql_variable_declaration_expression()?;
5344
5345 stmts.push(Declare {
5346 names: vec![name],
5347 data_type,
5348 assignment,
5349 declare_type,
5350 binary: None,
5351 sensitive: None,
5352 scroll: None,
5353 hold: None,
5354 for_query: None,
5355 });
5356
5357 if self.next_token() != Token::Comma {
5358 break;
5359 }
5360 }
5361
5362 Ok(Statement::Declare { stmts })
5363 }
5364
5365 pub fn parse_snowflake_variable_declaration_expression(
5373 &mut self,
5374 ) -> Result<Option<DeclareAssignment>, ParserError> {
5375 Ok(match self.peek_token().token {
5376 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
5377 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
5379 }
5380 Token::Assignment => {
5381 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
5383 self.parse_expr()?,
5384 )))
5385 }
5386 _ => None,
5387 })
5388 }
5389
5390 pub fn parse_mssql_variable_declaration_expression(
5397 &mut self,
5398 ) -> Result<Option<DeclareAssignment>, ParserError> {
5399 Ok(match self.peek_token().token {
5400 Token::Eq => {
5401 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
5403 self.parse_expr()?,
5404 )))
5405 }
5406 _ => None,
5407 })
5408 }
5409
    /// Parses a `FETCH` cursor statement (the keyword is already consumed):
    /// `FETCH [direction] {FROM | IN} cursor [INTO target]`, where direction
    /// is one of NEXT, PRIOR, FIRST, LAST, ABSOLUTE n, RELATIVE n,
    /// FORWARD [n | ALL], BACKWARD [n | ALL], ALL, or a bare count.
    pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
        let direction = if self.parse_keyword(Keyword::NEXT) {
            FetchDirection::Next
        } else if self.parse_keyword(Keyword::PRIOR) {
            FetchDirection::Prior
        } else if self.parse_keyword(Keyword::FIRST) {
            FetchDirection::First
        } else if self.parse_keyword(Keyword::LAST) {
            FetchDirection::Last
        } else if self.parse_keyword(Keyword::ABSOLUTE) {
            FetchDirection::Absolute {
                limit: self.parse_number_value()?,
            }
        } else if self.parse_keyword(Keyword::RELATIVE) {
            FetchDirection::Relative {
                limit: self.parse_number_value()?,
            }
        } else if self.parse_keyword(Keyword::FORWARD) {
            // FORWARD must be followed by ALL or a count (no bare FORWARD).
            if self.parse_keyword(Keyword::ALL) {
                FetchDirection::ForwardAll
            } else {
                FetchDirection::Forward {
                    limit: Some(self.parse_number_value()?),
                }
            }
        } else if self.parse_keyword(Keyword::BACKWARD) {
            if self.parse_keyword(Keyword::ALL) {
                FetchDirection::BackwardAll
            } else {
                FetchDirection::Backward {
                    limit: Some(self.parse_number_value()?),
                }
            }
        } else if self.parse_keyword(Keyword::ALL) {
            FetchDirection::All
        } else {
            // No direction keyword: expect a bare row count.
            FetchDirection::Count {
                limit: self.parse_number_value()?,
            }
        };

        self.expect_one_of_keywords(&[Keyword::FROM, Keyword::IN])?;

        let name = self.parse_identifier(false)?;

        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        Ok(Statement::Fetch {
            name,
            direction,
            into,
        })
    }
5470
5471 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
5472 let object_type = if self.parse_keyword(Keyword::ALL) {
5473 DiscardObject::ALL
5474 } else if self.parse_keyword(Keyword::PLANS) {
5475 DiscardObject::PLANS
5476 } else if self.parse_keyword(Keyword::SEQUENCES) {
5477 DiscardObject::SEQUENCES
5478 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
5479 DiscardObject::TEMP
5480 } else {
5481 return self.expected(
5482 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
5483 self.peek_token(),
5484 );
5485 };
5486 Ok(Statement::Discard { object_type })
5487 }
5488
    /// Parses a `CREATE [UNIQUE] INDEX` statement after `CREATE [UNIQUE]
    /// INDEX` has been consumed (`unique` tells us whether UNIQUE was seen):
    /// `[CONCURRENTLY] [IF NOT EXISTS] [name] ON table [USING method]
    ///  (order-by-exprs) [INCLUDE (cols)] [NULLS [NOT] DISTINCT]
    ///  [WITH (params)] [WHERE predicate]`
    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        // The index name is optional (`CREATE INDEX ON t(...)`) — but when
        // IF NOT EXISTS was given, a name must follow.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            self.expect_keyword(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };
        let table_name = self.parse_object_name(false)?;
        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_identifier(false)?)
        } else {
            None
        };
        self.expect_token(&Token::LParen)?;
        // Index keys are order-by expressions (expr [ASC|DESC] [NULLS ...]).
        let columns = self.parse_comma_separated(Parser::parse_order_by_expr)?;
        self.expect_token(&Token::RParen)?;

        // Optional `INCLUDE (col, ...)` — non-key covering columns.
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier(false))?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // `NULLS DISTINCT` => Some(true), `NULLS NOT DISTINCT` => Some(false).
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        // `WITH (storage parameters)` — only for dialects that support it.
        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Optional partial-index predicate.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Statement::CreateIndex(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
        }))
    }
5557
5558 pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
5559 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5560 let name = self.parse_identifier(false)?;
5561
5562 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
5563 let schema = if self.parse_keyword(Keyword::SCHEMA) {
5564 Some(self.parse_identifier(false)?)
5565 } else {
5566 None
5567 };
5568
5569 let version = if self.parse_keyword(Keyword::VERSION) {
5570 Some(self.parse_identifier(false)?)
5571 } else {
5572 None
5573 };
5574
5575 let cascade = self.parse_keyword(Keyword::CASCADE);
5576
5577 (schema, version, cascade)
5578 } else {
5579 (None, None, false)
5580 };
5581
5582 Ok(Statement::CreateExtension {
5583 name,
5584 if_not_exists,
5585 schema,
5586 version,
5587 cascade,
5588 })
5589 }
5590
5591 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
5593 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
5594 self.expect_token(&Token::LParen)?;
5595 let columns = self.parse_comma_separated(Parser::parse_column_def)?;
5596 self.expect_token(&Token::RParen)?;
5597 Ok(HiveDistributionStyle::PARTITIONED { columns })
5598 } else {
5599 Ok(HiveDistributionStyle::NONE)
5600 }
5601 }
5602
5603 pub fn parse_hive_formats(&mut self) -> Result<HiveFormat, ParserError> {
5604 let mut hive_format = HiveFormat::default();
5605 loop {
5606 match self.parse_one_of_keywords(&[
5607 Keyword::ROW,
5608 Keyword::STORED,
5609 Keyword::LOCATION,
5610 Keyword::WITH,
5611 ]) {
5612 Some(Keyword::ROW) => {
5613 hive_format.row_format = Some(self.parse_row_format()?);
5614 }
5615 Some(Keyword::STORED) => {
5616 self.expect_keyword(Keyword::AS)?;
5617 if self.parse_keyword(Keyword::INPUTFORMAT) {
5618 let input_format = self.parse_expr()?;
5619 self.expect_keyword(Keyword::OUTPUTFORMAT)?;
5620 let output_format = self.parse_expr()?;
5621 hive_format.storage = Some(HiveIOFormat::IOF {
5622 input_format,
5623 output_format,
5624 });
5625 } else {
5626 let format = self.parse_file_format()?;
5627 hive_format.storage = Some(HiveIOFormat::FileFormat { format });
5628 }
5629 }
5630 Some(Keyword::LOCATION) => {
5631 hive_format.location = Some(self.parse_literal_string()?);
5632 }
5633 Some(Keyword::WITH) => {
5634 self.prev_token();
5635 let properties = self
5636 .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
5637 if !properties.is_empty() {
5638 hive_format.serde_properties = Some(properties);
5639 } else {
5640 break;
5641 }
5642 }
5643 None => break,
5644 _ => break,
5645 }
5646 }
5647
5648 Ok(hive_format)
5649 }
5650
    /// Parses a Hive `ROW FORMAT` clause (after `ROW` has been consumed):
    /// either `ROW FORMAT SERDE '<class>'` or `ROW FORMAT DELIMITED`
    /// followed by any number of delimiter sub-clauses
    /// (`FIELDS TERMINATED BY`, `FIELDS ... ESCAPED BY`,
    /// `COLLECTION ITEMS TERMINATED BY`, `MAP KEYS TERMINATED BY`,
    /// `LINES TERMINATED BY`, `NULL DEFINED AS`).
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            _ => {
                // DELIMITED (or neither keyword): collect delimiter
                // sub-clauses until an unrecognized token.
                let mut row_delimiters = vec![];

                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        Some(Keyword::FIELDS) => {
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
                                    char: self.parse_identifier(false)?,
                                });

                                // Optional `ESCAPED BY` rider on FIELDS.
                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                    row_delimiters.push(HiveRowDelimiter {
                                        delimiter: HiveDelimiter::FieldsEscapedBy,
                                        char: self.parse_identifier(false)?,
                                    });
                                }
                            } else {
                                // NOTE(review): a matched leading keyword
                                // without its tail (e.g. a bare FIELDS) is
                                // consumed before the loop breaks — confirm
                                // this is intended.
                                break;
                            }
                        }
                        Some(Keyword::COLLECTION) => {
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                    char: self.parse_identifier(false)?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::MAP) => {
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                    char: self.parse_identifier(false)?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::LINES) => {
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::LinesTerminatedBy,
                                    char: self.parse_identifier(false)?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::NULL) => {
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::NullDefinedAs,
                                    char: self.parse_identifier(false)?,
                                });
                            } else {
                                break;
                            }
                        }
                        _ => {
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
5746
5747 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
5748 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
5749 Ok(Some(self.parse_identifier(false)?))
5750 } else {
5751 Ok(None)
5752 }
5753 }
5754
    /// Parses the body of a `CREATE TABLE` statement, after the caller has
    /// consumed `CREATE [OR REPLACE] [TEMPORARY|GLOBAL|TRANSIENT] TABLE` and
    /// passes those modifiers in as flags.
    ///
    /// Handles the clause soup from several dialects (Hive, MySQL,
    /// ClickHouse, BigQuery, SQLite, ...). Clauses are parsed strictly in the
    /// order they appear below; the result is assembled through
    /// `CreateTableBuilder`.
    pub fn parse_create_table(
        &mut self,
        or_replace: bool,
        temporary: bool,
        global: Option<bool>,
        transient: bool,
    ) -> Result<Statement, ParserError> {
        // BigQuery allows unquoted hyphens in project-qualified names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;

        let on_cluster = self.parse_optional_on_cluster()?;

        // `LIKE`/`ILIKE <table>` — copy another table's definition.
        let like = if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        // Snowflake `CLONE <table>`.
        let clone = if self.parse_keyword(Keyword::CLONE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        // Parenthesized column definitions and table constraints.
        let (columns, constraints) = self.parse_columns()?;
        // Hive puts a bare `COMMENT '...'` right after the column defs;
        // other dialects use the `COMMENT [=] '...'` form handled further
        // below (hence `mut`).
        let mut comment = if dialect_of!(self is HiveDialect)
            && self.parse_keyword(Keyword::COMMENT)
        {
            let next_token = self.next_token();
            match next_token.token {
                Token::SingleQuotedString(str) => Some(CommentDef::AfterColumnDefsWithoutEq(str)),
                _ => self.expected("comment", next_token)?,
            }
        } else {
            None
        };

        // SQLite `WITHOUT ROWID`.
        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);

        // Hive-specific clauses.
        let hive_distribution = self.parse_hive_distribution()?;
        let clustered_by = self.parse_optional_clustered_by()?;
        let hive_formats = self.parse_hive_formats()?;
        let with_options = self.parse_options(Keyword::WITH)?;
        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;

        // MySQL/ClickHouse `ENGINE = name[(params)]`.
        let engine = if self.parse_keyword(Keyword::ENGINE) {
            self.expect_token(&Token::Eq)?;
            let next_token = self.next_token();
            match next_token.token {
                Token::Word(w) => {
                    let name = w.value;
                    let parameters = if self.peek_token() == Token::LParen {
                        Some(self.parse_parenthesized_identifiers()?)
                    } else {
                        None
                    };
                    Some(TableEngine { name, parameters })
                }
                _ => self.expected("identifier", next_token)?,
            }
        } else {
            None
        };

        // MySQL `AUTO_INCREMENT [=] n` — the `=` is optional.
        let auto_increment_offset = if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            let _ = self.consume_token(&Token::Eq);
            let next_token = self.next_token();
            match next_token.token {
                Token::Number(s, _) => Some(Self::parse::<u32>(s, next_token.location)?),
                _ => self.expected("literal int", next_token)?,
            }
        } else {
            None
        };

        // ClickHouse table-level `PRIMARY KEY <expr>`.
        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // `ORDER BY (expr, ...)` or `ORDER BY expr` (parens distinguish the
        // One/Many AST variants); `ORDER BY ()` yields an empty list.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            if self.consume_token(&Token::LParen) {
                let columns = if self.peek_token() != Token::RParen {
                    self.parse_comma_separated(|p| p.parse_expr())?
                } else {
                    vec![]
                };
                self.expect_token(&Token::RParen)?;
                Some(OneOrManyWithParens::Many(columns))
            } else {
                Some(OneOrManyWithParens::One(self.parse_expr()?))
            }
        } else {
            None
        };

        // PARTITION BY / CLUSTER BY / OPTIONS (BigQuery and friends).
        let create_table_config = self.parse_optional_create_table_config()?;

        // MySQL `DEFAULT CHARSET = <word>`.
        let default_charset = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
            self.expect_token(&Token::Eq)?;
            let next_token = self.next_token();
            match next_token.token {
                Token::Word(w) => Some(w.value),
                _ => self.expected("identifier", next_token)?,
            }
        } else {
            None
        };

        // MySQL `COLLATE = <word>`.
        let collation = if self.parse_keywords(&[Keyword::COLLATE]) {
            self.expect_token(&Token::Eq)?;
            let next_token = self.next_token();
            match next_token.token {
                Token::Word(w) => Some(w.value),
                _ => self.expected("identifier", next_token)?,
            }
        } else {
            None
        };

        // Temp-table behavior: `ON COMMIT {DELETE ROWS | PRESERVE ROWS | DROP}`.
        let on_commit: Option<OnCommit> =
            if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT, Keyword::DELETE, Keyword::ROWS])
            {
                Some(OnCommit::DeleteRows)
            } else if self.parse_keywords(&[
                Keyword::ON,
                Keyword::COMMIT,
                Keyword::PRESERVE,
                Keyword::ROWS,
            ]) {
                Some(OnCommit::PreserveRows)
            } else if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT, Keyword::DROP]) {
                Some(OnCommit::Drop)
            } else {
                None
            };

        // SQLite `STRICT`.
        let strict = self.parse_keyword(Keyword::STRICT);

        // Non-Hive `COMMENT [=] '...'`: rewind the matched keyword so the
        // helper can re-parse the whole clause.
        if !dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
            self.prev_token();
            comment = self.parse_optional_inline_comment()?
        };

        // `AS <query>` (CTAS).
        let query = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_query()?)
        } else {
            None
        };

        Ok(CreateTableBuilder::new(table_name)
            .temporary(temporary)
            .columns(columns)
            .constraints(constraints)
            .with_options(with_options)
            .table_properties(table_properties)
            .or_replace(or_replace)
            .if_not_exists(if_not_exists)
            .transient(transient)
            .hive_distribution(hive_distribution)
            .hive_formats(Some(hive_formats))
            .global(global)
            .query(query)
            .without_rowid(without_rowid)
            .like(like)
            .clone_clause(clone)
            .engine(engine)
            .comment(comment)
            .auto_increment_offset(auto_increment_offset)
            .order_by(order_by)
            .default_charset(default_charset)
            .collation(collation)
            .on_commit(on_commit)
            .on_cluster(on_cluster)
            .clustered_by(clustered_by)
            .partition_by(create_table_config.partition_by)
            .cluster_by(create_table_config.cluster_by)
            .options(create_table_config.options)
            .primary_key(primary_key)
            .strict(strict)
            .build())
    }
5950
    /// Parses the optional dialect-specific `CREATE TABLE` configuration
    /// clauses: `PARTITION BY <expr>` (BigQuery/PostgreSQL/generic), plus
    /// `CLUSTER BY <idents>` and `OPTIONS (...)` (BigQuery/generic).
    /// Absent clauses are returned as `None`.
    fn parse_optional_create_table_config(
        &mut self,
    ) -> Result<CreateTableConfiguration, ParserError> {
        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        let mut cluster_by = None;
        let mut options = None;
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
                // BigQuery cluster keys are bare identifiers, not wrapped in
                // parentheses.
                cluster_by = Some(WrappedCollection::NoWrapping(
                    self.parse_comma_separated(|p| p.parse_identifier(false))?,
                ));
            };

            // Peek for `OPTIONS` before committing to the options parser.
            if let Token::Word(word) = self.peek_token().token {
                if word.keyword == Keyword::OPTIONS {
                    options = Some(self.parse_options(Keyword::OPTIONS)?);
                }
            };
        }

        Ok(CreateTableConfiguration {
            partition_by,
            cluster_by,
            options,
        })
    }
5988
5989 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
5990 let comment = if self.parse_keyword(Keyword::COMMENT) {
5991 let has_eq = self.consume_token(&Token::Eq);
5992 let next_token = self.next_token();
5993 match next_token.token {
5994 Token::SingleQuotedString(str) => Some(if has_eq {
5995 CommentDef::WithEq(str)
5996 } else {
5997 CommentDef::WithoutEq(str)
5998 }),
5999 _ => self.expected("comment", next_token)?,
6000 }
6001 } else {
6002 None
6003 };
6004 Ok(comment)
6005 }
6006
    /// Parses an optional parenthesized procedure parameter list.
    ///
    /// Both a missing list and an empty `()` yield `Some(vec![])`; the
    /// return type is `Option<...>` for compatibility with the AST, and this
    /// implementation never returns `Ok(None)`.
    pub fn parse_optional_procedure_parameters(
        &mut self,
    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
        let mut params = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Some(params));
        }
        loop {
            if let Token::Word(_) = self.peek_token().token {
                params.push(self.parse_procedure_param()?)
            }
            // Accept `,` between parameters and `)` to close; anything else
            // is a syntax error.
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                break;
            } else if !comma {
                return self.expected("',' or ')' after parameter definition", self.peek_token());
            }
        }
        Ok(Some(params))
    }
6028
    /// Parses the parenthesized column-definition/constraint list of a
    /// `CREATE TABLE`. A missing list or an empty `()` returns two empty
    /// vectors. A trailing comma before `)` is accepted only when the parser
    /// option `trailing_commas` is enabled.
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            // Table constraints and column definitions may be interleaved;
            // try a constraint first, then fall back to a column def.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = self.peek_token().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected("column name or constraint definition", self.peek_token());
            }

            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token().token == Token::RParen;

            if !comma && !rparen {
                return self.expected("',' or ')' after column definition", self.peek_token());
            };

            // Close on `)`, or on `,)` when trailing commas are allowed.
            // (A `,)` without the option falls through and errors on the
            // next iteration.)
            if rparen && (!comma || self.options.trailing_commas) {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
6060
6061 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
6062 let name = self.parse_identifier(false)?;
6063 let data_type = self.parse_data_type()?;
6064 Ok(ProcedureParam { name, data_type })
6065 }
6066
    /// Parses one column definition: name, data type (which SQLite may
    /// omit), an optional `COLLATE`, and any number of column options
    /// (NOT NULL, DEFAULT, named CONSTRAINTs, ...).
    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
        let name = self.parse_identifier(false)?;
        // SQLite permits a column with no declared type.
        let data_type = if self.is_column_type_sqlite_unspecified() {
            DataType::Unspecified
        } else {
            self.parse_data_type()?
        };
        // `mut` because some dialects also accept COLLATE after the options
        // (see the loop below), which overrides this value.
        let mut collation = if self.parse_keyword(Keyword::COLLATE) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::CONSTRAINT) {
                // A named constraint must be followed by an actual option.
                let name = Some(self.parse_identifier(false)?);
                if let Some(option) = self.parse_optional_column_option()? {
                    options.push(ColumnOptionDef { name, option });
                } else {
                    return self.expected(
                        "constraint details after CONSTRAINT <name>",
                        self.peek_token(),
                    );
                }
            } else if let Some(option) = self.parse_optional_column_option()? {
                options.push(ColumnOptionDef { name: None, option });
            } else if dialect_of!(self is MySqlDialect | SnowflakeDialect | GenericDialect)
                && self.parse_keyword(Keyword::COLLATE)
            {
                collation = Some(self.parse_object_name(false)?);
            } else {
                break;
            };
        }
        Ok(ColumnDef {
            name,
            data_type,
            collation,
            options,
        })
    }
6108
6109 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
6110 if dialect_of!(self is SQLiteDialect) {
6111 match self.peek_token().token {
6112 Token::Word(word) => matches!(
6113 word.keyword,
6114 Keyword::CONSTRAINT
6115 | Keyword::PRIMARY
6116 | Keyword::NOT
6117 | Keyword::UNIQUE
6118 | Keyword::CHECK
6119 | Keyword::DEFAULT
6120 | Keyword::COLLATE
6121 | Keyword::REFERENCES
6122 | Keyword::GENERATED
6123 | Keyword::AS
6124 ),
6125 _ => true, }
6127 } else {
6128 false
6129 }
6130 }
6131
    /// Try to parse one column option (NOT NULL, DEFAULT, UNIQUE, REFERENCES,
    /// CHECK, GENERATED, dialect-specific forms, ...). Returns `Ok(None)` when
    /// the next tokens do not start any recognized option.
    ///
    /// The branch order is significant: each `parse_keyword(s)` call consumes
    /// tokens on success, so longer/more specific forms must be tried before
    /// shorter ones (e.g. `NOT NULL` before `NULL`).
    pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
        // Give the active dialect first chance to handle a custom option.
        if let Some(option) = self.dialect.parse_column_option(self)? {
            return option;
        }

        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ok(Some(ColumnOption::CharacterSet(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
            Ok(Some(ColumnOption::NotNull))
        } else if self.parse_keywords(&[Keyword::COMMENT]) {
            // COMMENT must be followed by a single-quoted string literal.
            let next_token = self.next_token();
            match next_token.token {
                Token::SingleQuotedString(value, ..) => Ok(Some(ColumnOption::Comment(value))),
                _ => self.expected("string", next_token),
            }
        } else if self.parse_keyword(Keyword::NULL) {
            Ok(Some(ColumnOption::Null))
        } else if self.parse_keyword(Keyword::DEFAULT) {
            Ok(Some(ColumnOption::Default(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::MATERIALIZED)
        {
            // ClickHouse: MATERIALIZED <expr>
            Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::ALIAS)
        {
            // ClickHouse: ALIAS <expr>
            Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::EPHEMERAL)
        {
            // ClickHouse: EPHEMERAL [<expr>] — the expression is optional, so
            // a following `,` or `)` means "no default expression".
            if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
                Ok(Some(ColumnOption::Ephemeral(None)))
            } else {
                Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
            }
        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(ColumnOption::Unique {
                is_primary: true,
                characteristics,
            }))
        } else if self.parse_keyword(Keyword::UNIQUE) {
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(ColumnOption::Unique {
                is_primary: false,
                characteristics,
            }))
        } else if self.parse_keyword(Keyword::REFERENCES) {
            // Inline foreign key: REFERENCES <table> [(cols)] [ON DELETE ...]
            // [ON UPDATE ...] — the two ON clauses may appear in either order,
            // each at most once.
            let foreign_table = self.parse_object_name(false)?;
            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
            let mut on_delete = None;
            let mut on_update = None;
            loop {
                if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
                    on_delete = Some(self.parse_referential_action()?);
                } else if on_update.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                {
                    on_update = Some(self.parse_referential_action()?);
                } else {
                    break;
                }
            }
            let characteristics = self.parse_constraint_characteristics()?;

            Ok(Some(ColumnOption::ForeignKey {
                foreign_table,
                referred_columns,
                on_delete,
                on_update,
                characteristics,
            }))
        } else if self.parse_keyword(Keyword::CHECK) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Some(ColumnOption::Check(expr)))
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): here and in the ASC/DESC/ON UPDATE/AS/IDENTITY
            // branches below, `parse_keyword` runs before the dialect guard,
            // so the keyword is consumed even when the guard then fails —
            // confirm this ordering is intentional.
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTO_INCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
            && dialect_of!(self is SQLiteDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTOINCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::ASC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("ASC"),
            ])))
        } else if self.parse_keyword(Keyword::DESC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("DESC"),
            ])))
        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // MySQL: ON UPDATE <expr> (e.g. CURRENT_TIMESTAMP)
            let expr = self.parse_expr()?;
            Ok(Some(ColumnOption::OnUpdate(expr)))
        } else if self.parse_keyword(Keyword::GENERATED) {
            self.parse_optional_column_option_generated()
        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
            && self.parse_keyword(Keyword::OPTIONS)
        {
            // Back up so parse_options can re-consume the OPTIONS keyword.
            self.prev_token();
            Ok(Some(ColumnOption::Options(
                self.parse_options(Keyword::OPTIONS)?,
            )))
        } else if self.parse_keyword(Keyword::AS)
            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
        {
            // Generated column without the GENERATED keyword: AS (<expr>) ...
            self.parse_optional_column_option_as()
        } else if self.parse_keyword(Keyword::IDENTITY)
            && dialect_of!(self is MsSqlDialect | GenericDialect)
        {
            // MSSQL: IDENTITY [(seed, increment)]
            let parameters = if self.consume_token(&Token::LParen) {
                let seed = self.parse_number()?;
                self.expect_token(&Token::Comma)?;
                let increment = self.parse_number()?;
                self.expect_token(&Token::RParen)?;

                Some(IdentityPropertyFormatKind::FunctionCall(
                    IdentityParameters { seed, increment },
                ))
            } else {
                None
            };
            Ok(Some(ColumnOption::Identity(
                IdentityPropertyKind::Identity(IdentityProperty {
                    parameters,
                    order: None,
                }),
            )))
        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
        {
            // SQLite: ON CONFLICT <resolution>
            Ok(Some(ColumnOption::OnConflict(
                self.expect_one_of_keywords(&[
                    Keyword::ROLLBACK,
                    Keyword::ABORT,
                    Keyword::FAIL,
                    Keyword::IGNORE,
                    Keyword::REPLACE,
                ])?,
            )))
        } else {
            Ok(None)
        }
    }
6299
6300 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
6301 let name = self.parse_identifier(false)?;
6302 self.expect_token(&Token::Eq)?;
6303 let value = self.parse_literal_string()?;
6304
6305 Ok(Tag::new(name, value))
6306 }
6307
    /// Parse the tail of a `GENERATED ...` column option; the `GENERATED`
    /// keyword itself has already been consumed by the caller.
    ///
    /// Handles three forms: `ALWAYS AS IDENTITY [(...)]`,
    /// `BY DEFAULT AS IDENTITY [(...)]`, and `ALWAYS AS (<expr>) [STORED|VIRTUAL]`.
    fn parse_optional_column_option_generated(
        &mut self,
    ) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
            // GENERATED ALWAYS AS IDENTITY [( <sequence options> )]
            let mut sequence_options = vec![];
            // expect_token consumes only on success, so is_ok() acts as an
            // optional-LParen probe here.
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::Always,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[
            Keyword::BY,
            Keyword::DEFAULT,
            Keyword::AS,
            Keyword::IDENTITY,
        ]) {
            // GENERATED BY DEFAULT AS IDENTITY [( <sequence options> )]
            let mut sequence_options = vec![];
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::ByDefault,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
            // GENERATED ALWAYS AS ( <expr> ) [STORED | VIRTUAL]
            if self.expect_token(&Token::LParen).is_ok() {
                let expr = self.parse_expr()?;
                self.expect_token(&Token::RParen)?;
                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
                    Ok((
                        GeneratedAs::ExpStored,
                        Some(GeneratedExpressionMode::Stored),
                    ))
                } else if dialect_of!(self is PostgreSqlDialect) {
                    // PostgreSQL requires STORED on generated expression columns.
                    self.expected("STORED", self.peek_token())
                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
                } else {
                    Ok((GeneratedAs::Always, None))
                }?;

                Ok(Some(ColumnOption::Generated {
                    generated_as: gen_as,
                    sequence_options: None,
                    generation_expr: Some(expr),
                    generation_expr_mode: expr_mode,
                    generated_keyword: true,
                }))
            } else {
                // NOTE(review): `ALWAYS AS` was already consumed at this point
                // but we return None without rewinding — confirm callers
                // tolerate the consumed tokens in this error-ish path.
                Ok(None)
            }
        } else {
            Ok(None)
        }
    }
6374
6375 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
6376 self.expect_token(&Token::LParen)?;
6378 let expr = self.parse_expr()?;
6379 self.expect_token(&Token::RParen)?;
6380
6381 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
6382 (
6383 GeneratedAs::ExpStored,
6384 Some(GeneratedExpressionMode::Stored),
6385 )
6386 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
6387 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
6388 } else {
6389 (GeneratedAs::Always, None)
6390 };
6391
6392 Ok(Some(ColumnOption::Generated {
6393 generated_as: gen_as,
6394 sequence_options: None,
6395 generation_expr: Some(expr),
6396 generation_expr_mode: expr_mode,
6397 generated_keyword: false,
6398 }))
6399 }
6400
6401 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
6402 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
6403 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
6404 {
6405 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
6406
6407 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
6408 self.expect_token(&Token::LParen)?;
6409 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
6410 self.expect_token(&Token::RParen)?;
6411 Some(sorted_by_columns)
6412 } else {
6413 None
6414 };
6415
6416 self.expect_keyword(Keyword::INTO)?;
6417 let num_buckets = self.parse_number_value()?;
6418 self.expect_keyword(Keyword::BUCKETS)?;
6419 Some(ClusteredBy {
6420 columns,
6421 sorted_by,
6422 num_buckets,
6423 })
6424 } else {
6425 None
6426 };
6427 Ok(clustered_by)
6428 }
6429
6430 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
6431 if self.parse_keyword(Keyword::RESTRICT) {
6432 Ok(ReferentialAction::Restrict)
6433 } else if self.parse_keyword(Keyword::CASCADE) {
6434 Ok(ReferentialAction::Cascade)
6435 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
6436 Ok(ReferentialAction::SetNull)
6437 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
6438 Ok(ReferentialAction::NoAction)
6439 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
6440 Ok(ReferentialAction::SetDefault)
6441 } else {
6442 self.expected(
6443 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
6444 self.peek_token(),
6445 )
6446 }
6447 }
6448
6449 pub fn parse_constraint_characteristics(
6450 &mut self,
6451 ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
6452 let mut cc = ConstraintCharacteristics::default();
6453
6454 loop {
6455 if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
6456 {
6457 cc.deferrable = Some(false);
6458 } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
6459 cc.deferrable = Some(true);
6460 } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
6461 if self.parse_keyword(Keyword::DEFERRED) {
6462 cc.initially = Some(DeferrableInitial::Deferred);
6463 } else if self.parse_keyword(Keyword::IMMEDIATE) {
6464 cc.initially = Some(DeferrableInitial::Immediate);
6465 } else {
6466 self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
6467 }
6468 } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
6469 cc.enforced = Some(true);
6470 } else if cc.enforced.is_none()
6471 && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
6472 {
6473 cc.enforced = Some(false);
6474 } else {
6475 break;
6476 }
6477 }
6478
6479 if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
6480 Ok(Some(cc))
6481 } else {
6482 Ok(None)
6483 }
6484 }
6485
    /// Try to parse a table-level constraint (`[CONSTRAINT <name>]` followed by
    /// `UNIQUE`, `PRIMARY KEY`, `FOREIGN KEY`, `CHECK`, or the MySQL-specific
    /// `INDEX`/`KEY`/`FULLTEXT`/`SPATIAL` forms). Returns `Ok(None)` — with no
    /// tokens consumed beyond an absent CONSTRAINT — when the next token does
    /// not begin a constraint.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        // Optional `CONSTRAINT <name>` prefix shared by all forms below.
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            Some(self.parse_identifier(false)?)
        } else {
            None
        };

        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // Optional KEY/INDEX display word; only MySQL/Generic accept it.
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self
                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
                }

                let index_name = self.parse_optional_indent()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(TableConstraint::Unique {
                    name,
                    index_name,
                    index_type_display,
                    index_type,
                    columns,
                    index_options,
                    characteristics,
                }))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                // PRIMARY must be followed by KEY.
                self.expect_keyword(Keyword::KEY)?;

                let index_name = self.parse_optional_indent()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(TableConstraint::PrimaryKey {
                    name,
                    index_name,
                    index_type,
                    columns,
                    index_options,
                    characteristics,
                }))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                // FOREIGN KEY (cols) REFERENCES <table> (cols) [ON DELETE ...]
                // [ON UPDATE ...]; the two ON clauses may come in either order,
                // each at most once.
                self.expect_keyword(Keyword::KEY)?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                let mut on_delete = None;
                let mut on_update = None;
                loop {
                    if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(TableConstraint::ForeignKey {
                    name,
                    columns,
                    foreign_table,
                    referred_columns,
                    on_delete,
                    on_update,
                    characteristics,
                }))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;
                Ok(Some(TableConstraint::Check { name, expr }))
            }
            // MySQL `INDEX`/`KEY` — only when no CONSTRAINT name was given.
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                let display_as_key = w.keyword == Keyword::KEY;

                // A following USING belongs to the index type, not the name.
                let name = match self.peek_token().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_indent()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;

                Ok(Some(TableConstraint::Index {
                    display_as_key,
                    name,
                    index_type,
                    columns,
                }))
            }
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                // MySQL forbids a CONSTRAINT name on FULLTEXT/SPATIAL indexes.
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithLocation {
                            token: Token::make_keyword(&name.to_string()),
                            location: next_token.location,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_indent()?;

                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;

                Ok(Some(TableConstraint::FulltextOrSpatial {
                    fulltext,
                    index_type_display,
                    opt_index_name,
                    columns,
                }))
            }
            _ => {
                if name.is_some() {
                    // `CONSTRAINT <name>` was consumed, so a body is required.
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    // Not a constraint at all: push the token back and bail.
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
6642
6643 pub fn maybe_parse_options(
6644 &mut self,
6645 keyword: Keyword,
6646 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
6647 if let Token::Word(word) = self.peek_token().token {
6648 if word.keyword == keyword {
6649 return Ok(Some(self.parse_options(keyword)?));
6650 }
6651 };
6652 Ok(None)
6653 }
6654
6655 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
6656 if self.parse_keyword(keyword) {
6657 self.expect_token(&Token::LParen)?;
6658 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
6659 self.expect_token(&Token::RParen)?;
6660 Ok(options)
6661 } else {
6662 Ok(vec![])
6663 }
6664 }
6665
6666 pub fn parse_options_with_keywords(
6667 &mut self,
6668 keywords: &[Keyword],
6669 ) -> Result<Vec<SqlOption>, ParserError> {
6670 if self.parse_keywords(keywords) {
6671 self.expect_token(&Token::LParen)?;
6672 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
6673 self.expect_token(&Token::RParen)?;
6674 Ok(options)
6675 } else {
6676 Ok(vec![])
6677 }
6678 }
6679
6680 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
6681 if self.parse_keyword(Keyword::BTREE) {
6682 Ok(IndexType::BTree)
6683 } else if self.parse_keyword(Keyword::HASH) {
6684 Ok(IndexType::Hash)
6685 } else {
6686 self.expected("index type {BTREE | HASH}", self.peek_token())
6687 }
6688 }
6689
6690 pub fn parse_optional_using_then_index_type(
6692 &mut self,
6693 ) -> Result<Option<IndexType>, ParserError> {
6694 if self.parse_keyword(Keyword::USING) {
6695 Ok(Some(self.parse_index_type()?))
6696 } else {
6697 Ok(None)
6698 }
6699 }
6700
    /// Parse an optional identifier; on failure nothing is consumed.
    /// NOTE(review): the name reads like a typo for `parse_optional_ident`,
    /// but it is public API, so it is kept unchanged for compatibility.
    pub fn parse_optional_indent(&mut self) -> Result<Option<Ident>, ParserError> {
        self.maybe_parse(|parser| parser.parse_identifier(false))
    }
6706
6707 #[must_use]
6708 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
6709 if self.parse_keyword(Keyword::KEY) {
6710 KeyOrIndexDisplay::Key
6711 } else if self.parse_keyword(Keyword::INDEX) {
6712 KeyOrIndexDisplay::Index
6713 } else {
6714 KeyOrIndexDisplay::None
6715 }
6716 }
6717
6718 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
6719 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
6720 Ok(Some(IndexOption::Using(index_type)))
6721 } else if self.parse_keyword(Keyword::COMMENT) {
6722 let s = self.parse_literal_string()?;
6723 Ok(Some(IndexOption::Comment(s)))
6724 } else {
6725 Ok(None)
6726 }
6727 }
6728
6729 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
6730 let mut options = Vec::new();
6731
6732 loop {
6733 match self.parse_optional_index_option()? {
6734 Some(index_option) => options.push(index_option),
6735 None => return Ok(options),
6736 }
6737 }
6738 }
6739
6740 pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
6741 let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
6742
6743 match self.peek_token().token {
6744 Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
6745 Ok(SqlOption::Ident(self.parse_identifier(false)?))
6746 }
6747 Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
6748 self.parse_option_partition()
6749 }
6750 Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
6751 self.parse_option_clustered()
6752 }
6753 _ => {
6754 let name = self.parse_identifier(false)?;
6755 self.expect_token(&Token::Eq)?;
6756 let value = self.parse_expr()?;
6757
6758 Ok(SqlOption::KeyValue { key: name, value })
6759 }
6760 }
6761 }
6762
6763 pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
6764 if self.parse_keywords(&[
6765 Keyword::CLUSTERED,
6766 Keyword::COLUMNSTORE,
6767 Keyword::INDEX,
6768 Keyword::ORDER,
6769 ]) {
6770 Ok(SqlOption::Clustered(
6771 TableOptionsClustered::ColumnstoreIndexOrder(
6772 self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
6773 ),
6774 ))
6775 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
6776 Ok(SqlOption::Clustered(
6777 TableOptionsClustered::ColumnstoreIndex,
6778 ))
6779 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
6780 self.expect_token(&Token::LParen)?;
6781
6782 let columns = self.parse_comma_separated(|p| {
6783 let name = p.parse_identifier(false)?;
6784 let asc = p.parse_asc_desc();
6785
6786 Ok(ClusteredIndex { name, asc })
6787 })?;
6788
6789 self.expect_token(&Token::RParen)?;
6790
6791 Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
6792 } else {
6793 Err(ParserError::ParserError(
6794 "invalid CLUSTERED sequence".to_string(),
6795 ))
6796 }
6797 }
6798
6799 pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
6800 self.expect_keyword(Keyword::PARTITION)?;
6801 self.expect_token(&Token::LParen)?;
6802 let column_name = self.parse_identifier(false)?;
6803
6804 self.expect_keyword(Keyword::RANGE)?;
6805 let range_direction = if self.parse_keyword(Keyword::LEFT) {
6806 Some(PartitionRangeDirection::Left)
6807 } else if self.parse_keyword(Keyword::RIGHT) {
6808 Some(PartitionRangeDirection::Right)
6809 } else {
6810 None
6811 };
6812
6813 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
6814 self.expect_token(&Token::LParen)?;
6815
6816 let for_values = self.parse_comma_separated(Parser::parse_expr)?;
6817
6818 self.expect_token(&Token::RParen)?;
6819 self.expect_token(&Token::RParen)?;
6820
6821 Ok(SqlOption::Partition {
6822 column_name,
6823 range_direction,
6824 for_values,
6825 })
6826 }
6827
6828 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
6829 self.expect_token(&Token::LParen)?;
6830 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
6831 self.expect_token(&Token::RParen)?;
6832 Ok(Partition::Partitions(partitions))
6833 }
6834
6835 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
6836 self.expect_token(&Token::LParen)?;
6837 self.expect_keyword(Keyword::SELECT)?;
6838 let projection = self.parse_projection()?;
6839 let group_by = self.parse_optional_group_by()?;
6840 let order_by = self.parse_optional_order_by()?;
6841 self.expect_token(&Token::RParen)?;
6842 Ok(ProjectionSelect {
6843 projection,
6844 group_by,
6845 order_by,
6846 })
6847 }
6848 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
6849 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6850 let name = self.parse_identifier(false)?;
6851 let query = self.parse_projection_select()?;
6852 Ok(AlterTableOperation::AddProjection {
6853 if_not_exists,
6854 name,
6855 select: query,
6856 })
6857 }
6858
6859 pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
6860 let operation = if self.parse_keyword(Keyword::ADD) {
6861 if let Some(constraint) = self.parse_optional_table_constraint()? {
6862 AlterTableOperation::AddConstraint(constraint)
6863 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
6864 && self.parse_keyword(Keyword::PROJECTION)
6865 {
6866 return self.parse_alter_table_add_projection();
6867 } else {
6868 let if_not_exists =
6869 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6870 let mut new_partitions = vec![];
6871 loop {
6872 if self.parse_keyword(Keyword::PARTITION) {
6873 new_partitions.push(self.parse_partition()?);
6874 } else {
6875 break;
6876 }
6877 }
6878 if !new_partitions.is_empty() {
6879 AlterTableOperation::AddPartitions {
6880 if_not_exists,
6881 new_partitions,
6882 }
6883 } else {
6884 let column_keyword = self.parse_keyword(Keyword::COLUMN);
6885
6886 let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
6887 {
6888 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
6889 || if_not_exists
6890 } else {
6891 false
6892 };
6893
6894 let column_def = self.parse_column_def()?;
6895
6896 let column_position = self.parse_column_position()?;
6897
6898 AlterTableOperation::AddColumn {
6899 column_keyword,
6900 if_not_exists,
6901 column_def,
6902 column_position,
6903 }
6904 }
6905 }
6906 } else if self.parse_keyword(Keyword::RENAME) {
6907 if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
6908 let old_name = self.parse_identifier(false)?;
6909 self.expect_keyword(Keyword::TO)?;
6910 let new_name = self.parse_identifier(false)?;
6911 AlterTableOperation::RenameConstraint { old_name, new_name }
6912 } else if self.parse_keyword(Keyword::TO) {
6913 let table_name = self.parse_object_name(false)?;
6914 AlterTableOperation::RenameTable { table_name }
6915 } else {
6916 let _ = self.parse_keyword(Keyword::COLUMN); let old_column_name = self.parse_identifier(false)?;
6918 self.expect_keyword(Keyword::TO)?;
6919 let new_column_name = self.parse_identifier(false)?;
6920 AlterTableOperation::RenameColumn {
6921 old_column_name,
6922 new_column_name,
6923 }
6924 }
6925 } else if self.parse_keyword(Keyword::DISABLE) {
6926 if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
6927 AlterTableOperation::DisableRowLevelSecurity {}
6928 } else if self.parse_keyword(Keyword::RULE) {
6929 let name = self.parse_identifier(false)?;
6930 AlterTableOperation::DisableRule { name }
6931 } else if self.parse_keyword(Keyword::TRIGGER) {
6932 let name = self.parse_identifier(false)?;
6933 AlterTableOperation::DisableTrigger { name }
6934 } else {
6935 return self.expected(
6936 "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
6937 self.peek_token(),
6938 );
6939 }
6940 } else if self.parse_keyword(Keyword::ENABLE) {
6941 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
6942 let name = self.parse_identifier(false)?;
6943 AlterTableOperation::EnableAlwaysRule { name }
6944 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
6945 let name = self.parse_identifier(false)?;
6946 AlterTableOperation::EnableAlwaysTrigger { name }
6947 } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
6948 AlterTableOperation::EnableRowLevelSecurity {}
6949 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
6950 let name = self.parse_identifier(false)?;
6951 AlterTableOperation::EnableReplicaRule { name }
6952 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
6953 let name = self.parse_identifier(false)?;
6954 AlterTableOperation::EnableReplicaTrigger { name }
6955 } else if self.parse_keyword(Keyword::RULE) {
6956 let name = self.parse_identifier(false)?;
6957 AlterTableOperation::EnableRule { name }
6958 } else if self.parse_keyword(Keyword::TRIGGER) {
6959 let name = self.parse_identifier(false)?;
6960 AlterTableOperation::EnableTrigger { name }
6961 } else {
6962 return self.expected(
6963 "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
6964 self.peek_token(),
6965 );
6966 }
6967 } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
6968 && dialect_of!(self is ClickHouseDialect|GenericDialect)
6969 {
6970 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6971 let name = self.parse_identifier(false)?;
6972 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
6973 Some(self.parse_identifier(false)?)
6974 } else {
6975 None
6976 };
6977 AlterTableOperation::ClearProjection {
6978 if_exists,
6979 name,
6980 partition,
6981 }
6982 } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
6983 && dialect_of!(self is ClickHouseDialect|GenericDialect)
6984 {
6985 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6986 let name = self.parse_identifier(false)?;
6987 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
6988 Some(self.parse_identifier(false)?)
6989 } else {
6990 None
6991 };
6992 AlterTableOperation::MaterializeProjection {
6993 if_exists,
6994 name,
6995 partition,
6996 }
6997 } else if self.parse_keyword(Keyword::DROP) {
6998 if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
6999 self.expect_token(&Token::LParen)?;
7000 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
7001 self.expect_token(&Token::RParen)?;
7002 AlterTableOperation::DropPartitions {
7003 partitions,
7004 if_exists: true,
7005 }
7006 } else if self.parse_keyword(Keyword::PARTITION) {
7007 self.expect_token(&Token::LParen)?;
7008 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
7009 self.expect_token(&Token::RParen)?;
7010 AlterTableOperation::DropPartitions {
7011 partitions,
7012 if_exists: false,
7013 }
7014 } else if self.parse_keyword(Keyword::CONSTRAINT) {
7015 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7016 let name = self.parse_identifier(false)?;
7017 let cascade = self.parse_keyword(Keyword::CASCADE);
7018 AlterTableOperation::DropConstraint {
7019 if_exists,
7020 name,
7021 cascade,
7022 }
7023 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
7024 && dialect_of!(self is MySqlDialect | GenericDialect)
7025 {
7026 AlterTableOperation::DropPrimaryKey
7027 } else if self.parse_keyword(Keyword::PROJECTION)
7028 && dialect_of!(self is ClickHouseDialect|GenericDialect)
7029 {
7030 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7031 let name = self.parse_identifier(false)?;
7032 AlterTableOperation::DropProjection { if_exists, name }
7033 } else {
7034 let _ = self.parse_keyword(Keyword::COLUMN); let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7036 let column_name = self.parse_identifier(false)?;
7037 let cascade = self.parse_keyword(Keyword::CASCADE);
7038 AlterTableOperation::DropColumn {
7039 column_name,
7040 if_exists,
7041 cascade,
7042 }
7043 }
7044 } else if self.parse_keyword(Keyword::PARTITION) {
7045 self.expect_token(&Token::LParen)?;
7046 let before = self.parse_comma_separated(Parser::parse_expr)?;
7047 self.expect_token(&Token::RParen)?;
7048 self.expect_keyword(Keyword::RENAME)?;
7049 self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
7050 self.expect_token(&Token::LParen)?;
7051 let renames = self.parse_comma_separated(Parser::parse_expr)?;
7052 self.expect_token(&Token::RParen)?;
7053 AlterTableOperation::RenamePartitions {
7054 old_partitions: before,
7055 new_partitions: renames,
7056 }
7057 } else if self.parse_keyword(Keyword::CHANGE) {
7058 let _ = self.parse_keyword(Keyword::COLUMN); let old_name = self.parse_identifier(false)?;
7060 let new_name = self.parse_identifier(false)?;
7061 let data_type = self.parse_data_type()?;
7062 let mut options = vec![];
7063 while let Some(option) = self.parse_optional_column_option()? {
7064 options.push(option);
7065 }
7066
7067 let column_position = self.parse_column_position()?;
7068
7069 AlterTableOperation::ChangeColumn {
7070 old_name,
7071 new_name,
7072 data_type,
7073 options,
7074 column_position,
7075 }
7076 } else if self.parse_keyword(Keyword::MODIFY) {
7077 let _ = self.parse_keyword(Keyword::COLUMN); let col_name = self.parse_identifier(false)?;
7079 let data_type = self.parse_data_type()?;
7080 let mut options = vec![];
7081 while let Some(option) = self.parse_optional_column_option()? {
7082 options.push(option);
7083 }
7084
7085 let column_position = self.parse_column_position()?;
7086
7087 AlterTableOperation::ModifyColumn {
7088 col_name,
7089 data_type,
7090 options,
7091 column_position,
7092 }
7093 } else if self.parse_keyword(Keyword::ALTER) {
7094 let _ = self.parse_keyword(Keyword::COLUMN); let column_name = self.parse_identifier(false)?;
7096 let is_postgresql = dialect_of!(self is PostgreSqlDialect);
7097
7098 let op: AlterColumnOperation = if self.parse_keywords(&[
7099 Keyword::SET,
7100 Keyword::NOT,
7101 Keyword::NULL,
7102 ]) {
7103 AlterColumnOperation::SetNotNull {}
7104 } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
7105 AlterColumnOperation::DropNotNull {}
7106 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
7107 AlterColumnOperation::SetDefault {
7108 value: self.parse_expr()?,
7109 }
7110 } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
7111 AlterColumnOperation::DropDefault {}
7112 } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE])
7113 || (is_postgresql && self.parse_keyword(Keyword::TYPE))
7114 {
7115 let data_type = self.parse_data_type()?;
7116 let using = if is_postgresql && self.parse_keyword(Keyword::USING) {
7117 Some(self.parse_expr()?)
7118 } else {
7119 None
7120 };
7121 AlterColumnOperation::SetDataType { data_type, using }
7122 } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
7123 let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
7124 Some(GeneratedAs::Always)
7125 } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
7126 Some(GeneratedAs::ByDefault)
7127 } else {
7128 None
7129 };
7130
7131 self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
7132
7133 let mut sequence_options: Option<Vec<SequenceOptions>> = None;
7134
7135 if self.peek_token().token == Token::LParen {
7136 self.expect_token(&Token::LParen)?;
7137 sequence_options = Some(self.parse_create_sequence_options()?);
7138 self.expect_token(&Token::RParen)?;
7139 }
7140
7141 AlterColumnOperation::AddGenerated {
7142 generated_as,
7143 sequence_options,
7144 }
7145 } else {
7146 let message = if is_postgresql {
7147 "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
7148 } else {
7149 "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
7150 };
7151
7152 return self.expected(message, self.peek_token());
7153 };
7154 AlterTableOperation::AlterColumn { column_name, op }
7155 } else if self.parse_keyword(Keyword::SWAP) {
7156 self.expect_keyword(Keyword::WITH)?;
7157 let table_name = self.parse_object_name(false)?;
7158 AlterTableOperation::SwapWith { table_name }
7159 } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
7160 && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
7161 {
7162 let new_owner = self.parse_owner()?;
7163 AlterTableOperation::OwnerTo { new_owner }
7164 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
7165 && self.parse_keyword(Keyword::ATTACH)
7166 {
7167 AlterTableOperation::AttachPartition {
7168 partition: self.parse_part_or_partition()?,
7169 }
7170 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
7171 && self.parse_keyword(Keyword::DETACH)
7172 {
7173 AlterTableOperation::DetachPartition {
7174 partition: self.parse_part_or_partition()?,
7175 }
7176 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
7177 && self.parse_keyword(Keyword::FREEZE)
7178 {
7179 let partition = self.parse_part_or_partition()?;
7180 let with_name = if self.parse_keyword(Keyword::WITH) {
7181 self.expect_keyword(Keyword::NAME)?;
7182 Some(self.parse_identifier(false)?)
7183 } else {
7184 None
7185 };
7186 AlterTableOperation::FreezePartition {
7187 partition,
7188 with_name,
7189 }
7190 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
7191 && self.parse_keyword(Keyword::UNFREEZE)
7192 {
7193 let partition = self.parse_part_or_partition()?;
7194 let with_name = if self.parse_keyword(Keyword::WITH) {
7195 self.expect_keyword(Keyword::NAME)?;
7196 Some(self.parse_identifier(false)?)
7197 } else {
7198 None
7199 };
7200 AlterTableOperation::UnfreezePartition {
7201 partition,
7202 with_name,
7203 }
7204 } else {
7205 let options: Vec<SqlOption> =
7206 self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
7207 if !options.is_empty() {
7208 AlterTableOperation::SetTblProperties {
7209 table_properties: options,
7210 }
7211 } else {
7212 return self.expected(
7213 "ADD, RENAME, PARTITION, SWAP, DROP, or SET TBLPROPERTIES after ALTER TABLE",
7214 self.peek_token(),
7215 );
7216 }
7217 };
7218 Ok(operation)
7219 }
7220
7221 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
7222 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
7223 match keyword {
7224 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
7225 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
7226 _ => unreachable!(),
7228 }
7229 }
7230
    /// Parse an `ALTER` statement, dispatching on the object kind
    /// (`VIEW`, `TABLE`, `INDEX`, `ROLE`, or `POLICY`).
    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
        let object_type = self.expect_one_of_keywords(&[
            Keyword::VIEW,
            Keyword::TABLE,
            Keyword::INDEX,
            Keyword::ROLE,
            Keyword::POLICY,
        ])?;
        match object_type {
            Keyword::VIEW => self.parse_alter_view(),
            Keyword::TABLE => {
                // Optional `IF EXISTS` and `ONLY` modifiers precede the name.
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ]
                let table_name = self.parse_object_name(false)?;
                // ClickHouse-style `ON CLUSTER <name>`, if present.
                let on_cluster = self.parse_optional_on_cluster()?;
                let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;

                // Hive: trailing `[SET] LOCATION '<path>'`. `has_set` records
                // whether the SET keyword appeared, so the statement round-trips.
                let mut location = None;
                if self.parse_keyword(Keyword::LOCATION) {
                    location = Some(HiveSetLocation {
                        has_set: false,
                        location: self.parse_identifier(false)?,
                    });
                } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
                    location = Some(HiveSetLocation {
                        has_set: true,
                        location: self.parse_identifier(false)?,
                    });
                }

                Ok(Statement::AlterTable {
                    name: table_name,
                    if_exists,
                    only,
                    operations,
                    location,
                    on_cluster,
                })
            }
            Keyword::INDEX => {
                let index_name = self.parse_object_name(false)?;
                // Only `ALTER INDEX ... RENAME TO <name>` is supported.
                let operation = if self.parse_keyword(Keyword::RENAME) {
                    if self.parse_keyword(Keyword::TO) {
                        let index_name = self.parse_object_name(false)?;
                        AlterIndexOperation::RenameIndex { index_name }
                    } else {
                        return self.expected("TO after RENAME", self.peek_token());
                    }
                } else {
                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
                };

                Ok(Statement::AlterIndex {
                    name: index_name,
                    operation,
                })
            }
            Keyword::ROLE => self.parse_alter_role(),
            Keyword::POLICY => self.parse_alter_policy(),
            // `expect_one_of_keywords` only returns one of the listed keywords.
            _ => unreachable!(),
        }
    }
7294
7295 pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
7296 let name = self.parse_object_name(false)?;
7297 let columns = self.parse_parenthesized_column_list(Optional, false)?;
7298
7299 let with_options = self.parse_options(Keyword::WITH)?;
7300
7301 self.expect_keyword(Keyword::AS)?;
7302 let query = self.parse_query()?;
7303
7304 Ok(Statement::AlterView {
7305 name,
7306 columns,
7307 query,
7308 with_options,
7309 })
7310 }
7311
7312 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
7315 let object_name = self.parse_object_name(false)?;
7316 if self.peek_token().token == Token::LParen {
7317 match self.parse_function(object_name)? {
7318 Expr::Function(f) => Ok(Statement::Call(f)),
7319 other => parser_err!(
7320 format!("Expected a simple procedure call but found: {other}"),
7321 self.peek_token().location
7322 ),
7323 }
7324 } else {
7325 Ok(Statement::Call(Function {
7326 name: object_name,
7327 parameters: FunctionArguments::None,
7328 args: FunctionArguments::None,
7329 over: None,
7330 filter: None,
7331 null_treatment: None,
7332 within_group: vec![],
7333 }))
7334 }
7335 }
7336
    /// Parse a Postgres-style `COPY` statement: `COPY <table> [(cols)]` or
    /// `COPY (<query>)`, then `FROM`/`TO` a target, followed by modern
    /// parenthesized options and/or legacy bare options.
    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
        let source;
        if self.consume_token(&Token::LParen) {
            // `COPY (SELECT ...)`: copy the result of a query.
            source = CopySource::Query(self.parse_query()?);
            self.expect_token(&Token::RParen)?;
        } else {
            let table_name = self.parse_object_name(false)?;
            let columns = self.parse_parenthesized_column_list(Optional, false)?;
            source = CopySource::Table {
                table_name,
                columns,
            };
        }
        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
            Some(Keyword::FROM) => false,
            Some(Keyword::TO) => true,
            _ => self.expected("FROM or TO", self.peek_token())?,
        };
        if !to {
            // Only tables may be the destination of `COPY ... FROM`.
            if let CopySource::Query(_) = source {
                return Err(ParserError::ParserError(
                    "COPY ... FROM does not support query as a source".to_string(),
                ));
            }
        }
        let target = if self.parse_keyword(Keyword::STDIN) {
            CopyTarget::Stdin
        } else if self.parse_keyword(Keyword::STDOUT) {
            CopyTarget::Stdout
        } else if self.parse_keyword(Keyword::PROGRAM) {
            CopyTarget::Program {
                command: self.parse_literal_string()?,
            }
        } else {
            CopyTarget::File {
                filename: self.parse_literal_string()?,
            }
        };
        // Optional `WITH` noise word before the parenthesized option list.
        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
        let mut options = vec![];
        if self.consume_token(&Token::LParen) {
            options = self.parse_comma_separated(Parser::parse_copy_option)?;
            self.expect_token(&Token::RParen)?;
        }
        // Legacy (pre-9.0, unparenthesized) options may follow.
        let mut legacy_options = vec![];
        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
            legacy_options.push(opt);
        }
        // For `COPY ... FROM STDIN` the inline TSV payload follows the `;`.
        let values = if let CopyTarget::Stdin = target {
            self.expect_token(&Token::SemiColon)?;
            self.parse_tsv()
        } else {
            vec![]
        };
        Ok(Statement::Copy {
            source,
            to,
            target,
            options,
            legacy_options,
            values,
        })
    }
7403
7404 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
7405 let cursor = if self.parse_keyword(Keyword::ALL) {
7406 CloseCursor::All
7407 } else {
7408 let name = self.parse_identifier(false)?;
7409
7410 CloseCursor::Specific { name }
7411 };
7412
7413 Ok(Statement::Close { cursor })
7414 }
7415
    /// Parse one modern (parenthesized) `COPY` option, e.g. `FORMAT csv`,
    /// `DELIMITER ','`, `FORCE_QUOTE (a, b)`.
    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::FORMAT,
            Keyword::FREEZE,
            Keyword::DELIMITER,
            Keyword::NULL,
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE_QUOTE,
            Keyword::FORCE_NOT_NULL,
            Keyword::FORCE_NULL,
            Keyword::ENCODING,
        ]) {
            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier(false)?),
            // Boolean options default to true unless an explicit FALSE follows.
            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
            // The FORCE_* options each take a mandatory column list.
            Some(Keyword::FORCE_QUOTE) => {
                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NOT_NULL) => {
                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NULL) => {
                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
            _ => self.expected("option", self.peek_token())?,
        };
        Ok(ret)
    }
7457
    /// Parse one legacy (pre-Postgres-9.0, unparenthesized) `COPY` option,
    /// e.g. `BINARY`, `DELIMITER [AS] ','`, `CSV <sub-options...>`.
    fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::BINARY,
            Keyword::DELIMITER,
            Keyword::NULL,
            Keyword::CSV,
        ]) {
            Some(Keyword::BINARY) => CopyLegacyOption::Binary,
            Some(Keyword::DELIMITER) => {
                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
                CopyLegacyOption::Delimiter(self.parse_literal_char()?)
            }
            Some(Keyword::NULL) => {
                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
                CopyLegacyOption::Null(self.parse_literal_string()?)
            }
            Some(Keyword::CSV) => CopyLegacyOption::Csv({
                // `CSV` may be followed by any number of CSV sub-options.
                let mut opts = vec![];
                while let Some(opt) =
                    self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
                {
                    opts.push(opt);
                }
                opts
            }),
            _ => self.expected("option", self.peek_token())?,
        };
        Ok(ret)
    }
7487
    /// Parse one legacy `COPY ... CSV` sub-option, e.g. `HEADER`,
    /// `QUOTE [AS] '"'`, `FORCE NOT NULL col, ...`, `FORCE QUOTE col, ...`.
    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE,
        ]) {
            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
            Some(Keyword::QUOTE) => {
                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
            }
            Some(Keyword::ESCAPE) => {
                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
            }
            // NOTE: these guards consume tokens on success; when the first
            // guard fails, the second arm re-tests the same `FORCE` keyword
            // against `QUOTE`, so arm order is significant.
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
                CopyLegacyCsvOption::ForceNotNull(
                    self.parse_comma_separated(|p| p.parse_identifier(false))?,
                )
            }
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
                CopyLegacyCsvOption::ForceQuote(
                    self.parse_comma_separated(|p| p.parse_identifier(false))?,
                )
            }
            _ => self.expected("csv option", self.peek_token())?,
        };
        Ok(ret)
    }
7518
7519 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
7520 let s = self.parse_literal_string()?;
7521 if s.len() != 1 {
7522 let loc = self
7523 .tokens
7524 .get(self.index - 1)
7525 .map_or(Location { line: 0, column: 0 }, |t| t.location);
7526 return parser_err!(format!("Expect a char, found {s:?}"), loc);
7527 }
7528 Ok(s.chars().next().unwrap())
7529 }
7530
    /// Parse the tab-separated-values payload that follows
    /// `COPY ... FROM STDIN;`. Delegates to [`Parser::parse_tab_value`].
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
7536
7537 pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
7538 let mut values = vec![];
7539 let mut content = String::from("");
7540 while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
7541 match t {
7542 Token::Whitespace(Whitespace::Tab) => {
7543 values.push(Some(content.to_string()));
7544 content.clear();
7545 }
7546 Token::Whitespace(Whitespace::Newline) => {
7547 values.push(Some(content.to_string()));
7548 content.clear();
7549 }
7550 Token::Backslash => {
7551 if self.consume_token(&Token::Period) {
7552 return values;
7553 }
7554 if let Token::Word(w) = self.next_token().token {
7555 if w.value == "N" {
7556 values.push(None);
7557 }
7558 }
7559 }
7560 _ => {
7561 content.push_str(&t.to_string());
7562 }
7563 }
7564 }
7565 values
7566 }
7567
    /// Parse a literal value: boolean, NULL, number, one of the many string
    /// literal flavors, or a placeholder (`$1`, `:name`, `@name`).
    pub fn parse_value(&mut self) -> Result<Value, ParserError> {
        let next_token = self.next_token();
        let location = next_token.location;
        match next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::TRUE => Ok(Value::Boolean(true)),
                Keyword::FALSE => Ok(Value::Boolean(false)),
                Keyword::NULL => Ok(Value::Null),
                // A quoted non-keyword word counts as a quoted string value.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => Ok(Value::DoubleQuotedString(w.value)),
                    Some('\'') => Ok(Value::SingleQuotedString(w.value)),
                    _ => self.expected(
                        "A value?",
                        TokenWithLocation {
                            token: Token::Word(w),
                            location,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithLocation {
                        token: Token::Word(w),
                        location,
                    },
                ),
            },
            Token::Number(n, l) => Ok(Value::Number(Self::parse(n, location)?, l)),
            Token::SingleQuotedString(ref s) => Ok(Value::SingleQuotedString(s.to_string())),
            Token::DoubleQuotedString(ref s) => Ok(Value::DoubleQuotedString(s.to_string())),
            Token::TripleSingleQuotedString(ref s) => {
                Ok(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                Ok(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => Ok(Value::DollarQuotedString(s.clone())),
            Token::SingleQuotedByteStringLiteral(ref s) => {
                Ok(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                Ok(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                Ok(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                Ok(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                Ok(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                Ok(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                Ok(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                Ok(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => Ok(Value::NationalStringLiteral(s.to_string())),
            Token::EscapedStringLiteral(ref s) => Ok(Value::EscapedStringLiteral(s.to_string())),
            Token::UnicodeStringLiteral(ref s) => Ok(Value::UnicodeStringLiteral(s.to_string())),
            Token::HexStringLiteral(ref s) => Ok(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => Ok(Value::Placeholder(s.to_string())),
            // `:name` / `@name` placeholders: the sigil plus the following
            // word or number form the placeholder text.
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.to_ident()),
                    Token::Number(w, false) => Ok(Ident::new(w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                let placeholder = tok.to_string() + &ident.value;
                Ok(Value::Placeholder(placeholder))
            }
            unexpected => self.expected(
                "a value",
                TokenWithLocation {
                    token: unexpected,
                    location,
                },
            ),
        }
    }
7659
7660 pub fn parse_number_value(&mut self) -> Result<Value, ParserError> {
7662 match self.parse_value()? {
7663 v @ Value::Number(_, _) => Ok(v),
7664 v @ Value::Placeholder(_) => Ok(v),
7665 _ => {
7666 self.prev_token();
7667 self.expected("literal number", self.peek_token())
7668 }
7669 }
7670 }
7671
7672 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
7675 let next_token = self.next_token();
7676 match next_token.token {
7677 Token::Plus => Ok(Expr::UnaryOp {
7678 op: UnaryOperator::Plus,
7679 expr: Box::new(Expr::Value(self.parse_number_value()?)),
7680 }),
7681 Token::Minus => Ok(Expr::UnaryOp {
7682 op: UnaryOperator::Minus,
7683 expr: Box::new(Expr::Value(self.parse_number_value()?)),
7684 }),
7685 _ => {
7686 self.prev_token();
7687 Ok(Expr::Value(self.parse_number_value()?))
7688 }
7689 }
7690 }
7691
7692 fn parse_introduced_string_value(&mut self) -> Result<Value, ParserError> {
7693 let next_token = self.next_token();
7694 let location = next_token.location;
7695 match next_token.token {
7696 Token::SingleQuotedString(ref s) => Ok(Value::SingleQuotedString(s.to_string())),
7697 Token::DoubleQuotedString(ref s) => Ok(Value::DoubleQuotedString(s.to_string())),
7698 Token::HexStringLiteral(ref s) => Ok(Value::HexStringLiteral(s.to_string())),
7699 unexpected => self.expected(
7700 "a string value",
7701 TokenWithLocation {
7702 token: unexpected,
7703 location,
7704 },
7705 ),
7706 }
7707 }
7708
7709 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
7711 let next_token = self.next_token();
7712 match next_token.token {
7713 Token::Number(s, _) => Self::parse::<u64>(s, next_token.location),
7714 _ => self.expected("literal int", next_token),
7715 }
7716 }
7717
7718 fn parse_create_function_body_string(&mut self) -> Result<Expr, ParserError> {
7721 let peek_token = self.peek_token();
7722 match peek_token.token {
7723 Token::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
7724 {
7725 self.next_token();
7726 Ok(Expr::Value(Value::DollarQuotedString(s)))
7727 }
7728 _ => Ok(Expr::Value(Value::SingleQuotedString(
7729 self.parse_literal_string()?,
7730 ))),
7731 }
7732 }
7733
    /// Parse a literal string: a bare non-keyword word, a single- or
    /// double-quoted string, or a dialect-specific escaped/Unicode literal.
    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
        let next_token = self.next_token();
        match next_token.token {
            // An unquoted identifier-like word is accepted as a string here.
            Token::Word(Word {
                value,
                keyword: Keyword::NoKeyword,
                ..
            }) => Ok(value),
            Token::SingleQuotedString(s) => Ok(s),
            Token::DoubleQuotedString(s) => Ok(s),
            // `E'...'` strings only on Postgres-like dialects.
            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Ok(s)
            }
            Token::UnicodeStringLiteral(s) => Ok(s),
            _ => self.expected("literal string", next_token),
        }
    }
7752
7753 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
7755 let (ty, trailing_bracket) = self.parse_data_type_helper()?;
7756 if trailing_bracket.0 {
7757 return parser_err!(
7758 format!("unmatched > after parsing data type {ty}"),
7759 self.peek_token()
7760 );
7761 }
7762
7763 Ok(ty)
7764 }
7765
    /// Parse a data type and report whether an extra trailing `>` was
    /// consumed while closing nested angle-bracket types (the caller of the
    /// outermost type rejects an unmatched one).
    fn parse_data_type_helper(
        &mut self,
    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
        let next_token = self.next_token();
        let mut trailing_bracket: MatchedTrailingBracket = false.into();
        let mut data = match next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::BOOLEAN => Ok(DataType::Boolean),
                Keyword::BOOL => Ok(DataType::Bool),
                Keyword::FLOAT => Ok(DataType::Float(self.parse_optional_precision()?)),
                Keyword::REAL => Ok(DataType::Real),
                Keyword::FLOAT4 => Ok(DataType::Float4),
                Keyword::FLOAT32 => Ok(DataType::Float32),
                Keyword::FLOAT64 => Ok(DataType::Float64),
                Keyword::FLOAT8 => Ok(DataType::Float8),
                Keyword::DOUBLE => {
                    if self.parse_keyword(Keyword::PRECISION) {
                        Ok(DataType::DoublePrecision)
                    } else {
                        Ok(DataType::Double)
                    }
                }
                // Integer types: an optional precision may be followed by the
                // MySQL-style `UNSIGNED` modifier. The precision Result is
                // only unwrapped after UNSIGNED is checked, preserving the
                // token-consumption order.
                Keyword::TINYINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedTinyInt(optional_precision?))
                    } else {
                        Ok(DataType::TinyInt(optional_precision?))
                    }
                }
                Keyword::INT2 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedInt2(optional_precision?))
                    } else {
                        Ok(DataType::Int2(optional_precision?))
                    }
                }
                Keyword::SMALLINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedSmallInt(optional_precision?))
                    } else {
                        Ok(DataType::SmallInt(optional_precision?))
                    }
                }
                Keyword::MEDIUMINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedMediumInt(optional_precision?))
                    } else {
                        Ok(DataType::MediumInt(optional_precision?))
                    }
                }
                Keyword::INT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedInt(optional_precision?))
                    } else {
                        Ok(DataType::Int(optional_precision?))
                    }
                }
                Keyword::INT4 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedInt4(optional_precision?))
                    } else {
                        Ok(DataType::Int4(optional_precision?))
                    }
                }
                Keyword::INT8 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedInt8(optional_precision?))
                    } else {
                        Ok(DataType::Int8(optional_precision?))
                    }
                }
                Keyword::INT16 => Ok(DataType::Int16),
                Keyword::INT32 => Ok(DataType::Int32),
                Keyword::INT64 => Ok(DataType::Int64),
                Keyword::INT128 => Ok(DataType::Int128),
                Keyword::INT256 => Ok(DataType::Int256),
                Keyword::INTEGER => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedInteger(optional_precision?))
                    } else {
                        Ok(DataType::Integer(optional_precision?))
                    }
                }
                Keyword::BIGINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::UnsignedBigInt(optional_precision?))
                    } else {
                        Ok(DataType::BigInt(optional_precision?))
                    }
                }
                Keyword::UINT8 => Ok(DataType::UInt8),
                Keyword::UINT16 => Ok(DataType::UInt16),
                Keyword::UINT32 => Ok(DataType::UInt32),
                Keyword::UINT64 => Ok(DataType::UInt64),
                Keyword::UINT128 => Ok(DataType::UInt128),
                Keyword::UINT256 => Ok(DataType::UInt256),
                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
                Keyword::NVARCHAR => {
                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
                }
                // `CHARACTER [VARYING | LARGE OBJECT]`
                Keyword::CHARACTER => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharacterVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharacterLargeObject(
                            self.parse_optional_precision()?,
                        ))
                    } else {
                        Ok(DataType::Character(self.parse_optional_character_length()?))
                    }
                }
                // `CHAR [VARYING | LARGE OBJECT]`
                Keyword::CHAR => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Char(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_precision()?)),
                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
                Keyword::UUID => Ok(DataType::Uuid),
                Keyword::DATE => Ok(DataType::Date),
                Keyword::DATE32 => Ok(DataType::Date32),
                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
                Keyword::DATETIME64 => {
                    // Rewind so the helper can re-read the keyword itself.
                    self.prev_token();
                    let (precision, time_zone) = self.parse_datetime_64()?;
                    Ok(DataType::Datetime64(precision, time_zone))
                }
                // `TIMESTAMP [(p)] [WITH | WITHOUT TIME ZONE]`
                Keyword::TIMESTAMP => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Timestamp(precision, tz))
                }
                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                // `TIME [(p)] [WITH | WITHOUT TIME ZONE]`
                Keyword::TIME => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Time(precision, tz))
                }
                Keyword::TIMETZ => Ok(DataType::Time(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::INTERVAL => Ok(DataType::Interval),
                Keyword::JSON => Ok(DataType::JSON),
                Keyword::JSONB => Ok(DataType::JSONB),
                Keyword::REGCLASS => Ok(DataType::Regclass),
                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
                // ClickHouse `FixedString(n)` takes a mandatory length.
                Keyword::FIXEDSTRING => {
                    self.expect_token(&Token::LParen)?;
                    let character_length = self.parse_literal_uint()?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::FixedString(character_length))
                }
                Keyword::TEXT => Ok(DataType::Text),
                Keyword::BYTEA => Ok(DataType::Bytea),
                Keyword::NUMERIC => Ok(DataType::Numeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::DECIMAL => Ok(DataType::Decimal(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::DEC => Ok(DataType::Dec(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::ENUM => Ok(DataType::Enum(self.parse_string_values()?)),
                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
                // `ARRAY` is dialect-sensitive: bare (Snowflake),
                // parenthesized element type (ClickHouse), or
                // angle-bracketed element type (BigQuery-style).
                Keyword::ARRAY => {
                    if dialect_of!(self is SnowflakeDialect) {
                        Ok(DataType::Array(ArrayElemTypeDef::None))
                    } else if dialect_of!(self is ClickHouseDialect) {
                        Ok(self.parse_sub_type(|internal_type| {
                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
                        })?)
                    } else {
                        self.expect_token(&Token::Lt)?;
                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
                            inside_type,
                        ))))
                    }
                }
                Keyword::STRUCT if dialect_of!(self is DuckDbDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_duckdb_struct_type_def()?;
                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
                }
                Keyword::STRUCT if dialect_of!(self is BigQueryDialect | GenericDialect) => {
                    self.prev_token();
                    // The struct parser may consume the `>` that closes an
                    // enclosing angle-bracket type; propagate that upward.
                    let (field_defs, _trailing_bracket) =
                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
                    trailing_bracket = _trailing_bracket;
                    Ok(DataType::Struct(
                        field_defs,
                        StructBracketKind::AngleBrackets,
                    ))
                }
                Keyword::UNION if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.prev_token();
                    let fields = self.parse_union_type_def()?;
                    Ok(DataType::Union(fields))
                }
                Keyword::NULLABLE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::Nullable)?)
                }
                Keyword::LOWCARDINALITY if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
                }
                Keyword::MAP if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
                    Ok(DataType::Map(
                        Box::new(key_data_type),
                        Box::new(value_data_type),
                    ))
                }
                Keyword::NESTED if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    self.expect_token(&Token::LParen)?;
                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::Nested(field_defs))
                }
                Keyword::TUPLE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_click_house_tuple_def()?;
                    Ok(DataType::Tuple(field_defs))
                }
                Keyword::TRIGGER => Ok(DataType::Trigger),
                // Anything else: parse as a (possibly qualified) custom type
                // name with optional modifiers, e.g. `my_schema.my_type(3)`.
                _ => {
                    self.prev_token();
                    let type_name = self.parse_object_name(false)?;
                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
                        Ok(DataType::Custom(type_name, modifiers))
                    } else {
                        Ok(DataType::Custom(type_name, vec![]))
                    }
                }
            },
            _ => self.expected("a data type name", next_token),
        }?;

        // Any number of `[]` / `[n]` suffixes wrap the type in arrays;
        // an explicit size is only accepted on the listed dialects.
        while self.consume_token(&Token::LBracket) {
            let size = if dialect_of!(self is GenericDialect | DuckDbDialect | PostgreSqlDialect) {
                self.maybe_parse(|p| p.parse_literal_uint())?
            } else {
                None
            };
            self.expect_token(&Token::RBracket)?;
            data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
        }
        Ok((data, trailing_bracket))
    }
8068
8069 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
8070 self.expect_token(&Token::LParen)?;
8071 let mut values = Vec::new();
8072 loop {
8073 let next_token = self.next_token();
8074 match next_token.token {
8075 Token::SingleQuotedString(value) => values.push(value),
8076 _ => self.expected("a string", next_token)?,
8077 }
8078 let next_token = self.next_token();
8079 match next_token.token {
8080 Token::Comma => (),
8081 Token::RParen => break,
8082 _ => self.expected(", or }", next_token)?,
8083 }
8084 }
8085 Ok(values)
8086 }
8087
8088 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
8090 let ident = self.parse_identifier(false)?;
8091 self.expect_keyword(Keyword::AS)?;
8092 let alias = self.parse_identifier(false)?;
8093 Ok(IdentWithAlias { ident, alias })
8094 }
8095
8096 pub fn parse_optional_alias(
8100 &mut self,
8101 reserved_kwds: &[Keyword],
8102 ) -> Result<Option<Ident>, ParserError> {
8103 let after_as = self.parse_keyword(Keyword::AS);
8104 let next_token = self.next_token();
8105 match next_token.token {
8106 Token::Word(w) if after_as || !reserved_kwds.contains(&w.keyword) => {
8112 Ok(Some(w.to_ident()))
8113 }
8114 Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
8127 Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
8129 _ => {
8130 if after_as {
8131 return self.expected("an identifier after AS", next_token);
8132 }
8133 self.prev_token();
8134 Ok(None) }
8136 }
8137 }
8138
8139 pub fn parse_optional_table_alias(
8144 &mut self,
8145 reserved_kwds: &[Keyword],
8146 ) -> Result<Option<TableAlias>, ParserError> {
8147 match self.parse_optional_alias(reserved_kwds)? {
8148 Some(name) => {
8149 let columns = self.parse_parenthesized_column_list(Optional, false)?;
8150 Ok(Some(TableAlias { name, columns }))
8151 }
8152 None => Ok(None),
8153 }
8154 }
8155
8156 pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
8157 if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
8158 let expressions = if self.parse_keyword(Keyword::ALL) {
8159 None
8160 } else {
8161 Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
8162 };
8163
8164 let mut modifiers = vec![];
8165 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
8166 loop {
8167 if !self.parse_keyword(Keyword::WITH) {
8168 break;
8169 }
8170 let keyword = self.expect_one_of_keywords(&[
8171 Keyword::ROLLUP,
8172 Keyword::CUBE,
8173 Keyword::TOTALS,
8174 ])?;
8175 modifiers.push(match keyword {
8176 Keyword::ROLLUP => GroupByWithModifier::Rollup,
8177 Keyword::CUBE => GroupByWithModifier::Cube,
8178 Keyword::TOTALS => GroupByWithModifier::Totals,
8179 _ => {
8180 return parser_err!(
8181 "BUG: expected to match GroupBy modifier keyword",
8182 self.peek_token().location
8183 )
8184 }
8185 });
8186 }
8187 }
8188 let group_by = match expressions {
8189 None => GroupByExpr::All(modifiers),
8190 Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
8191 };
8192 Ok(Some(group_by))
8193 } else {
8194 Ok(None)
8195 }
8196 }
8197
8198 pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
8199 if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
8200 let order_by_exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
8201 let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
8202 self.parse_interpolations()?
8203 } else {
8204 None
8205 };
8206
8207 Ok(Some(OrderBy {
8208 exprs: order_by_exprs,
8209 interpolate,
8210 }))
8211 } else {
8212 Ok(None)
8213 }
8214 }
8215
8216 pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
8223 let mut idents = vec![];
8224 loop {
8225 idents.push(self.parse_identifier(in_table_clause)?);
8226 if !self.consume_token(&Token::Period) {
8227 break;
8228 }
8229 }
8230
8231 if dialect_of!(self is BigQueryDialect)
8234 && idents.iter().any(|ident| ident.value.contains('.'))
8235 {
8236 idents = idents
8237 .into_iter()
8238 .flat_map(|ident| {
8239 ident
8240 .value
8241 .split('.')
8242 .map(|value| Ident {
8243 value: value.into(),
8244 quote_style: ident.quote_style,
8245 })
8246 .collect::<Vec<_>>()
8247 })
8248 .collect()
8249 }
8250
8251 Ok(ObjectName(idents))
8252 }
8253
8254 pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
8256 let mut idents = vec![];
8257 loop {
8258 match self.peek_token().token {
8259 Token::Word(w) => {
8260 idents.push(w.to_ident());
8261 }
8262 Token::EOF | Token::Eq => break,
8263 _ => {}
8264 }
8265 self.next_token();
8266 }
8267 Ok(idents)
8268 }
8269
8270 pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
8310 let mut idents = vec![];
8311
8312 match self.next_token().token {
8314 Token::Word(w) => idents.push(w.to_ident()),
8315 Token::EOF => {
8316 return Err(ParserError::ParserError(
8317 "Empty input when parsing identifier".to_string(),
8318 ))?
8319 }
8320 token => {
8321 return Err(ParserError::ParserError(format!(
8322 "Unexpected token in identifier: {token}"
8323 )))?
8324 }
8325 };
8326
8327 loop {
8329 match self.next_token().token {
8330 Token::Period => match self.next_token().token {
8332 Token::Word(w) => idents.push(w.to_ident()),
8333 Token::EOF => {
8334 return Err(ParserError::ParserError(
8335 "Trailing period in identifier".to_string(),
8336 ))?
8337 }
8338 token => {
8339 return Err(ParserError::ParserError(format!(
8340 "Unexpected token following period in identifier: {token}"
8341 )))?
8342 }
8343 },
8344 Token::EOF => break,
8345 token => {
8346 return Err(ParserError::ParserError(format!(
8347 "Unexpected token in identifier: {token}"
8348 )))?
8349 }
8350 }
8351 }
8352
8353 Ok(idents)
8354 }
8355
    /// Parse a single identifier: a word, or a single-/double-quoted string.
    ///
    /// `in_table_clause` enables BigQuery's hyphenated unquoted table names
    /// (e.g. `my-project.dataset.table`).
    pub fn parse_identifier(&mut self, in_table_clause: bool) -> Result<Ident, ParserError> {
        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) => {
                let mut ident = w.to_ident();

                // BigQuery allows hyphens in unquoted table names: stitch
                // runs of `word - word` tokens back into one identifier.
                if dialect_of!(self is BigQueryDialect)
                    && w.quote_style.is_none()
                    && in_table_clause
                {
                    let mut requires_whitespace = false;
                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
                        self.next_token();
                        ident.value.push('-');

                        // Read the token right after the hyphen without
                        // skipping whitespace; treat end of input as EOF.
                        let token = self
                            .next_token_no_skip()
                            .cloned()
                            .unwrap_or(TokenWithLocation::wrap(Token::EOF));
                        requires_whitespace = match token.token {
                            Token::Word(next_word) if next_word.quote_style.is_none() => {
                                ident.value.push_str(&next_word.value);
                                false
                            }
                            // An all-digit segment may end the identifier,
                            // but then whitespace (or EOF) must follow so it
                            // is not read as part of a larger token.
                            Token::Number(s, false) if s.chars().all(|c| c.is_ascii_digit()) => {
                                ident.value.push_str(&s);
                                true
                            }
                            _ => {
                                return self
                                    .expected("continuation of hyphenated identifier", token);
                            }
                        }
                    }

                    // Enforce the whitespace requirement set by a trailing
                    // numeric segment above.
                    if requires_whitespace {
                        let token = self.next_token();
                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
                            return self
                                .expected("whitespace following hyphenated identifier", token);
                        }
                    }
                }
                Ok(ident)
            }
            Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
            Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
            _ => self.expected("identifier", next_token),
        }
    }
8421
8422 fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
8424 if self.consume_token(&Token::LParen) {
8425 if self.peek_token().token == Token::RParen {
8426 self.next_token();
8427 Ok(vec![])
8428 } else {
8429 let cols = self.parse_comma_separated(Parser::parse_view_column)?;
8430 self.expect_token(&Token::RParen)?;
8431 Ok(cols)
8432 }
8433 } else {
8434 Ok(vec![])
8435 }
8436 }
8437
    /// Parse one column of a view definition: a name, optionally followed by
    /// dialect-specific column options (`OPTIONS`/`COMMENT`) and, for
    /// ClickHouse, an explicit data type.
    fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
        let name = self.parse_identifier(false)?;
        // `parse_keyword` consumes the keyword on success, so on a match we
        // step back one token and let `parse_optional_column_option` re-read
        // it as a full column option.
        let options = if (dialect_of!(self is BigQueryDialect | GenericDialect)
            && self.parse_keyword(Keyword::OPTIONS))
            || (dialect_of!(self is SnowflakeDialect | GenericDialect)
                && self.parse_keyword(Keyword::COMMENT))
        {
            self.prev_token();
            self.parse_optional_column_option()?
                .map(|option| vec![option])
        } else {
            None
        };
        // ClickHouse view columns may declare a data type.
        let data_type = if dialect_of!(self is ClickHouseDialect) {
            Some(self.parse_data_type()?)
        } else {
            None
        };
        Ok(ViewColumnDef {
            name,
            data_type,
            options,
        })
    }
8463
8464 pub fn parse_parenthesized_column_list(
8466 &mut self,
8467 optional: IsOptional,
8468 allow_empty: bool,
8469 ) -> Result<Vec<Ident>, ParserError> {
8470 if self.consume_token(&Token::LParen) {
8471 if allow_empty && self.peek_token().token == Token::RParen {
8472 self.next_token();
8473 Ok(vec![])
8474 } else {
8475 let cols = self.parse_comma_separated(|p| p.parse_identifier(false))?;
8476 self.expect_token(&Token::RParen)?;
8477 Ok(cols)
8478 }
8479 } else if optional == Optional {
8480 Ok(vec![])
8481 } else {
8482 self.expected("a list of columns in parentheses", self.peek_token())
8483 }
8484 }
8485
8486 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
8487 self.expect_token(&Token::LParen)?;
8488 let n = self.parse_literal_uint()?;
8489 self.expect_token(&Token::RParen)?;
8490 Ok(n)
8491 }
8492
8493 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
8494 if self.consume_token(&Token::LParen) {
8495 let n = self.parse_literal_uint()?;
8496 self.expect_token(&Token::RParen)?;
8497 Ok(Some(n))
8498 } else {
8499 Ok(None)
8500 }
8501 }
8502
8503 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
8511 self.expect_keyword(Keyword::DATETIME64)?;
8512 self.expect_token(&Token::LParen)?;
8513 let precision = self.parse_literal_uint()?;
8514 let time_zone = if self.consume_token(&Token::Comma) {
8515 Some(self.parse_literal_string()?)
8516 } else {
8517 None
8518 };
8519 self.expect_token(&Token::RParen)?;
8520 Ok((precision, time_zone))
8521 }
8522
8523 pub fn parse_optional_character_length(
8524 &mut self,
8525 ) -> Result<Option<CharacterLength>, ParserError> {
8526 if self.consume_token(&Token::LParen) {
8527 let character_length = self.parse_character_length()?;
8528 self.expect_token(&Token::RParen)?;
8529 Ok(Some(character_length))
8530 } else {
8531 Ok(None)
8532 }
8533 }
8534
8535 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
8536 if self.parse_keyword(Keyword::MAX) {
8537 return Ok(CharacterLength::Max);
8538 }
8539 let length = self.parse_literal_uint()?;
8540 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
8541 Some(CharLengthUnits::Characters)
8542 } else if self.parse_keyword(Keyword::OCTETS) {
8543 Some(CharLengthUnits::Octets)
8544 } else {
8545 None
8546 };
8547 Ok(CharacterLength::IntegerLength { length, unit })
8548 }
8549
8550 pub fn parse_optional_precision_scale(
8551 &mut self,
8552 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
8553 if self.consume_token(&Token::LParen) {
8554 let n = self.parse_literal_uint()?;
8555 let scale = if self.consume_token(&Token::Comma) {
8556 Some(self.parse_literal_uint()?)
8557 } else {
8558 None
8559 };
8560 self.expect_token(&Token::RParen)?;
8561 Ok((Some(n), scale))
8562 } else {
8563 Ok((None, None))
8564 }
8565 }
8566
8567 pub fn parse_exact_number_optional_precision_scale(
8568 &mut self,
8569 ) -> Result<ExactNumberInfo, ParserError> {
8570 if self.consume_token(&Token::LParen) {
8571 let precision = self.parse_literal_uint()?;
8572 let scale = if self.consume_token(&Token::Comma) {
8573 Some(self.parse_literal_uint()?)
8574 } else {
8575 None
8576 };
8577
8578 self.expect_token(&Token::RParen)?;
8579
8580 match scale {
8581 None => Ok(ExactNumberInfo::Precision(precision)),
8582 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
8583 }
8584 } else {
8585 Ok(ExactNumberInfo::None)
8586 }
8587 }
8588
8589 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
8590 if self.consume_token(&Token::LParen) {
8591 let mut modifiers = Vec::new();
8592 loop {
8593 let next_token = self.next_token();
8594 match next_token.token {
8595 Token::Word(w) => modifiers.push(w.to_string()),
8596 Token::Number(n, _) => modifiers.push(n),
8597 Token::SingleQuotedString(s) => modifiers.push(s),
8598
8599 Token::Comma => {
8600 continue;
8601 }
8602 Token::RParen => {
8603 break;
8604 }
8605 _ => self.expected("type modifiers", next_token)?,
8606 }
8607 }
8608
8609 Ok(Some(modifiers))
8610 } else {
8611 Ok(None)
8612 }
8613 }
8614
8615 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
8617 where
8618 F: FnOnce(Box<DataType>) -> DataType,
8619 {
8620 self.expect_token(&Token::LParen)?;
8621 let inside_type = self.parse_data_type()?;
8622 self.expect_token(&Token::RParen)?;
8623 Ok(parent_type(inside_type.into()))
8624 }
8625
    /// Parse a `DELETE` statement (the `DELETE` keyword itself has already
    /// been consumed), including the multi-table form and the optional
    /// `USING`, `WHERE`, `RETURNING`, `ORDER BY`, and `LIMIT` clauses.
    pub fn parse_delete(&mut self) -> Result<Statement, ParserError> {
        // Distinguish `DELETE FROM t ...` from the multi-table form
        // `DELETE t1, t2 FROM ...`. BigQuery/Generic also allow omitting
        // the FROM keyword entirely.
        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
            if dialect_of!(self is BigQueryDialect | GenericDialect) {
                (vec![], false)
            } else {
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                self.expect_keyword(Keyword::FROM)?;
                (tables, true)
            }
        } else {
            (vec![], true)
        };

        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
        // Optional `USING`: extra tables referenced by the WHERE condition.
        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
        } else {
            None
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        // Optional `RETURNING <select items>`.
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            self.parse_limit()?
        } else {
            None
        };

        Ok(Statement::Delete(Delete {
            tables,
            // Record whether FROM was spelled out so the statement can be
            // displayed back in its original form.
            from: if with_from_keyword {
                FromTable::WithFromKeyword(from)
            } else {
                FromTable::WithoutKeyword(from)
            },
            using,
            selection,
            returning,
            order_by,
            limit,
        }))
    }
8682
8683 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
8685 let modifier_keyword =
8686 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
8687
8688 let id = self.parse_literal_uint()?;
8689
8690 let modifier = match modifier_keyword {
8691 Some(Keyword::CONNECTION) => Some(KillType::Connection),
8692 Some(Keyword::QUERY) => Some(KillType::Query),
8693 Some(Keyword::MUTATION) => {
8694 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
8695 Some(KillType::Mutation)
8696 } else {
8697 self.expected(
8698 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
8699 self.peek_token(),
8700 )?
8701 }
8702 }
8703 _ => None,
8704 };
8705
8706 Ok(Statement::Kill { modifier, id })
8707 }
8708
    /// Parse an `EXPLAIN`/`DESCRIBE`-style statement. `describe_alias`
    /// records which spelling introduced it. When what follows is not a
    /// parsable statement, falls back to `ExplainTable`
    /// (`DESCRIBE [EXTENDED|FORMATTED] [TABLE] <name>`).
    pub fn parse_explain(
        &mut self,
        describe_alias: DescribeAlias,
    ) -> Result<Statement, ParserError> {
        let mut analyze = false;
        let mut verbose = false;
        let mut query_plan = false;
        let mut format = None;
        let mut options = None;

        // Parenthesized utility options, e.g. `EXPLAIN (ANALYZE, VERBOSE)`,
        // when the dialect supports them; otherwise `QUERY PLAN`, or the
        // bare `[ANALYZE] [VERBOSE] [FORMAT <fmt>]` keyword options.
        if describe_alias == DescribeAlias::Explain
            && self.dialect.supports_explain_with_utility_options()
            && self.peek_token().token == Token::LParen
        {
            options = Some(self.parse_utility_options()?)
        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
            query_plan = true;
        } else {
            analyze = self.parse_keyword(Keyword::ANALYZE);
            verbose = self.parse_keyword(Keyword::VERBOSE);
            if self.parse_keyword(Keyword::FORMAT) {
                format = Some(self.parse_analyze_format()?);
            }
        }

        match self.maybe_parse(|parser| parser.parse_statement())? {
            // Nested EXPLAIN is rejected outright.
            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
                ParserError::ParserError("Explain must be root of the plan".to_string()),
            ),
            Some(statement) => Ok(Statement::Explain {
                describe_alias,
                analyze,
                verbose,
                query_plan,
                statement: Box::new(statement),
                format,
                options,
            }),
            _ => {
                // No statement follows: interpret as a table description.
                let hive_format =
                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
                        _ => None,
                    };

                // Some dialects require an explicit TABLE keyword here.
                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
                    self.parse_keyword(Keyword::TABLE)
                } else {
                    false
                };

                let table_name = self.parse_object_name(false)?;
                Ok(Statement::ExplainTable {
                    describe_alias,
                    hive_format,
                    has_table_keyword,
                    table_name,
                })
            }
        }
    }
8774
    /// Parse a full query expression: an optional `WITH` clause, a body
    /// (SELECT / VALUES / parenthesized query / INSERT / UPDATE), and
    /// trailing ORDER BY / LIMIT / OFFSET / FETCH / locking and
    /// dialect-specific clauses.
    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
        // Bound recursion depth; deeply nested subqueries would otherwise
        // overflow the stack. The guard restores the budget on drop.
        let _guard = self.recursion_counter.try_decrease()?;
        let with = if self.parse_keyword(Keyword::WITH) {
            Some(With {
                recursive: self.parse_keyword(Keyword::RECURSIVE),
                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
            })
        } else {
            None
        };
        // `WITH ... INSERT` / `WITH ... UPDATE`: the DML statement is the
        // whole body; none of the trailing query clauses apply.
        if self.parse_keyword(Keyword::INSERT) {
            Ok(Query {
                with,
                body: self.parse_insert_setexpr_boxed()?,
                limit: None,
                limit_by: vec![],
                order_by: None,
                offset: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
            }
            .into())
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Query {
                with,
                body: self.parse_update_setexpr_boxed()?,
                limit: None,
                limit_by: vec![],
                order_by: None,
                offset: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
            }
            .into())
        } else {
            let body = self.parse_query_body(self.dialect.prec_unknown())?;

            let order_by = self.parse_optional_order_by()?;

            let mut limit = None;
            let mut offset = None;

            // Two passes so both `LIMIT ... OFFSET ...` and
            // `OFFSET ... LIMIT ...` orderings are accepted.
            for _x in 0..2 {
                if limit.is_none() && self.parse_keyword(Keyword::LIMIT) {
                    limit = self.parse_limit()?
                }

                if offset.is_none() && self.parse_keyword(Keyword::OFFSET) {
                    offset = Some(self.parse_offset()?)
                }

                // MySQL-style `LIMIT <offset>, <limit>`: the value parsed
                // first was actually the offset; reparse the real limit.
                if self.dialect.supports_limit_comma()
                    && limit.is_some()
                    && offset.is_none()
                    && self.consume_token(&Token::Comma)
                {
                    offset = Some(Offset {
                        value: limit.unwrap(),
                        rows: OffsetRows::None,
                    });
                    limit = Some(self.parse_expr()?);
                }
            }

            // ClickHouse `LIMIT n BY expr, ...`.
            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
                && self.parse_keyword(Keyword::BY)
            {
                self.parse_comma_separated(Parser::parse_expr)?
            } else {
                vec![]
            };

            let settings = self.parse_settings()?;

            let fetch = if self.parse_keyword(Keyword::FETCH) {
                Some(self.parse_fetch()?)
            } else {
                None
            };

            // Trailing `FOR` clauses: repeatable lock clauses (handled by
            // `parse_lock`), or a single FOR XML/JSON/BROWSE clause that
            // terminates the loop.
            let mut for_clause = None;
            let mut locks = Vec::new();
            while self.parse_keyword(Keyword::FOR) {
                if let Some(parsed_for_clause) = self.parse_for_clause()? {
                    for_clause = Some(parsed_for_clause);
                    break;
                } else {
                    locks.push(self.parse_lock()?);
                }
            }
            // ClickHouse `FORMAT <identifier>` or `FORMAT NULL`.
            let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
                && self.parse_keyword(Keyword::FORMAT)
            {
                if self.parse_keyword(Keyword::NULL) {
                    Some(FormatClause::Null)
                } else {
                    let ident = self.parse_identifier(false)?;
                    Some(FormatClause::Identifier(ident))
                }
            } else {
                None
            };

            Ok(Query {
                with,
                body,
                order_by,
                limit,
                limit_by,
                offset,
                fetch,
                locks,
                for_clause,
                settings,
                format_clause,
            }
            .into())
        }
    }
8906
8907 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
8908 let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
8909 && self.parse_keyword(Keyword::SETTINGS)
8910 {
8911 let key_values = self.parse_comma_separated(|p| {
8912 let key = p.parse_identifier(false)?;
8913 p.expect_token(&Token::Eq)?;
8914 let value = p.parse_value()?;
8915 Ok(Setting { key, value })
8916 })?;
8917 Some(key_values)
8918 } else {
8919 None
8920 };
8921 Ok(settings)
8922 }
8923
8924 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
8926 if self.parse_keyword(Keyword::XML) {
8927 Ok(Some(self.parse_for_xml()?))
8928 } else if self.parse_keyword(Keyword::JSON) {
8929 Ok(Some(self.parse_for_json()?))
8930 } else if self.parse_keyword(Keyword::BROWSE) {
8931 Ok(Some(ForClause::Browse))
8932 } else {
8933 Ok(None)
8934 }
8935 }
8936
8937 pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
8939 let for_xml = if self.parse_keyword(Keyword::RAW) {
8940 let mut element_name = None;
8941 if self.peek_token().token == Token::LParen {
8942 self.expect_token(&Token::LParen)?;
8943 element_name = Some(self.parse_literal_string()?);
8944 self.expect_token(&Token::RParen)?;
8945 }
8946 ForXml::Raw(element_name)
8947 } else if self.parse_keyword(Keyword::AUTO) {
8948 ForXml::Auto
8949 } else if self.parse_keyword(Keyword::EXPLICIT) {
8950 ForXml::Explicit
8951 } else if self.parse_keyword(Keyword::PATH) {
8952 let mut element_name = None;
8953 if self.peek_token().token == Token::LParen {
8954 self.expect_token(&Token::LParen)?;
8955 element_name = Some(self.parse_literal_string()?);
8956 self.expect_token(&Token::RParen)?;
8957 }
8958 ForXml::Path(element_name)
8959 } else {
8960 return Err(ParserError::ParserError(
8961 "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
8962 ));
8963 };
8964 let mut elements = false;
8965 let mut binary_base64 = false;
8966 let mut root = None;
8967 let mut r#type = false;
8968 while self.peek_token().token == Token::Comma {
8969 self.next_token();
8970 if self.parse_keyword(Keyword::ELEMENTS) {
8971 elements = true;
8972 } else if self.parse_keyword(Keyword::BINARY) {
8973 self.expect_keyword(Keyword::BASE64)?;
8974 binary_base64 = true;
8975 } else if self.parse_keyword(Keyword::ROOT) {
8976 self.expect_token(&Token::LParen)?;
8977 root = Some(self.parse_literal_string()?);
8978 self.expect_token(&Token::RParen)?;
8979 } else if self.parse_keyword(Keyword::TYPE) {
8980 r#type = true;
8981 }
8982 }
8983 Ok(ForClause::Xml {
8984 for_xml,
8985 elements,
8986 binary_base64,
8987 root,
8988 r#type,
8989 })
8990 }
8991
8992 pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
8994 let for_json = if self.parse_keyword(Keyword::AUTO) {
8995 ForJson::Auto
8996 } else if self.parse_keyword(Keyword::PATH) {
8997 ForJson::Path
8998 } else {
8999 return Err(ParserError::ParserError(
9000 "Expected FOR JSON [AUTO | PATH ]".to_string(),
9001 ));
9002 };
9003 let mut root = None;
9004 let mut include_null_values = false;
9005 let mut without_array_wrapper = false;
9006 while self.peek_token().token == Token::Comma {
9007 self.next_token();
9008 if self.parse_keyword(Keyword::ROOT) {
9009 self.expect_token(&Token::LParen)?;
9010 root = Some(self.parse_literal_string()?);
9011 self.expect_token(&Token::RParen)?;
9012 } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
9013 include_null_values = true;
9014 } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
9015 without_array_wrapper = true;
9016 }
9017 }
9018 Ok(ForClause::Json {
9019 for_json,
9020 root,
9021 include_null_values,
9022 without_array_wrapper,
9023 })
9024 }
9025
9026 pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
9028 let name = self.parse_identifier(false)?;
9029
9030 let mut cte = if self.parse_keyword(Keyword::AS) {
9031 let mut is_materialized = None;
9032 if dialect_of!(self is PostgreSqlDialect) {
9033 if self.parse_keyword(Keyword::MATERIALIZED) {
9034 is_materialized = Some(CteAsMaterialized::Materialized);
9035 } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
9036 is_materialized = Some(CteAsMaterialized::NotMaterialized);
9037 }
9038 }
9039 self.expect_token(&Token::LParen)?;
9040 let query = self.parse_query()?;
9041 self.expect_token(&Token::RParen)?;
9042 let alias = TableAlias {
9043 name,
9044 columns: vec![],
9045 };
9046 Cte {
9047 alias,
9048 query,
9049 from: None,
9050 materialized: is_materialized,
9051 }
9052 } else {
9053 let columns = self.parse_parenthesized_column_list(Optional, false)?;
9054 self.expect_keyword(Keyword::AS)?;
9055 let mut is_materialized = None;
9056 if dialect_of!(self is PostgreSqlDialect) {
9057 if self.parse_keyword(Keyword::MATERIALIZED) {
9058 is_materialized = Some(CteAsMaterialized::Materialized);
9059 } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
9060 is_materialized = Some(CteAsMaterialized::NotMaterialized);
9061 }
9062 }
9063 self.expect_token(&Token::LParen)?;
9064 let query = self.parse_query()?;
9065 self.expect_token(&Token::RParen)?;
9066 let alias = TableAlias { name, columns };
9067 Cte {
9068 alias,
9069 query,
9070 from: None,
9071 materialized: is_materialized,
9072 }
9073 };
9074 if self.parse_keyword(Keyword::FROM) {
9075 cte.from = Some(self.parse_identifier(false)?);
9076 }
9077 Ok(cte)
9078 }
9079
9080 pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
9089 let expr = if self.parse_keyword(Keyword::SELECT) {
9092 SetExpr::Select(self.parse_select().map(Box::new)?)
9093 } else if self.consume_token(&Token::LParen) {
9094 let subquery = self.parse_query()?;
9096 self.expect_token(&Token::RParen)?;
9097 SetExpr::Query(subquery)
9098 } else if self.parse_keyword(Keyword::VALUES) {
9099 let is_mysql = dialect_of!(self is MySqlDialect);
9100 SetExpr::Values(self.parse_values(is_mysql)?)
9101 } else if self.parse_keyword(Keyword::TABLE) {
9102 SetExpr::Table(Box::new(self.parse_as_table()?))
9103 } else {
9104 return self.expected(
9105 "SELECT, VALUES, or a subquery in the query body",
9106 self.peek_token(),
9107 );
9108 };
9109
9110 self.parse_remaining_set_exprs(expr, precedence)
9111 }
9112
9113 fn parse_remaining_set_exprs(
9117 &mut self,
9118 mut expr: SetExpr,
9119 precedence: u8,
9120 ) -> Result<Box<SetExpr>, ParserError> {
9121 loop {
9122 let op = self.parse_set_operator(&self.peek_token().token);
9124 let next_precedence = match op {
9125 Some(SetOperator::Union) | Some(SetOperator::Except) => 10,
9127 Some(SetOperator::Intersect) => 20,
9129 None => break,
9131 };
9132 if precedence >= next_precedence {
9133 break;
9134 }
9135 self.next_token(); let set_quantifier = self.parse_set_quantifier(&op);
9137 expr = SetExpr::SetOperation {
9138 left: Box::new(expr),
9139 op: op.unwrap(),
9140 set_quantifier,
9141 right: self.parse_query_body(next_precedence)?,
9142 };
9143 }
9144
9145 Ok(expr.into())
9146 }
9147
9148 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
9149 match token {
9150 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
9151 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
9152 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
9153 _ => None,
9154 }
9155 }
9156
    /// Parse the optional quantifier after a set operator: `DISTINCT`,
    /// `ALL`, and the `[DISTINCT | ALL] BY NAME` variants.
    ///
    /// Longer keyword sequences are tried first because `parse_keywords`
    /// consumes its keywords on success.
    pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
        match op {
            Some(SetOperator::Except | SetOperator::Intersect | SetOperator::Union) => {
                if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
                    SetQuantifier::DistinctByName
                } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
                    SetQuantifier::ByName
                } else if self.parse_keyword(Keyword::ALL) {
                    // `ALL` may itself be followed by `BY NAME`.
                    if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
                        SetQuantifier::AllByName
                    } else {
                        SetQuantifier::All
                    }
                } else if self.parse_keyword(Keyword::DISTINCT) {
                    SetQuantifier::Distinct
                } else {
                    SetQuantifier::None
                }
            }
            // No set operator: nothing to parse.
            _ => SetQuantifier::None,
        }
    }
9179
9180 pub fn parse_select(&mut self) -> Result<Select, ParserError> {
9183 let value_table_mode =
9184 if dialect_of!(self is BigQueryDialect) && self.parse_keyword(Keyword::AS) {
9185 if self.parse_keyword(Keyword::VALUE) {
9186 Some(ValueTableMode::AsValue)
9187 } else if self.parse_keyword(Keyword::STRUCT) {
9188 Some(ValueTableMode::AsStruct)
9189 } else {
9190 self.expected("VALUE or STRUCT", self.peek_token())?
9191 }
9192 } else {
9193 None
9194 };
9195
9196 let mut top_before_distinct = false;
9197 let mut top = None;
9198 if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
9199 top = Some(self.parse_top()?);
9200 top_before_distinct = true;
9201 }
9202 let distinct = self.parse_all_or_distinct()?;
9203 if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
9204 top = Some(self.parse_top()?);
9205 }
9206
9207 let projection = self.parse_projection()?;
9208
9209 let into = if self.parse_keyword(Keyword::INTO) {
9210 let temporary = self
9211 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
9212 .is_some();
9213 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
9214 let table = self.parse_keyword(Keyword::TABLE);
9215 let name = self.parse_object_name(false)?;
9216 Some(SelectInto {
9217 temporary,
9218 unlogged,
9219 table,
9220 name,
9221 })
9222 } else {
9223 None
9224 };
9225
9226 let from = if self.parse_keyword(Keyword::FROM) {
9232 self.parse_comma_separated(Parser::parse_table_and_joins)?
9233 } else {
9234 vec![]
9235 };
9236
9237 let mut lateral_views = vec![];
9238 loop {
9239 if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
9240 let outer = self.parse_keyword(Keyword::OUTER);
9241 let lateral_view = self.parse_expr()?;
9242 let lateral_view_name = self.parse_object_name(false)?;
9243 let lateral_col_alias = self
9244 .parse_comma_separated(|parser| {
9245 parser.parse_optional_alias(&[
9246 Keyword::WHERE,
9247 Keyword::GROUP,
9248 Keyword::CLUSTER,
9249 Keyword::HAVING,
9250 Keyword::LATERAL,
9251 ]) })?
9253 .into_iter()
9254 .flatten()
9255 .collect();
9256
9257 lateral_views.push(LateralView {
9258 lateral_view,
9259 lateral_view_name,
9260 lateral_col_alias,
9261 outer,
9262 });
9263 } else {
9264 break;
9265 }
9266 }
9267
9268 let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
9269 && self.parse_keyword(Keyword::PREWHERE)
9270 {
9271 Some(self.parse_expr()?)
9272 } else {
9273 None
9274 };
9275
9276 let selection = if self.parse_keyword(Keyword::WHERE) {
9277 Some(self.parse_expr()?)
9278 } else {
9279 None
9280 };
9281
9282 let group_by = self
9283 .parse_optional_group_by()?
9284 .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));
9285
9286 let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
9287 self.parse_comma_separated(Parser::parse_expr)?
9288 } else {
9289 vec![]
9290 };
9291
9292 let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
9293 self.parse_comma_separated(Parser::parse_expr)?
9294 } else {
9295 vec![]
9296 };
9297
9298 let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
9299 self.parse_comma_separated(Parser::parse_expr)?
9300 } else {
9301 vec![]
9302 };
9303
9304 let having = if self.parse_keyword(Keyword::HAVING) {
9305 Some(self.parse_expr()?)
9306 } else {
9307 None
9308 };
9309
9310 let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
9312 {
9313 let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
9314 if self.parse_keyword(Keyword::QUALIFY) {
9315 (named_windows, Some(self.parse_expr()?), true)
9316 } else {
9317 (named_windows, None, true)
9318 }
9319 } else if self.parse_keyword(Keyword::QUALIFY) {
9320 let qualify = Some(self.parse_expr()?);
9321 if self.parse_keyword(Keyword::WINDOW) {
9322 (
9323 self.parse_comma_separated(Parser::parse_named_window)?,
9324 qualify,
9325 false,
9326 )
9327 } else {
9328 (Default::default(), qualify, false)
9329 }
9330 } else {
9331 Default::default()
9332 };
9333
9334 let connect_by = if self.dialect.supports_connect_by()
9335 && self
9336 .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
9337 .is_some()
9338 {
9339 self.prev_token();
9340 Some(self.parse_connect_by()?)
9341 } else {
9342 None
9343 };
9344
9345 Ok(Select {
9346 distinct,
9347 top,
9348 top_before_distinct,
9349 projection,
9350 into,
9351 from,
9352 lateral_views,
9353 prewhere,
9354 selection,
9355 group_by,
9356 cluster_by,
9357 distribute_by,
9358 sort_by,
9359 having,
9360 named_window: named_windows,
9361 window_before_qualify,
9362 qualify,
9363 value_table_mode,
9364 connect_by,
9365 })
9366 }
9367
9368 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
9372 where
9373 F: FnMut(&mut Parser) -> Result<T, ParserError>,
9374 {
9375 let current_state = self.state;
9376 self.state = state;
9377 let res = f(self);
9378 self.state = current_state;
9379 res
9380 }
9381
9382 pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
9383 let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
9384 let relationships = self.with_state(ParserState::ConnectBy, |parser| {
9385 parser.parse_comma_separated(Parser::parse_expr)
9386 })?;
9387 self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
9388 let condition = self.parse_expr()?;
9389 (condition, relationships)
9390 } else {
9391 self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
9392 let condition = self.parse_expr()?;
9393 self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
9394 let relationships = self.with_state(ParserState::ConnectBy, |parser| {
9395 parser.parse_comma_separated(Parser::parse_expr)
9396 })?;
9397 (condition, relationships)
9398 };
9399 Ok(ConnectBy {
9400 condition,
9401 relationships,
9402 })
9403 }
9404
9405 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
9407 let token1 = self.next_token();
9408 let token2 = self.next_token();
9409 let token3 = self.next_token();
9410
9411 let table_name;
9412 let schema_name;
9413 if token2 == Token::Period {
9414 match token1.token {
9415 Token::Word(w) => {
9416 schema_name = w.value;
9417 }
9418 _ => {
9419 return self.expected("Schema name", token1);
9420 }
9421 }
9422 match token3.token {
9423 Token::Word(w) => {
9424 table_name = w.value;
9425 }
9426 _ => {
9427 return self.expected("Table name", token3);
9428 }
9429 }
9430 Ok(Table {
9431 table_name: Some(table_name),
9432 schema_name: Some(schema_name),
9433 })
9434 } else {
9435 match token1.token {
9436 Token::Word(w) => {
9437 table_name = w.value;
9438 }
9439 _ => {
9440 return self.expected("Table name", token1);
9441 }
9442 }
9443 Ok(Table {
9444 table_name: Some(table_name),
9445 schema_name: None,
9446 })
9447 }
9448 }
9449
9450 fn parse_set_role(&mut self, modifier: Option<Keyword>) -> Result<Statement, ParserError> {
9452 self.expect_keyword(Keyword::ROLE)?;
9453 let context_modifier = match modifier {
9454 Some(Keyword::LOCAL) => ContextModifier::Local,
9455 Some(Keyword::SESSION) => ContextModifier::Session,
9456 _ => ContextModifier::None,
9457 };
9458
9459 let role_name = if self.parse_keyword(Keyword::NONE) {
9460 None
9461 } else {
9462 Some(self.parse_identifier(false)?)
9463 };
9464 Ok(Statement::SetRole {
9465 context_modifier,
9466 role_name,
9467 })
9468 }
9469
    /// Parse a `SET` statement: `SET ROLE`, `SET NAMES`, `SET TIME ZONE`,
    /// `SET TRANSACTION`, `SET SESSION CHARACTERISTICS`, or a plain
    /// variable assignment (`SET var = value` / `SET var TO value`).
    pub fn parse_set(&mut self) -> Result<Statement, ParserError> {
        // Optional scope modifier; HIVEVAR introduces the `SET hivevar:name=value` form.
        let modifier =
            self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::HIVEVAR]);
        if let Some(Keyword::HIVEVAR) = modifier {
            self.expect_token(&Token::Colon)?;
        } else if let Some(set_role_stmt) =
            self.maybe_parse(|parser| parser.parse_set_role(modifier))?
        {
            // `SET [LOCAL|SESSION] ROLE ...` — delegated to parse_set_role;
            // maybe_parse presumably backtracks when it is not a ROLE statement.
            return Ok(set_role_stmt);
        }

        // The variable(s) being assigned: the special `TIME ZONE` spelling,
        // a parenthesized list (dialect-dependent), or a single object name.
        let variables = if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE]) {
            // `SET TIME ZONE ...` is normalized to the variable name "TIMEZONE".
            OneOrManyWithParens::One(ObjectName(vec!["TIMEZONE".into()]))
        } else if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            let variables = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| {
                    parser.parse_identifier(false)
                })?
                .into_iter()
                .map(|ident| ObjectName(vec![ident]))
                .collect(),
            );
            self.expect_token(&Token::RParen)?;
            variables
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        // MySQL-style `SET NAMES <charset> [COLLATE <collation>]` / `SET NAMES DEFAULT`.
        if matches!(&variables, OneOrManyWithParens::One(variable) if variable.to_string().eq_ignore_ascii_case("NAMES")
            && dialect_of!(self is MySqlDialect | GenericDialect))
        {
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Statement::SetNamesDefault {});
            }

            let charset_name = self.parse_literal_string()?;
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Statement::SetNames {
                charset_name,
                collation_name,
            });
        }

        // A parenthesized variable list requires a parenthesized value list.
        let parenthesized_assignment = matches!(&variables, OneOrManyWithParens::Many(_));

        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            if parenthesized_assignment {
                self.expect_token(&Token::LParen)?;
            }

            let mut values = vec![];
            loop {
                // Prefer a parenthesized subquery value; otherwise any expression.
                let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
                    expr
                } else if let Ok(expr) = self.parse_expr() {
                    expr
                } else {
                    self.expected("variable value", self.peek_token())?
                };

                values.push(value);
                if self.consume_token(&Token::Comma) {
                    continue;
                }

                if parenthesized_assignment {
                    self.expect_token(&Token::RParen)?;
                }
                return Ok(Statement::SetVariable {
                    local: modifier == Some(Keyword::LOCAL),
                    hivevar: Some(Keyword::HIVEVAR) == modifier,
                    variables,
                    value: values,
                });
            }
        }

        // No `=`/`TO`: only a few single-variable forms remain valid.
        let OneOrManyWithParens::One(variable) = variables else {
            return self.expected("set variable", self.peek_token());
        };

        if variable.to_string().eq_ignore_ascii_case("TIMEZONE") {
            // `SET TIME ZONE <value>` without `=`/`TO`.
            match self.parse_expr() {
                Ok(expr) => Ok(Statement::SetTimeZone {
                    local: modifier == Some(Keyword::LOCAL),
                    value: expr,
                }),
                _ => self.expected("timezone value", self.peek_token())?,
            }
        } else if variable.to_string() == "CHARACTERISTICS" {
            // `SET SESSION CHARACTERISTICS AS TRANSACTION <modes>`.
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            Ok(Statement::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            })
        } else if variable.to_string() == "TRANSACTION" && modifier.is_none() {
            // `SET TRANSACTION SNAPSHOT <id>` or `SET TRANSACTION <modes>`.
            if self.parse_keyword(Keyword::SNAPSHOT) {
                let snapshot_id = self.parse_value()?;
                return Ok(Statement::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                });
            }
            Ok(Statement::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            })
        } else {
            self.expected("equals sign or TO", self.peek_token())
        }
    }
9592
    /// Parse a `SHOW ...` statement, dispatching on the keyword(s) following
    /// SHOW. The EXTENDED/FULL/SESSION/GLOBAL modifiers are consumed first
    /// and either passed to the chosen form or rejected below.
    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
        let extended = self.parse_keyword(Keyword::EXTENDED);
        let full = self.parse_keyword(Keyword::FULL);
        let session = self.parse_keyword(Keyword::SESSION);
        let global = self.parse_keyword(Keyword::GLOBAL);
        if self
            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
            .is_some()
        {
            Ok(self.parse_show_columns(extended, full)?)
        } else if self.parse_keyword(Keyword::TABLES) {
            Ok(self.parse_show_tables(extended, full)?)
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
            Ok(self.parse_show_views(true)?)
        } else if self.parse_keyword(Keyword::VIEWS) {
            Ok(self.parse_show_views(false)?)
        } else if self.parse_keyword(Keyword::FUNCTIONS) {
            Ok(self.parse_show_functions()?)
        } else if extended || full {
            // EXTENDED/FULL only combine with the COLUMNS/FIELDS/TABLES forms above.
            Err(ParserError::ParserError(
                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
            ))
        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
            Ok(self.parse_show_create()?)
        } else if self.parse_keyword(Keyword::COLLATION) {
            Ok(self.parse_show_collation()?)
        } else if self.parse_keyword(Keyword::VARIABLES)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): VARIABLES is consumed even when the dialect check
            // then fails, falling through with the token gone — confirm intended.
            Ok(Statement::ShowVariables {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::STATUS)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Statement::ShowStatus {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::DATABASES) {
            self.parse_show_databases()
        } else if self.parse_keyword(Keyword::SCHEMAS) {
            self.parse_show_schemas()
        } else {
            // Fallback: `SHOW <identifier sequence>` as a generic variable.
            Ok(Statement::ShowVariable {
                variable: self.parse_identifiers()?,
            })
        }
    }
9645
9646 fn parse_show_databases(&mut self) -> Result<Statement, ParserError> {
9647 Ok(Statement::ShowDatabases {
9648 filter: self.parse_show_statement_filter()?,
9649 })
9650 }
9651
9652 fn parse_show_schemas(&mut self) -> Result<Statement, ParserError> {
9653 Ok(Statement::ShowSchemas {
9654 filter: self.parse_show_statement_filter()?,
9655 })
9656 }
9657
9658 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
9659 let obj_type = match self.expect_one_of_keywords(&[
9660 Keyword::TABLE,
9661 Keyword::TRIGGER,
9662 Keyword::FUNCTION,
9663 Keyword::PROCEDURE,
9664 Keyword::EVENT,
9665 Keyword::VIEW,
9666 ])? {
9667 Keyword::TABLE => Ok(ShowCreateObject::Table),
9668 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
9669 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
9670 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
9671 Keyword::EVENT => Ok(ShowCreateObject::Event),
9672 Keyword::VIEW => Ok(ShowCreateObject::View),
9673 keyword => Err(ParserError::ParserError(format!(
9674 "Unable to map keyword to ShowCreateObject: {keyword:?}"
9675 ))),
9676 }?;
9677
9678 let obj_name = self.parse_object_name(false)?;
9679
9680 Ok(Statement::ShowCreate { obj_type, obj_name })
9681 }
9682
9683 pub fn parse_show_columns(
9684 &mut self,
9685 extended: bool,
9686 full: bool,
9687 ) -> Result<Statement, ParserError> {
9688 self.expect_one_of_keywords(&[Keyword::FROM, Keyword::IN])?;
9689 let object_name = self.parse_object_name(false)?;
9690 let table_name = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
9691 Some(_) => {
9692 let db_name = vec![self.parse_identifier(false)?];
9693 let ObjectName(table_name) = object_name;
9694 let object_name = db_name.into_iter().chain(table_name).collect();
9695 ObjectName(object_name)
9696 }
9697 None => object_name,
9698 };
9699 let filter = self.parse_show_statement_filter()?;
9700 Ok(Statement::ShowColumns {
9701 extended,
9702 full,
9703 table_name,
9704 filter,
9705 })
9706 }
9707
9708 pub fn parse_show_tables(
9709 &mut self,
9710 extended: bool,
9711 full: bool,
9712 ) -> Result<Statement, ParserError> {
9713 let (clause, db_name) = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
9714 Some(Keyword::FROM) => (Some(ShowClause::FROM), Some(self.parse_identifier(false)?)),
9715 Some(Keyword::IN) => (Some(ShowClause::IN), Some(self.parse_identifier(false)?)),
9716 _ => (None, None),
9717 };
9718 let filter = self.parse_show_statement_filter()?;
9719 Ok(Statement::ShowTables {
9720 extended,
9721 full,
9722 clause,
9723 db_name,
9724 filter,
9725 })
9726 }
9727
9728 fn parse_show_views(&mut self, materialized: bool) -> Result<Statement, ParserError> {
9729 let (clause, db_name) = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
9730 Some(Keyword::FROM) => (Some(ShowClause::FROM), Some(self.parse_identifier(false)?)),
9731 Some(Keyword::IN) => (Some(ShowClause::IN), Some(self.parse_identifier(false)?)),
9732 _ => (None, None),
9733 };
9734 let filter = self.parse_show_statement_filter()?;
9735 Ok(Statement::ShowViews {
9736 materialized,
9737 clause,
9738 db_name,
9739 filter,
9740 })
9741 }
9742
9743 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
9744 let filter = self.parse_show_statement_filter()?;
9745 Ok(Statement::ShowFunctions { filter })
9746 }
9747
9748 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
9749 let filter = self.parse_show_statement_filter()?;
9750 Ok(Statement::ShowCollation { filter })
9751 }
9752
9753 pub fn parse_show_statement_filter(
9754 &mut self,
9755 ) -> Result<Option<ShowStatementFilter>, ParserError> {
9756 if self.parse_keyword(Keyword::LIKE) {
9757 Ok(Some(ShowStatementFilter::Like(
9758 self.parse_literal_string()?,
9759 )))
9760 } else if self.parse_keyword(Keyword::ILIKE) {
9761 Ok(Some(ShowStatementFilter::ILike(
9762 self.parse_literal_string()?,
9763 )))
9764 } else if self.parse_keyword(Keyword::WHERE) {
9765 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
9766 } else {
9767 self.maybe_parse(|parser| -> Result<String, ParserError> {
9768 parser.parse_literal_string()
9769 })?
9770 .map_or(Ok(None), |filter| {
9771 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
9772 })
9773 }
9774 }
9775
9776 pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
9777 let parsed_keyword = if dialect_of!(self is HiveDialect) {
9779 if self.parse_keyword(Keyword::DEFAULT) {
9781 return Ok(Statement::Use(Use::Default));
9782 }
9783 None } else if dialect_of!(self is DatabricksDialect) {
9785 self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
9786 } else if dialect_of!(self is SnowflakeDialect) {
9787 self.parse_one_of_keywords(&[Keyword::DATABASE, Keyword::SCHEMA, Keyword::WAREHOUSE])
9788 } else {
9789 None };
9791
9792 let obj_name = self.parse_object_name(false)?;
9793 let result = match parsed_keyword {
9794 Some(Keyword::CATALOG) => Use::Catalog(obj_name),
9795 Some(Keyword::DATABASE) => Use::Database(obj_name),
9796 Some(Keyword::SCHEMA) => Use::Schema(obj_name),
9797 Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
9798 _ => Use::Object(obj_name),
9799 };
9800
9801 Ok(Statement::Use(result))
9802 }
9803
    /// Parse a table factor followed by any number of joins, producing a
    /// `TableWithJoins`. The loop stops at the first token that cannot begin
    /// a join clause.
    pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
        let relation = self.parse_table_factor()?;
        let mut joins = vec![];
        loop {
            // Optional GLOBAL join modifier (dialect-specific).
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                // CROSS JOIN | CROSS APPLY.
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin
                } else if self.parse_keyword(Keyword::APPLY) {
                    JoinOperator::CrossApply
                } else {
                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
                };
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // OUTER APPLY.
                self.expect_keyword(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // ASOF JOIN <table> MATCH_CONDITION (<expr>) [constraint].
                self.expect_keyword(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                // Peek (without consuming) so a non-join token ends the loop
                // with the stream undisturbed.
                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // `join_operator_type` is an enum constructor later applied
                // to the parsed join constraint.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        // Optional INNER, then mandatory JOIN.
                        let _ = self.parse_keyword(Keyword::INNER);
                        self.expect_keyword(Keyword::JOIN)?;
                        JoinOperator::Inner
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        // Consume LEFT/RIGHT, then the join variant keyword.
                        let _ = self.next_token();
                        let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                // Bare LEFT/RIGHT JOIN is treated as OUTER.
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::FULL => {
                        // FULL [OUTER] JOIN.
                        let _ = self.next_token();
                        let _ = self.parse_keyword(Keyword::OUTER);
                        self.expect_keyword(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
                    }
                    _ if natural => {
                        // NATURAL must be followed by a join type.
                        return self.expected("a join type after NATURAL", self.peek_token());
                    }
                    _ => break,
                };
                let relation = self.parse_table_factor()?;
                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(TableWithJoins { relation, joins })
    }
9935
    /// Parse one table factor — the base unit of a FROM clause: a table name,
    /// derived table, table function, UNNEST, JSON_TABLE, or a parenthesized
    /// (possibly joined) factor — including trailing PIVOT/UNPIVOT and
    /// MATCH_RECOGNIZE operators where applicable.
    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
        if self.parse_keyword(Keyword::LATERAL) {
            // LATERAL must be followed by a subquery or a table function call.
            if self.consume_token(&Token::LParen) {
                self.parse_derived_table_factor(Lateral)
            } else {
                let name = self.parse_object_name(false)?;
                self.expect_token(&Token::LParen)?;
                let args = self.parse_optional_args()?;
                let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
                Ok(TableFactor::Function {
                    lateral: true,
                    name,
                    args,
                    alias,
                })
            }
        } else if self.parse_keyword(Keyword::TABLE) {
            // `TABLE(<expr>)` table function.
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
            Ok(TableFactor::TableFunction { expr, alias })
        } else if self.consume_token(&Token::LParen) {
            // A left paren starts either a derived table (subquery) or a
            // parenthesized table factor / nested join. Try the subquery
            // interpretation first; maybe_parse presumably rewinds on failure.
            if let Some(mut table) =
                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
            {
                // A derived table can itself be PIVOTed/UNPIVOTed.
                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
                {
                    table = match kw {
                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                        _ => unreachable!(),
                    }
                }
                return Ok(table);
            }

            // Not a subquery: parse the parenthesized table-with-joins.
            let mut table_and_joins = self.parse_table_and_joins()?;

            #[allow(clippy::if_same_then_else)]
            if !table_and_joins.joins.is_empty() {
                // `(a JOIN b ...)` — a genuine nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if let TableFactor::NestedJoin {
                table_with_joins: _,
                alias: _,
            } = &table_and_joins.relation
            {
                // Extra parentheses around an already-nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
                // These dialects allow parens around a lone factor, e.g.
                // `(mytable) [AS alias]`. An outer alias is pushed down onto
                // the inner relation unless it already carries one.
                self.expect_token(&Token::RParen)?;

                if let Some(outer_alias) =
                    self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?
                {
                    match &mut table_and_joins.relation {
                        TableFactor::Derived { alias, .. }
                        | TableFactor::Table { alias, .. }
                        | TableFactor::Function { alias, .. }
                        | TableFactor::UNNEST { alias, .. }
                        | TableFactor::JsonTable { alias, .. }
                        | TableFactor::TableFunction { alias, .. }
                        | TableFactor::Pivot { alias, .. }
                        | TableFactor::Unpivot { alias, .. }
                        | TableFactor::MatchRecognize { alias, .. }
                        | TableFactor::NestedJoin { alias, .. } => {
                            if let Some(inner_alias) = alias {
                                // An inner and an outer alias together is an error.
                                return Err(ParserError::ParserError(format!(
                                    "duplicate alias {inner_alias}"
                                )));
                            }
                            alias.replace(outer_alias);
                        }
                    };
                }
                Ok(table_and_joins.relation)
            } else {
                // Other dialects reject `(mytable)` without a join inside.
                self.expected("joined table", self.peek_token())
            }
        } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
            && matches!(
                self.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::VALUES,
                        ..
                    }),
                    Token::LParen
                ]
            )
        {
            // Bare `VALUES (...)` used directly as a table factor; wrap the
            // rows in a minimal derived-table Query.
            self.expect_keyword(Keyword::VALUES)?;

            let values = SetExpr::Values(self.parse_values(false)?);
            let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
            Ok(TableFactor::Derived {
                lateral: false,
                subquery: Box::new(Query {
                    with: None,
                    body: Box::new(values),
                    order_by: None,
                    limit: None,
                    limit_by: vec![],
                    offset: None,
                    fetch: None,
                    locks: vec![],
                    for_clause: None,
                    settings: None,
                    format_clause: None,
                }),
                alias,
            })
        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::UNNEST)
        {
            // `UNNEST(<exprs>) [WITH ORDINALITY] [alias] [WITH OFFSET [alias]]`.
            self.expect_token(&Token::LParen)?;
            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
            let alias = match self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS) {
                Ok(Some(alias)) => Some(alias),
                Ok(None) => None,
                Err(e) => return Err(e),
            };

            // A failed expect simply means no WITH OFFSET clause.
            let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
                Ok(()) => true,
                Err(_) => false,
            };

            let with_offset_alias = if with_offset {
                match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
                    Ok(Some(alias)) => Some(alias),
                    Ok(None) => None,
                    Err(e) => return Err(e),
                }
            } else {
                None
            };

            Ok(TableFactor::UNNEST {
                alias,
                array_exprs,
                with_offset,
                with_offset_alias,
                with_ordinality,
            })
        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
            // `JSON_TABLE(<json expr>, <path> COLUMNS (<col defs>)) [alias]`.
            let json_expr = self.parse_expr()?;
            self.expect_token(&Token::Comma)?;
            let json_path = self.parse_value()?;
            self.expect_keyword(Keyword::COLUMNS)?;
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
            self.expect_token(&Token::RParen)?;
            self.expect_token(&Token::RParen)?;
            let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
            Ok(TableFactor::JsonTable {
                json_expr,
                json_path,
                columns,
                alias,
            })
        } else {
            // Plain (possibly qualified) table name, with optional decorations.
            let name = self.parse_object_name(true)?;

            // `PARTITION (<idents>)` selection (MySql/Generic only).
            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::PARTITION)
            {
                self.parse_parenthesized_identifiers()?
            } else {
                vec![]
            };

            // Optional table version clause.
            let version = self.parse_table_version()?;

            // A paren after the name means a table-valued function call.
            let args = if self.consume_token(&Token::LParen) {
                Some(self.parse_table_function_args()?)
            } else {
                None
            };

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);

            let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;

            // `WITH (<hints>)`; a WITH not followed by `(` is pushed back.
            let mut with_hints = vec![];
            if self.parse_keyword(Keyword::WITH) {
                if self.consume_token(&Token::LParen) {
                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
                    self.expect_token(&Token::RParen)?;
                } else {
                    self.prev_token();
                }
            };

            let mut table = TableFactor::Table {
                name,
                alias,
                args,
                with_hints,
                version,
                partitions,
                with_ordinality,
            };

            // Any number of trailing PIVOT/UNPIVOT operators may be chained.
            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
                table = match kw {
                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                    _ => unreachable!(),
                }
            }

            // Optional MATCH_RECOGNIZE suffix where the dialect supports it.
            if self.dialect.supports_match_recognize()
                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
            {
                table = self.parse_match_recognize(table)?;
            }

            Ok(table)
        }
    }
10223
    /// Parse the parenthesized body of a `MATCH_RECOGNIZE (...)` clause
    /// applied to `table`; the MATCH_RECOGNIZE keyword itself has already
    /// been consumed by the caller.
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Optional PARTITION BY <exprs>.
        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        // Optional ORDER BY <order exprs>.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Optional MEASURES <expr> [AS] <alias>, ... — the AS is optional.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier(false)?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // Optional ONE ROW PER MATCH | ALL ROWS PER MATCH [empty-match mode].
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // Optional AFTER MATCH SKIP <option>; once AFTER MATCH SKIP is seen,
        // one of the four options must follow.
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier(false)?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier(false)?))
                } else {
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        // Mandatory PATTERN (<pattern>).
        self.expect_keyword(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        // Mandatory DEFINE <symbol> AS <expr>, ...
        self.expect_keyword(Keyword::DEFINE)?;

        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier(false)?;
            p.expect_keyword(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
10328
10329 fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
10330 match self.next_token().token {
10331 Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
10332 Token::Placeholder(s) if s == "$" => {
10333 Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
10334 }
10335 Token::LBrace => {
10336 self.expect_token(&Token::Minus)?;
10337 let symbol = self
10338 .parse_identifier(false)
10339 .map(MatchRecognizeSymbol::Named)?;
10340 self.expect_token(&Token::Minus)?;
10341 self.expect_token(&Token::RBrace)?;
10342 Ok(MatchRecognizePattern::Exclude(symbol))
10343 }
10344 Token::Word(Word {
10345 value,
10346 quote_style: None,
10347 ..
10348 }) if value == "PERMUTE" => {
10349 self.expect_token(&Token::LParen)?;
10350 let symbols = self.parse_comma_separated(|p| {
10351 p.parse_identifier(false).map(MatchRecognizeSymbol::Named)
10352 })?;
10353 self.expect_token(&Token::RParen)?;
10354 Ok(MatchRecognizePattern::Permute(symbols))
10355 }
10356 Token::LParen => {
10357 let pattern = self.parse_pattern()?;
10358 self.expect_token(&Token::RParen)?;
10359 Ok(MatchRecognizePattern::Group(Box::new(pattern)))
10360 }
10361 _ => {
10362 self.prev_token();
10363 self.parse_identifier(false)
10364 .map(MatchRecognizeSymbol::Named)
10365 .map(MatchRecognizePattern::Symbol)
10366 }
10367 }
10368 }
10369
    /// Parse a base pattern followed by any number of repetition quantifiers:
    /// `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, `{n,m}`.
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // `?` arrives as a placeholder token.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    // Braced quantifier: `{,m}`, `{n,m}`, `{n,}` or `{n}`.
                    let token = self.next_token();
                    match token.token {
                        Token::Comma => {
                            // `{,m}` — upper bound only.
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::AtMost(Self::parse(n, token.location)?)
                        }
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            // `{n,m}` (both bounds) or `{n,}` (lower bound only).
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.location)?,
                                        Self::parse(m, token.location)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.location)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        Token::Number(n, _) => {
                            // `{n}` — exact repetition count.
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.location)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                _ => {
                    // Not a quantifier: push the token back and stop.
                    self.prev_token();
                    break;
                }
            };
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
10424
10425 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
10426 let mut patterns = vec![self.parse_repetition_pattern()?];
10427 while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
10428 patterns.push(self.parse_repetition_pattern()?);
10429 }
10430 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
10431 Ok([pattern]) => Ok(pattern),
10432 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
10433 }
10434 }
10435
10436 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
10437 let pattern = self.parse_concat_pattern()?;
10438 if self.consume_token(&Token::Pipe) {
10439 match self.parse_pattern()? {
10440 MatchRecognizePattern::Alternation(mut patterns) => {
10442 patterns.insert(0, pattern);
10443 Ok(MatchRecognizePattern::Alternation(patterns))
10444 }
10445 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
10446 }
10447 } else {
10448 Ok(pattern)
10449 }
10450 }
10451
10452 pub fn parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
10456 if dialect_of!(self is BigQueryDialect | MsSqlDialect)
10457 && self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
10458 {
10459 let expr = self.parse_expr()?;
10460 Ok(Some(TableVersion::ForSystemTimeAsOf(expr)))
10461 } else {
10462 Ok(None)
10463 }
10464 }
10465
    /// Parses one column definition inside a `JSON_TABLE(... COLUMNS (...))`
    /// clause: a `NESTED [PATH] '...' COLUMNS (...)` group, a
    /// `name FOR ORDINALITY` column, or a typed column with a `PATH` and
    /// optional `... ON EMPTY` / `... ON ERROR` handling.
    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
        if self.parse_keyword(Keyword::NESTED) {
            // The PATH keyword is optional in the NESTED form.
            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
            let path = self.parse_value()?;
            self.expect_keyword(Keyword::COLUMNS)?;
            // Nested column lists may themselves contain NESTED groups.
            let columns = self.parse_parenthesized(|p| {
                p.parse_comma_separated(Self::parse_json_table_column_def)
            })?;
            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
                path,
                columns,
            }));
        }
        let name = self.parse_identifier(false)?;
        if self.parse_keyword(Keyword::FOR) {
            self.expect_keyword(Keyword::ORDINALITY)?;
            return Ok(JsonTableColumn::ForOrdinality(name));
        }
        let r#type = self.parse_data_type()?;
        let exists = self.parse_keyword(Keyword::EXISTS);
        self.expect_keyword(Keyword::PATH)?;
        let path = self.parse_value()?;
        let mut on_empty = None;
        let mut on_error = None;
        // Both `ON EMPTY` and `ON ERROR` handlers may appear, in either order;
        // a later occurrence of the same handler overwrites the earlier one.
        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
            if self.parse_keyword(Keyword::EMPTY) {
                on_empty = Some(error_handling);
            } else {
                self.expect_keyword(Keyword::ERROR)?;
                on_error = Some(error_handling);
            }
        }
        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
            name,
            r#type,
            path,
            exists,
            on_empty,
            on_error,
        }))
    }
10509
10510 fn parse_json_table_column_error_handling(
10511 &mut self,
10512 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
10513 let res = if self.parse_keyword(Keyword::NULL) {
10514 JsonTableColumnErrorHandling::Null
10515 } else if self.parse_keyword(Keyword::ERROR) {
10516 JsonTableColumnErrorHandling::Error
10517 } else if self.parse_keyword(Keyword::DEFAULT) {
10518 JsonTableColumnErrorHandling::Default(self.parse_value()?)
10519 } else {
10520 return Ok(None);
10521 };
10522 self.expect_keyword(Keyword::ON)?;
10523 Ok(Some(res))
10524 }
10525
10526 pub fn parse_derived_table_factor(
10527 &mut self,
10528 lateral: IsLateral,
10529 ) -> Result<TableFactor, ParserError> {
10530 let subquery = self.parse_query()?;
10531 self.expect_token(&Token::RParen)?;
10532 let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
10533 Ok(TableFactor::Derived {
10534 lateral: match lateral {
10535 Lateral => true,
10536 NotLateral => false,
10537 },
10538 subquery,
10539 alias,
10540 })
10541 }
10542
10543 fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
10544 let function_name = match self.next_token().token {
10545 Token::Word(w) => Ok(w.value),
10546 _ => self.expected("a function identifier", self.peek_token()),
10547 }?;
10548 let expr = self.parse_function(ObjectName(vec![Ident::new(function_name)]))?;
10549 let alias = if self.parse_keyword(Keyword::AS) {
10550 Some(self.parse_identifier(false)?)
10551 } else {
10552 None
10553 };
10554
10555 Ok(ExprWithAlias { expr, alias })
10556 }
10557 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
10582 let expr = self.parse_expr()?;
10583 let alias = if self.parse_keyword(Keyword::AS) {
10584 Some(self.parse_identifier(false)?)
10585 } else {
10586 None
10587 };
10588
10589 Ok(ExprWithAlias { expr, alias })
10590 }
10591
    /// Parses the body of a `PIVOT` table factor (everything after the PIVOT
    /// keyword): the aggregate function list, the `FOR` column(s), the `IN`
    /// value source, an optional `DEFAULT ON NULL (<expr>)`, and a trailing
    /// table alias.
    pub fn parse_pivot_table_factor(
        &mut self,
        table: TableFactor,
    ) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;
        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
        self.expect_keyword(Keyword::FOR)?;
        let value_column = self.parse_object_name(false)?.0;
        self.expect_keyword(Keyword::IN)?;

        self.expect_token(&Token::LParen)?;
        // The IN list is one of: `ANY [ORDER BY ...]`, a subquery, or a
        // plain list of expressions (each optionally aliased).
        let value_source = if self.parse_keyword(Keyword::ANY) {
            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                self.parse_comma_separated(Parser::parse_order_by_expr)?
            } else {
                vec![]
            };
            PivotValueSource::Any(order_by)
        } else if self.peek_sub_query() {
            PivotValueSource::Subquery(self.parse_query()?)
        } else {
            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
        };
        self.expect_token(&Token::RParen)?;

        let default_on_null =
            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
                self.expect_token(&Token::LParen)?;
                let expr = self.parse_expr()?;
                self.expect_token(&Token::RParen)?;
                Some(expr)
            } else {
                None
            };

        self.expect_token(&Token::RParen)?;
        let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
        Ok(TableFactor::Pivot {
            table: Box::new(table),
            aggregate_functions,
            value_column,
            value_source,
            default_on_null,
            alias,
        })
    }
10638
10639 pub fn parse_unpivot_table_factor(
10640 &mut self,
10641 table: TableFactor,
10642 ) -> Result<TableFactor, ParserError> {
10643 self.expect_token(&Token::LParen)?;
10644 let value = self.parse_identifier(false)?;
10645 self.expect_keyword(Keyword::FOR)?;
10646 let name = self.parse_identifier(false)?;
10647 self.expect_keyword(Keyword::IN)?;
10648 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
10649 self.expect_token(&Token::RParen)?;
10650 let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
10651 Ok(TableFactor::Unpivot {
10652 table: Box::new(table),
10653 value,
10654 name,
10655 columns,
10656 alias,
10657 })
10658 }
10659
10660 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
10661 if natural {
10662 Ok(JoinConstraint::Natural)
10663 } else if self.parse_keyword(Keyword::ON) {
10664 let constraint = self.parse_expr()?;
10665 Ok(JoinConstraint::On(constraint))
10666 } else if self.parse_keyword(Keyword::USING) {
10667 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
10668 Ok(JoinConstraint::Using(columns))
10669 } else {
10670 Ok(JoinConstraint::None)
10671 }
10673 }
10674
10675 pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
10677 let (privileges, objects) = self.parse_grant_revoke_privileges_objects()?;
10678
10679 self.expect_keyword(Keyword::TO)?;
10680 let grantees = self.parse_comma_separated(|p| p.parse_identifier(false))?;
10681
10682 let with_grant_option =
10683 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
10684
10685 let granted_by = self
10686 .parse_keywords(&[Keyword::GRANTED, Keyword::BY])
10687 .then(|| self.parse_identifier(false).unwrap());
10688
10689 Ok(Statement::Grant {
10690 privileges,
10691 objects,
10692 grantees,
10693 with_grant_option,
10694 granted_by,
10695 })
10696 }
10697
    /// Parses the shared `<privileges> ON <objects>` portion of GRANT and
    /// REVOKE statements: either `ALL [PRIVILEGES]` or a list of specific
    /// actions, followed by the object clause.
    pub fn parse_grant_revoke_privileges_objects(
        &mut self,
    ) -> Result<(Privileges, GrantObjects), ParserError> {
        let privileges = if self.parse_keyword(Keyword::ALL) {
            Privileges::All {
                with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
            }
        } else {
            // Map each (keyword, optional columns) pair from the actions
            // list to an AST Action, partitioning out unmapped keywords.
            let (actions, err): (Vec<_>, Vec<_>) = self
                .parse_actions_list()?
                .into_iter()
                .map(|(kw, columns)| match kw {
                    Keyword::DELETE => Ok(Action::Delete),
                    Keyword::INSERT => Ok(Action::Insert { columns }),
                    Keyword::REFERENCES => Ok(Action::References { columns }),
                    Keyword::SELECT => Ok(Action::Select { columns }),
                    Keyword::TRIGGER => Ok(Action::Trigger),
                    Keyword::TRUNCATE => Ok(Action::Truncate),
                    Keyword::UPDATE => Ok(Action::Update { columns }),
                    Keyword::USAGE => Ok(Action::Usage),
                    Keyword::CONNECT => Ok(Action::Connect),
                    Keyword::CREATE => Ok(Action::Create),
                    Keyword::EXECUTE => Ok(Action::Execute),
                    Keyword::TEMPORARY => Ok(Action::Temporary),
                    // The actions list should only yield the keywords above;
                    // anything else is surfaced as an internal error below.
                    _ => Err(kw),
                })
                .partition(Result::is_ok);

            if !err.is_empty() {
                let errors: Vec<Keyword> = err.into_iter().filter_map(|x| x.err()).collect();
                return Err(ParserError::ParserError(format!(
                    "INTERNAL ERROR: GRANT/REVOKE unexpected keyword(s) - {errors:?}"
                )));
            }
            let act = actions.into_iter().filter_map(|x| x.ok()).collect();
            Privileges::Actions(act)
        };

        self.expect_keyword(Keyword::ON)?;

        // Object clause: `ALL TABLES IN SCHEMA ...`, `ALL SEQUENCES IN
        // SCHEMA ...`, or a (possibly type-prefixed) list of object names.
        let objects = if self.parse_keywords(&[
            Keyword::ALL,
            Keyword::TABLES,
            Keyword::IN,
            Keyword::SCHEMA,
        ]) {
            GrantObjects::AllTablesInSchema {
                schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
            }
        } else if self.parse_keywords(&[
            Keyword::ALL,
            Keyword::SEQUENCES,
            Keyword::IN,
            Keyword::SCHEMA,
        ]) {
            GrantObjects::AllSequencesInSchema {
                schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
            }
        } else {
            let object_type =
                self.parse_one_of_keywords(&[Keyword::SEQUENCE, Keyword::SCHEMA, Keyword::TABLE]);
            let objects = self.parse_comma_separated(|p| p.parse_object_name(false));
            match object_type {
                Some(Keyword::SCHEMA) => GrantObjects::Schemas(objects?),
                Some(Keyword::SEQUENCE) => GrantObjects::Sequences(objects?),
                // TABLE is the default when no object-type keyword is given.
                Some(Keyword::TABLE) | None => GrantObjects::Tables(objects?),
                _ => unreachable!(),
            }
        };

        Ok((privileges, objects))
    }
10773
10774 pub fn parse_grant_permission(&mut self) -> Result<ParsedAction, ParserError> {
10775 if let Some(kw) = self.parse_one_of_keywords(&[
10776 Keyword::CONNECT,
10777 Keyword::CREATE,
10778 Keyword::DELETE,
10779 Keyword::EXECUTE,
10780 Keyword::INSERT,
10781 Keyword::REFERENCES,
10782 Keyword::SELECT,
10783 Keyword::TEMPORARY,
10784 Keyword::TRIGGER,
10785 Keyword::TRUNCATE,
10786 Keyword::UPDATE,
10787 Keyword::USAGE,
10788 ]) {
10789 let columns = match kw {
10790 Keyword::INSERT | Keyword::REFERENCES | Keyword::SELECT | Keyword::UPDATE => {
10791 let columns = self.parse_parenthesized_column_list(Optional, false)?;
10792 if columns.is_empty() {
10793 None
10794 } else {
10795 Some(columns)
10796 }
10797 }
10798 _ => None,
10799 };
10800 Ok((kw, columns))
10801 } else {
10802 self.expected("a privilege keyword", self.peek_token())?
10803 }
10804 }
10805
10806 pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
10808 let (privileges, objects) = self.parse_grant_revoke_privileges_objects()?;
10809
10810 self.expect_keyword(Keyword::FROM)?;
10811 let grantees = self.parse_comma_separated(|p| p.parse_identifier(false))?;
10812
10813 let granted_by = self
10814 .parse_keywords(&[Keyword::GRANTED, Keyword::BY])
10815 .then(|| self.parse_identifier(false).unwrap());
10816
10817 let loc = self.peek_token().location;
10818 let cascade = self.parse_keyword(Keyword::CASCADE);
10819 let restrict = self.parse_keyword(Keyword::RESTRICT);
10820 if cascade && restrict {
10821 return parser_err!("Cannot specify both CASCADE and RESTRICT in REVOKE", loc);
10822 }
10823
10824 Ok(Statement::Revoke {
10825 privileges,
10826 objects,
10827 grantees,
10828 granted_by,
10829 cascade,
10830 })
10831 }
10832
10833 pub fn parse_replace(&mut self) -> Result<Statement, ParserError> {
10835 if !dialect_of!(self is MySqlDialect | GenericDialect) {
10836 return parser_err!("Unsupported statement REPLACE", self.peek_token().location);
10837 }
10838
10839 let mut insert = self.parse_insert()?;
10840 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
10841 *replace_into = true;
10842 }
10843
10844 Ok(insert)
10845 }
10846
10847 fn parse_insert_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
10851 Ok(Box::new(SetExpr::Insert(self.parse_insert()?)))
10852 }
10853
10854 pub fn parse_insert(&mut self) -> Result<Statement, ParserError> {
10856 let or = if !dialect_of!(self is SQLiteDialect) {
10857 None
10858 } else if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
10859 Some(SqliteOnConflict::Replace)
10860 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
10861 Some(SqliteOnConflict::Rollback)
10862 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
10863 Some(SqliteOnConflict::Abort)
10864 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
10865 Some(SqliteOnConflict::Fail)
10866 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
10867 Some(SqliteOnConflict::Ignore)
10868 } else if self.parse_keyword(Keyword::REPLACE) {
10869 Some(SqliteOnConflict::Replace)
10870 } else {
10871 None
10872 };
10873
10874 let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
10875 None
10876 } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
10877 Some(MysqlInsertPriority::LowPriority)
10878 } else if self.parse_keyword(Keyword::DELAYED) {
10879 Some(MysqlInsertPriority::Delayed)
10880 } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
10881 Some(MysqlInsertPriority::HighPriority)
10882 } else {
10883 None
10884 };
10885
10886 let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
10887 && self.parse_keyword(Keyword::IGNORE);
10888
10889 let replace_into = false;
10890
10891 let action = self.parse_one_of_keywords(&[Keyword::INTO, Keyword::OVERWRITE]);
10892 let into = action == Some(Keyword::INTO);
10893 let overwrite = action == Some(Keyword::OVERWRITE);
10894
10895 let local = self.parse_keyword(Keyword::LOCAL);
10896
10897 if self.parse_keyword(Keyword::DIRECTORY) {
10898 let path = self.parse_literal_string()?;
10899 let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
10900 Some(self.parse_file_format()?)
10901 } else {
10902 None
10903 };
10904 let source = self.parse_query()?;
10905 Ok(Statement::Directory {
10906 local,
10907 path,
10908 overwrite,
10909 file_format,
10910 source,
10911 })
10912 } else {
10913 let table = self.parse_keyword(Keyword::TABLE);
10915 let table_name = self.parse_object_name(false)?;
10916
10917 let table_alias =
10918 if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
10919 Some(self.parse_identifier(false)?)
10920 } else {
10921 None
10922 };
10923
10924 let is_mysql = dialect_of!(self is MySqlDialect);
10925
10926 let (columns, partitioned, after_columns, source) =
10927 if self.parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES]) {
10928 (vec![], None, vec![], None)
10929 } else {
10930 let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
10931
10932 let partitioned = self.parse_insert_partition()?;
10933 let after_columns = if dialect_of!(self is HiveDialect) {
10935 self.parse_parenthesized_column_list(Optional, false)?
10936 } else {
10937 vec![]
10938 };
10939
10940 let source = Some(self.parse_query()?);
10941
10942 (columns, partitioned, after_columns, source)
10943 };
10944
10945 let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
10946 && self.parse_keyword(Keyword::AS)
10947 {
10948 let row_alias = self.parse_object_name(false)?;
10949 let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
10950 Some(InsertAliases {
10951 row_alias,
10952 col_aliases,
10953 })
10954 } else {
10955 None
10956 };
10957
10958 let on = if self.parse_keyword(Keyword::ON) {
10959 if self.parse_keyword(Keyword::CONFLICT) {
10960 let conflict_target =
10961 if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
10962 Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
10963 } else if self.peek_token() == Token::LParen {
10964 Some(ConflictTarget::Columns(
10965 self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
10966 ))
10967 } else {
10968 None
10969 };
10970
10971 self.expect_keyword(Keyword::DO)?;
10972 let action = if self.parse_keyword(Keyword::NOTHING) {
10973 OnConflictAction::DoNothing
10974 } else {
10975 self.expect_keyword(Keyword::UPDATE)?;
10976 self.expect_keyword(Keyword::SET)?;
10977 let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
10978 let selection = if self.parse_keyword(Keyword::WHERE) {
10979 Some(self.parse_expr()?)
10980 } else {
10981 None
10982 };
10983 OnConflictAction::DoUpdate(DoUpdate {
10984 assignments,
10985 selection,
10986 })
10987 };
10988
10989 Some(OnInsert::OnConflict(OnConflict {
10990 conflict_target,
10991 action,
10992 }))
10993 } else {
10994 self.expect_keyword(Keyword::DUPLICATE)?;
10995 self.expect_keyword(Keyword::KEY)?;
10996 self.expect_keyword(Keyword::UPDATE)?;
10997 let l = self.parse_comma_separated(Parser::parse_assignment)?;
10998
10999 Some(OnInsert::DuplicateKeyUpdate(l))
11000 }
11001 } else {
11002 None
11003 };
11004
11005 let returning = if self.parse_keyword(Keyword::RETURNING) {
11006 Some(self.parse_comma_separated(Parser::parse_select_item)?)
11007 } else {
11008 None
11009 };
11010
11011 Ok(Statement::Insert(Insert {
11012 or,
11013 table_name,
11014 table_alias,
11015 ignore,
11016 into,
11017 overwrite,
11018 partitioned,
11019 columns,
11020 after_columns,
11021 source,
11022 table,
11023 on,
11024 returning,
11025 replace_into,
11026 priority,
11027 insert_alias,
11028 }))
11029 }
11030 }
11031
11032 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
11033 if self.parse_keyword(Keyword::PARTITION) {
11034 self.expect_token(&Token::LParen)?;
11035 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
11036 self.expect_token(&Token::RParen)?;
11037 Ok(partition_cols)
11038 } else {
11039 Ok(None)
11040 }
11041 }
11042
11043 fn parse_update_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
11047 Ok(Box::new(SetExpr::Update(self.parse_update()?)))
11048 }
11049
11050 pub fn parse_update(&mut self) -> Result<Statement, ParserError> {
11051 let table = self.parse_table_and_joins()?;
11052 self.expect_keyword(Keyword::SET)?;
11053 let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
11054 let from = if self.parse_keyword(Keyword::FROM)
11055 && dialect_of!(self is GenericDialect | PostgreSqlDialect | DuckDbDialect | BigQueryDialect | SnowflakeDialect | RedshiftSqlDialect | MsSqlDialect | SQLiteDialect )
11056 {
11057 Some(self.parse_table_and_joins()?)
11058 } else {
11059 None
11060 };
11061 let selection = if self.parse_keyword(Keyword::WHERE) {
11062 Some(self.parse_expr()?)
11063 } else {
11064 None
11065 };
11066 let returning = if self.parse_keyword(Keyword::RETURNING) {
11067 Some(self.parse_comma_separated(Parser::parse_select_item)?)
11068 } else {
11069 None
11070 };
11071 Ok(Statement::Update {
11072 table,
11073 assignments,
11074 from,
11075 selection,
11076 returning,
11077 })
11078 }
11079
11080 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
11082 let target = self.parse_assignment_target()?;
11083 self.expect_token(&Token::Eq)?;
11084 let value = self.parse_expr()?;
11085 Ok(Assignment { target, value })
11086 }
11087
11088 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
11090 if self.consume_token(&Token::LParen) {
11091 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
11092 self.expect_token(&Token::RParen)?;
11093 Ok(AssignmentTarget::Tuple(columns))
11094 } else {
11095 let column = self.parse_object_name(false)?;
11096 Ok(AssignmentTarget::ColumnName(column))
11097 }
11098 }
11099
11100 pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
11101 if self.peek_nth_token(1) == Token::RArrow {
11102 let name = self.parse_identifier(false)?;
11103
11104 self.expect_token(&Token::RArrow)?;
11105 let arg = self.parse_wildcard_expr()?.into();
11106
11107 Ok(FunctionArg::Named {
11108 name,
11109 arg,
11110 operator: FunctionArgOperator::RightArrow,
11111 })
11112 } else if self.dialect.supports_named_fn_args_with_eq_operator()
11113 && self.peek_nth_token(1) == Token::Eq
11114 {
11115 let name = self.parse_identifier(false)?;
11116
11117 self.expect_token(&Token::Eq)?;
11118 let arg = self.parse_wildcard_expr()?.into();
11119
11120 Ok(FunctionArg::Named {
11121 name,
11122 arg,
11123 operator: FunctionArgOperator::Equals,
11124 })
11125 } else if dialect_of!(self is DuckDbDialect | GenericDialect)
11126 && self.peek_nth_token(1) == Token::Assignment
11127 {
11128 let name = self.parse_identifier(false)?;
11129
11130 self.expect_token(&Token::Assignment)?;
11131 let arg = self.parse_expr()?.into();
11132
11133 Ok(FunctionArg::Named {
11134 name,
11135 arg,
11136 operator: FunctionArgOperator::Assignment,
11137 })
11138 } else {
11139 Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
11140 }
11141 }
11142
11143 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
11144 if self.consume_token(&Token::RParen) {
11145 Ok(vec![])
11146 } else {
11147 let args = self.parse_comma_separated(Parser::parse_function_args)?;
11148 self.expect_token(&Token::RParen)?;
11149 Ok(args)
11150 }
11151 }
11152
    /// Parses table-function arguments after the opening `(`, allowing a
    /// trailing `SETTINGS ...` clause (ClickHouse) mixed into the list;
    /// consumes the closing `)`.
    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
        // Empty argument list: `()`.
        if self.consume_token(&Token::RParen) {
            return Ok(TableFunctionArgs {
                args: vec![],
                settings: None,
            });
        }
        let mut args = vec![];
        // Collect arguments until either a SETTINGS clause appears or the
        // comma-separated list ends.
        let settings = loop {
            if let Some(settings) = self.parse_settings()? {
                break Some(settings);
            }
            args.push(self.parse_function_args()?);
            if self.is_parse_comma_separated_end() {
                break None;
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(TableFunctionArgs { args, settings })
    }
11173
11174 fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
11183 if self.consume_token(&Token::RParen) {
11184 return Ok(FunctionArgumentList {
11185 duplicate_treatment: None,
11186 args: vec![],
11187 clauses: vec![],
11188 });
11189 }
11190
11191 let duplicate_treatment = self.parse_duplicate_treatment()?;
11192 let args = self.parse_comma_separated(Parser::parse_function_args)?;
11193
11194 let mut clauses = vec![];
11195
11196 if self.dialect.supports_window_function_null_treatment_arg() {
11197 if let Some(null_treatment) = self.parse_null_treatment()? {
11198 clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
11199 }
11200 }
11201
11202 if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11203 clauses.push(FunctionArgumentClause::OrderBy(
11204 self.parse_comma_separated(Parser::parse_order_by_expr)?,
11205 ));
11206 }
11207
11208 if self.parse_keyword(Keyword::LIMIT) {
11209 clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
11210 }
11211
11212 if dialect_of!(self is GenericDialect | BigQueryDialect)
11213 && self.parse_keyword(Keyword::HAVING)
11214 {
11215 let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
11216 Keyword::MIN => HavingBoundKind::Min,
11217 Keyword::MAX => HavingBoundKind::Max,
11218 _ => unreachable!(),
11219 };
11220 clauses.push(FunctionArgumentClause::Having(HavingBound(
11221 kind,
11222 self.parse_expr()?,
11223 )))
11224 }
11225
11226 if dialect_of!(self is GenericDialect | MySqlDialect)
11227 && self.parse_keyword(Keyword::SEPARATOR)
11228 {
11229 clauses.push(FunctionArgumentClause::Separator(self.parse_value()?));
11230 }
11231
11232 if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
11233 clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
11234 }
11235
11236 self.expect_token(&Token::RParen)?;
11237 Ok(FunctionArgumentList {
11238 duplicate_treatment,
11239 args,
11240 clauses,
11241 })
11242 }
11243
11244 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
11245 let loc = self.peek_token().location;
11246 match (
11247 self.parse_keyword(Keyword::ALL),
11248 self.parse_keyword(Keyword::DISTINCT),
11249 ) {
11250 (true, false) => Ok(Some(DuplicateTreatment::All)),
11251 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
11252 (false, false) => Ok(None),
11253 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
11254 }
11255 }
11256
11257 pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
11259 match self.parse_wildcard_expr()? {
11260 Expr::QualifiedWildcard(prefix) => Ok(SelectItem::QualifiedWildcard(
11261 prefix,
11262 self.parse_wildcard_additional_options()?,
11263 )),
11264 Expr::Wildcard => Ok(SelectItem::Wildcard(
11265 self.parse_wildcard_additional_options()?,
11266 )),
11267 Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
11268 parser_err!(
11269 format!("Expected an expression, found: {}", v),
11270 self.peek_token().location
11271 )
11272 }
11273 Expr::BinaryOp {
11274 left,
11275 op: BinaryOperator::Eq,
11276 right,
11277 } if self.dialect.supports_eq_alias_assignment()
11278 && matches!(left.as_ref(), Expr::Identifier(_)) =>
11279 {
11280 let Expr::Identifier(alias) = *left else {
11281 return parser_err!(
11282 "BUG: expected identifier expression as alias",
11283 self.peek_token().location
11284 );
11285 };
11286 Ok(SelectItem::ExprWithAlias {
11287 expr: *right,
11288 alias,
11289 })
11290 }
11291 expr => self
11292 .parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS)
11293 .map(|alias| match alias {
11294 Some(alias) => SelectItem::ExprWithAlias { expr, alias },
11295 None => SelectItem::UnnamedExpr(expr),
11296 }),
11297 }
11298 }
11299
11300 pub fn parse_wildcard_additional_options(
11304 &mut self,
11305 ) -> Result<WildcardAdditionalOptions, ParserError> {
11306 let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
11307 self.parse_optional_select_item_ilike()?
11308 } else {
11309 None
11310 };
11311 let opt_exclude = if opt_ilike.is_none()
11312 && dialect_of!(self is GenericDialect | DuckDbDialect | SnowflakeDialect)
11313 {
11314 self.parse_optional_select_item_exclude()?
11315 } else {
11316 None
11317 };
11318 let opt_except = if self.dialect.supports_select_wildcard_except() {
11319 self.parse_optional_select_item_except()?
11320 } else {
11321 None
11322 };
11323 let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
11324 {
11325 self.parse_optional_select_item_replace()?
11326 } else {
11327 None
11328 };
11329 let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
11330 self.parse_optional_select_item_rename()?
11331 } else {
11332 None
11333 };
11334
11335 Ok(WildcardAdditionalOptions {
11336 opt_ilike,
11337 opt_exclude,
11338 opt_except,
11339 opt_rename,
11340 opt_replace,
11341 })
11342 }
11343
11344 pub fn parse_optional_select_item_ilike(
11348 &mut self,
11349 ) -> Result<Option<IlikeSelectItem>, ParserError> {
11350 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
11351 let next_token = self.next_token();
11352 let pattern = match next_token.token {
11353 Token::SingleQuotedString(s) => s,
11354 _ => return self.expected("ilike pattern", next_token),
11355 };
11356 Some(IlikeSelectItem { pattern })
11357 } else {
11358 None
11359 };
11360 Ok(opt_ilike)
11361 }
11362
11363 pub fn parse_optional_select_item_exclude(
11367 &mut self,
11368 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
11369 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
11370 if self.consume_token(&Token::LParen) {
11371 let columns =
11372 self.parse_comma_separated(|parser| parser.parse_identifier(false))?;
11373 self.expect_token(&Token::RParen)?;
11374 Some(ExcludeSelectItem::Multiple(columns))
11375 } else {
11376 let column = self.parse_identifier(false)?;
11377 Some(ExcludeSelectItem::Single(column))
11378 }
11379 } else {
11380 None
11381 };
11382
11383 Ok(opt_exclude)
11384 }
11385
11386 pub fn parse_optional_select_item_except(
11390 &mut self,
11391 ) -> Result<Option<ExceptSelectItem>, ParserError> {
11392 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
11393 if self.peek_token().token == Token::LParen {
11394 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
11395 match &idents[..] {
11396 [] => {
11397 return self.expected(
11398 "at least one column should be parsed by the expect clause",
11399 self.peek_token(),
11400 )?;
11401 }
11402 [first, idents @ ..] => Some(ExceptSelectItem {
11403 first_element: first.clone(),
11404 additional_elements: idents.to_vec(),
11405 }),
11406 }
11407 } else {
11408 let ident = self.parse_identifier(false)?;
11410 Some(ExceptSelectItem {
11411 first_element: ident,
11412 additional_elements: vec![],
11413 })
11414 }
11415 } else {
11416 None
11417 };
11418
11419 Ok(opt_except)
11420 }
11421
11422 pub fn parse_optional_select_item_rename(
11424 &mut self,
11425 ) -> Result<Option<RenameSelectItem>, ParserError> {
11426 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
11427 if self.consume_token(&Token::LParen) {
11428 let idents =
11429 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
11430 self.expect_token(&Token::RParen)?;
11431 Some(RenameSelectItem::Multiple(idents))
11432 } else {
11433 let ident = self.parse_identifier_with_alias()?;
11434 Some(RenameSelectItem::Single(ident))
11435 }
11436 } else {
11437 None
11438 };
11439
11440 Ok(opt_rename)
11441 }
11442
11443 pub fn parse_optional_select_item_replace(
11445 &mut self,
11446 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
11447 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
11448 if self.consume_token(&Token::LParen) {
11449 let items = self.parse_comma_separated(|parser| {
11450 Ok(Box::new(parser.parse_replace_elements()?))
11451 })?;
11452 self.expect_token(&Token::RParen)?;
11453 Some(ReplaceSelectItem { items })
11454 } else {
11455 let tok = self.next_token();
11456 return self.expected("( after REPLACE but", tok);
11457 }
11458 } else {
11459 None
11460 };
11461
11462 Ok(opt_replace)
11463 }
11464 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
11465 let expr = self.parse_expr()?;
11466 let as_keyword = self.parse_keyword(Keyword::AS);
11467 let ident = self.parse_identifier(false)?;
11468 Ok(ReplaceSelectElement {
11469 expr,
11470 column_name: ident,
11471 as_keyword,
11472 })
11473 }
11474
11475 pub fn parse_asc_desc(&mut self) -> Option<bool> {
11478 if self.parse_keyword(Keyword::ASC) {
11479 Some(true)
11480 } else if self.parse_keyword(Keyword::DESC) {
11481 Some(false)
11482 } else {
11483 None
11484 }
11485 }
11486
11487 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
11489 let expr = self.parse_expr()?;
11490
11491 let asc = self.parse_asc_desc();
11492
11493 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
11494 Some(true)
11495 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
11496 Some(false)
11497 } else {
11498 None
11499 };
11500
11501 let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
11502 && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
11503 {
11504 Some(self.parse_with_fill()?)
11505 } else {
11506 None
11507 };
11508
11509 Ok(OrderByExpr {
11510 expr,
11511 asc,
11512 nulls_first,
11513 with_fill,
11514 })
11515 }
11516
11517 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
11520 let from = if self.parse_keyword(Keyword::FROM) {
11521 Some(self.parse_expr()?)
11522 } else {
11523 None
11524 };
11525
11526 let to = if self.parse_keyword(Keyword::TO) {
11527 Some(self.parse_expr()?)
11528 } else {
11529 None
11530 };
11531
11532 let step = if self.parse_keyword(Keyword::STEP) {
11533 Some(self.parse_expr()?)
11534 } else {
11535 None
11536 };
11537
11538 Ok(WithFill { from, to, step })
11539 }
11540
11541 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
11544 if !self.parse_keyword(Keyword::INTERPOLATE) {
11545 return Ok(None);
11546 }
11547
11548 if self.consume_token(&Token::LParen) {
11549 let interpolations =
11550 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
11551 self.expect_token(&Token::RParen)?;
11552 return Ok(Some(Interpolate {
11554 exprs: Some(interpolations),
11555 }));
11556 }
11557
11558 Ok(Some(Interpolate { exprs: None }))
11560 }
11561
11562 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
11564 let column = self.parse_identifier(false)?;
11565 let expr = if self.parse_keyword(Keyword::AS) {
11566 Some(self.parse_expr()?)
11567 } else {
11568 None
11569 };
11570 Ok(InterpolateExpr { column, expr })
11571 }
11572
11573 pub fn parse_top(&mut self) -> Result<Top, ParserError> {
11576 let quantity = if self.consume_token(&Token::LParen) {
11577 let quantity = self.parse_expr()?;
11578 self.expect_token(&Token::RParen)?;
11579 Some(TopQuantity::Expr(quantity))
11580 } else {
11581 let next_token = self.next_token();
11582 let quantity = match next_token.token {
11583 Token::Number(s, _) => Self::parse::<u64>(s, next_token.location)?,
11584 _ => self.expected("literal int", next_token)?,
11585 };
11586 Some(TopQuantity::Constant(quantity))
11587 };
11588
11589 let percent = self.parse_keyword(Keyword::PERCENT);
11590
11591 let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
11592
11593 Ok(Top {
11594 with_ties,
11595 percent,
11596 quantity,
11597 })
11598 }
11599
11600 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
11602 if self.parse_keyword(Keyword::ALL) {
11603 Ok(None)
11604 } else {
11605 Ok(Some(self.parse_expr()?))
11606 }
11607 }
11608
11609 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
11611 let value = self.parse_expr()?;
11612 let rows = if self.parse_keyword(Keyword::ROW) {
11613 OffsetRows::Row
11614 } else if self.parse_keyword(Keyword::ROWS) {
11615 OffsetRows::Rows
11616 } else {
11617 OffsetRows::None
11618 };
11619 Ok(Offset { value, rows })
11620 }
11621
11622 pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
11624 self.expect_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT])?;
11625 let (quantity, percent) = if self
11626 .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
11627 .is_some()
11628 {
11629 (None, false)
11630 } else {
11631 let quantity = Expr::Value(self.parse_value()?);
11632 let percent = self.parse_keyword(Keyword::PERCENT);
11633 self.expect_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])?;
11634 (Some(quantity), percent)
11635 };
11636 let with_ties = if self.parse_keyword(Keyword::ONLY) {
11637 false
11638 } else if self.parse_keywords(&[Keyword::WITH, Keyword::TIES]) {
11639 true
11640 } else {
11641 return self.expected("one of ONLY or WITH TIES", self.peek_token());
11642 };
11643 Ok(Fetch {
11644 with_ties,
11645 percent,
11646 quantity,
11647 })
11648 }
11649
11650 pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
11652 let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
11653 Keyword::UPDATE => LockType::Update,
11654 Keyword::SHARE => LockType::Share,
11655 _ => unreachable!(),
11656 };
11657 let of = if self.parse_keyword(Keyword::OF) {
11658 Some(self.parse_object_name(false)?)
11659 } else {
11660 None
11661 };
11662 let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
11663 Some(NonBlock::Nowait)
11664 } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
11665 Some(NonBlock::SkipLocked)
11666 } else {
11667 None
11668 };
11669 Ok(LockClause {
11670 lock_type,
11671 of,
11672 nonblock,
11673 })
11674 }
11675
11676 pub fn parse_values(&mut self, allow_empty: bool) -> Result<Values, ParserError> {
11677 let mut explicit_row = false;
11678
11679 let rows = self.parse_comma_separated(|parser| {
11680 if parser.parse_keyword(Keyword::ROW) {
11681 explicit_row = true;
11682 }
11683
11684 parser.expect_token(&Token::LParen)?;
11685 if allow_empty && parser.peek_token().token == Token::RParen {
11686 parser.next_token();
11687 Ok(vec![])
11688 } else {
11689 let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
11690 parser.expect_token(&Token::RParen)?;
11691 Ok(exprs)
11692 }
11693 })?;
11694 Ok(Values { explicit_row, rows })
11695 }
11696
11697 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
11698 self.expect_keyword(Keyword::TRANSACTION)?;
11699 Ok(Statement::StartTransaction {
11700 modes: self.parse_transaction_modes()?,
11701 begin: false,
11702 modifier: None,
11703 })
11704 }
11705
11706 pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
11707 let modifier = if !self.dialect.supports_start_transaction_modifier() {
11708 None
11709 } else if self.parse_keyword(Keyword::DEFERRED) {
11710 Some(TransactionModifier::Deferred)
11711 } else if self.parse_keyword(Keyword::IMMEDIATE) {
11712 Some(TransactionModifier::Immediate)
11713 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
11714 Some(TransactionModifier::Exclusive)
11715 } else {
11716 None
11717 };
11718 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
11719 Ok(Statement::StartTransaction {
11720 modes: self.parse_transaction_modes()?,
11721 begin: true,
11722 modifier,
11723 })
11724 }
11725
11726 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
11727 Ok(Statement::Commit {
11728 chain: self.parse_commit_rollback_chain()?,
11729 })
11730 }
11731
    /// Parse a (possibly empty) comma-separated list of transaction modes —
    /// `ISOLATION LEVEL ...`, `READ ONLY`, `READ WRITE` — as used by
    /// `START TRANSACTION`, `BEGIN`, and friends.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else {
                    self.expected("isolation level", self.peek_token())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                // A comma was consumed on the previous pass, so another mode
                // is mandatory here.
                self.expected("transaction mode", self.peek_token())?
            } else {
                // No mode and none required: the (possibly empty) list ends.
                break;
            };
            modes.push(mode);
            // A trailing comma commits us to one more mode next iteration.
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
11767
11768 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
11769 Ok(Statement::Commit {
11770 chain: self.parse_commit_rollback_chain()?,
11771 })
11772 }
11773
11774 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
11775 let chain = self.parse_commit_rollback_chain()?;
11776 let savepoint = self.parse_rollback_savepoint()?;
11777
11778 Ok(Statement::Rollback { chain, savepoint })
11779 }
11780
11781 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
11782 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
11783 if self.parse_keyword(Keyword::AND) {
11784 let chain = !self.parse_keyword(Keyword::NO);
11785 self.expect_keyword(Keyword::CHAIN)?;
11786 Ok(chain)
11787 } else {
11788 Ok(false)
11789 }
11790 }
11791
11792 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
11793 if self.parse_keyword(Keyword::TO) {
11794 let _ = self.parse_keyword(Keyword::SAVEPOINT);
11795 let savepoint = self.parse_identifier(false)?;
11796
11797 Ok(Some(savepoint))
11798 } else {
11799 Ok(None)
11800 }
11801 }
11802
11803 pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
11804 let prepare = self.parse_keyword(Keyword::PREPARE);
11805 let name = self.parse_identifier(false)?;
11806 Ok(Statement::Deallocate { name, prepare })
11807 }
11808
    /// Parse the remainder of an `EXECUTE <name> [(<params>) | <params>]
    /// [USING <exprs>]` statement.
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        let has_parentheses = self.consume_token(&Token::LParen);

        // Pick the sentinel that terminates the parameter list: `)` inside
        // parentheses; otherwise EOF, the `USING` keyword, or `;` for the
        // bare comma-separated form.
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if w.keyword == Keyword::USING => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        // `parse_comma_separated0` permits an empty parameter list.
        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        // Optional `USING expr [, expr ...]` (e.g. MySQL prepared statements).
        let mut using = vec![];
        if self.parse_keyword(Keyword::USING) {
            using.push(self.parse_expr()?);

            while self.consume_token(&Token::Comma) {
                using.push(self.parse_expr()?);
            }
        };

        Ok(Statement::Execute {
            name,
            parameters,
            has_parentheses,
            using,
        })
    }
11843
11844 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
11845 let name = self.parse_identifier(false)?;
11846
11847 let mut data_types = vec![];
11848 if self.consume_token(&Token::LParen) {
11849 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
11850 self.expect_token(&Token::RParen)?;
11851 }
11852
11853 self.expect_keyword(Keyword::AS)?;
11854 let statement = Box::new(self.parse_statement()?);
11855 Ok(Statement::Prepare {
11856 name,
11857 data_types,
11858 statement,
11859 })
11860 }
11861
11862 pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
11863 self.expect_token(&Token::LParen)?;
11864 let query = self.parse_query()?;
11865 self.expect_token(&Token::RParen)?;
11866
11867 self.expect_keyword(Keyword::TO)?;
11868 let to = self.parse_identifier(false)?;
11869
11870 let with_options = self.parse_options(Keyword::WITH)?;
11871
11872 Ok(Statement::Unload {
11873 query,
11874 to,
11875 with: with_options,
11876 })
11877 }
11878
    /// Parse the `WHEN [NOT] MATCHED [BY SOURCE | BY TARGET] [AND <pred>]
    /// THEN <action>` clauses of a `MERGE` statement, looping until end of
    /// input or `;`.
    ///
    /// Enforces the standard restrictions: `UPDATE`/`DELETE` are only valid
    /// for matched (or not-matched-by-source) rows, and `INSERT` only for
    /// not-matched rows.
    pub fn parse_merge_clauses(&mut self) -> Result<Vec<MergeClause>, ParserError> {
        let mut clauses = vec![];
        loop {
            if self.peek_token() == Token::EOF || self.peek_token() == Token::SemiColon {
                break;
            }
            self.expect_keyword(Keyword::WHEN)?;

            let mut clause_kind = MergeClauseKind::Matched;
            if self.parse_keyword(Keyword::NOT) {
                clause_kind = MergeClauseKind::NotMatched;
            }
            self.expect_keyword(Keyword::MATCHED)?;

            // Optional `BY SOURCE` / `BY TARGET` refinement of NOT MATCHED.
            if matches!(clause_kind, MergeClauseKind::NotMatched)
                && self.parse_keywords(&[Keyword::BY, Keyword::SOURCE])
            {
                clause_kind = MergeClauseKind::NotMatchedBySource;
            } else if matches!(clause_kind, MergeClauseKind::NotMatched)
                && self.parse_keywords(&[Keyword::BY, Keyword::TARGET])
            {
                clause_kind = MergeClauseKind::NotMatchedByTarget;
            }

            // Optional extra predicate: WHEN ... AND <expr> THEN ...
            let predicate = if self.parse_keyword(Keyword::AND) {
                Some(self.parse_expr()?)
            } else {
                None
            };

            self.expect_keyword(Keyword::THEN)?;

            let merge_clause = match self.parse_one_of_keywords(&[
                Keyword::UPDATE,
                Keyword::INSERT,
                Keyword::DELETE,
            ]) {
                Some(Keyword::UPDATE) => {
                    // UPDATE is meaningless for rows that did not match.
                    if matches!(
                        clause_kind,
                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
                    ) {
                        return Err(ParserError::ParserError(format!(
                            "UPDATE is not allowed in a {clause_kind} merge clause"
                        )));
                    }
                    self.expect_keyword(Keyword::SET)?;
                    MergeAction::Update {
                        assignments: self.parse_comma_separated(Parser::parse_assignment)?,
                    }
                }
                Some(Keyword::DELETE) => {
                    // DELETE is meaningless for rows that did not match.
                    if matches!(
                        clause_kind,
                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
                    ) {
                        return Err(ParserError::ParserError(format!(
                            "DELETE is not allowed in a {clause_kind} merge clause"
                        )));
                    }
                    MergeAction::Delete
                }
                Some(Keyword::INSERT) => {
                    // INSERT is only valid for rows that did NOT match.
                    if !matches!(
                        clause_kind,
                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
                    ) {
                        return Err(ParserError::ParserError(format!(
                            "INSERT is not allowed in a {clause_kind} merge clause"
                        )));
                    }
                    let is_mysql = dialect_of!(self is MySqlDialect);

                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
                    // BigQuery-style `INSERT ROW` inserts the whole source row;
                    // otherwise a VALUES list is required.
                    let kind = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.parse_keyword(Keyword::ROW)
                    {
                        MergeInsertKind::Row
                    } else {
                        self.expect_keyword(Keyword::VALUES)?;
                        let values = self.parse_values(is_mysql)?;
                        MergeInsertKind::Values(values)
                    };
                    MergeAction::Insert(MergeInsertExpr { columns, kind })
                }
                _ => {
                    return Err(ParserError::ParserError(
                        "expected UPDATE, DELETE or INSERT in merge clause".to_string(),
                    ));
                }
            };
            clauses.push(MergeClause {
                clause_kind,
                predicate,
                action: merge_clause,
            });
        }
        Ok(clauses)
    }
11978
11979 pub fn parse_merge(&mut self) -> Result<Statement, ParserError> {
11980 let into = self.parse_keyword(Keyword::INTO);
11981
11982 let table = self.parse_table_factor()?;
11983
11984 self.expect_keyword(Keyword::USING)?;
11985 let source = self.parse_table_factor()?;
11986 self.expect_keyword(Keyword::ON)?;
11987 let on = self.parse_expr()?;
11988 let clauses = self.parse_merge_clauses()?;
11989
11990 Ok(Statement::Merge {
11991 into,
11992 table,
11993 source,
11994 on: Box::new(on),
11995 clauses,
11996 })
11997 }
11998
11999 fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
12000 match self.parse_value()? {
12001 v @ Value::SingleQuotedString(_) => Ok(v),
12002 v @ Value::DoubleQuotedString(_) => Ok(v),
12003 v @ Value::Number(_, _) => Ok(v),
12004 v @ Value::Placeholder(_) => Ok(v),
12005 _ => {
12006 self.prev_token();
12007 self.expected("number or string or ? placeholder", self.peek_token())
12008 }
12009 }
12010 }
12011
12012 pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
12014 let name = self.parse_object_name(false)?;
12015 if self.consume_token(&Token::LParen) {
12016 let value = self.parse_pragma_value()?;
12017 self.expect_token(&Token::RParen)?;
12018 Ok(Statement::Pragma {
12019 name,
12020 value: Some(value),
12021 is_eq: false,
12022 })
12023 } else if self.consume_token(&Token::Eq) {
12024 Ok(Statement::Pragma {
12025 name,
12026 value: Some(self.parse_pragma_value()?),
12027 is_eq: true,
12028 })
12029 } else {
12030 Ok(Statement::Pragma {
12031 name,
12032 value: None,
12033 is_eq: false,
12034 })
12035 }
12036 }
12037
12038 pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
12040 let extension_name = self.parse_identifier(false)?;
12041
12042 Ok(Statement::Install { extension_name })
12043 }
12044
12045 pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
12047 let extension_name = self.parse_identifier(false)?;
12048 Ok(Statement::Load { extension_name })
12049 }
12050
12051 pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
12056 self.expect_keyword(Keyword::TABLE)?;
12057 let name = self.parse_object_name(false)?;
12058 let on_cluster = self.parse_optional_on_cluster()?;
12059
12060 let partition = if self.parse_keyword(Keyword::PARTITION) {
12061 if self.parse_keyword(Keyword::ID) {
12062 Some(Partition::Identifier(self.parse_identifier(false)?))
12063 } else {
12064 Some(Partition::Expr(self.parse_expr()?))
12065 }
12066 } else {
12067 None
12068 };
12069
12070 let include_final = self.parse_keyword(Keyword::FINAL);
12071 let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
12072 if self.parse_keyword(Keyword::BY) {
12073 Some(Deduplicate::ByExpression(self.parse_expr()?))
12074 } else {
12075 Some(Deduplicate::All)
12076 }
12077 } else {
12078 None
12079 };
12080
12081 Ok(Statement::OptimizeTable {
12082 name,
12083 on_cluster,
12084 partition,
12085 include_final,
12086 deduplicate,
12087 })
12088 }
12089
12090 pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
12096 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
12098 let name = self.parse_object_name(false)?;
12100 let mut data_type: Option<DataType> = None;
12102 if self.parse_keywords(&[Keyword::AS]) {
12103 data_type = Some(self.parse_data_type()?)
12104 }
12105 let sequence_options = self.parse_create_sequence_options()?;
12106 let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
12108 if self.parse_keywords(&[Keyword::NONE]) {
12109 Some(ObjectName(vec![Ident::new("NONE")]))
12110 } else {
12111 Some(self.parse_object_name(false)?)
12112 }
12113 } else {
12114 None
12115 };
12116 Ok(Statement::CreateSequence {
12117 temporary,
12118 if_not_exists,
12119 name,
12120 data_type,
12121 sequence_options,
12122 owned_by,
12123 })
12124 }
12125
    /// Parse the optional `CREATE SEQUENCE` option clauses, in their fixed
    /// order: `INCREMENT [BY] n`, `[NO] MINVALUE n`, `[NO] MAXVALUE n`,
    /// `START [WITH] n`, `CACHE n`, `[NO] CYCLE`.
    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
        let mut sequence_options = vec![];
        // INCREMENT [BY] <number>; the bool records whether BY was written.
        if self.parse_keywords(&[Keyword::INCREMENT]) {
            if self.parse_keywords(&[Keyword::BY]) {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
            }
        }
        // MINVALUE <number> | NO MINVALUE
        if self.parse_keyword(Keyword::MINVALUE) {
            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
            sequence_options.push(SequenceOptions::MinValue(None));
        }
        // MAXVALUE <number> | NO MAXVALUE
        if self.parse_keywords(&[Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(None));
        }

        // START [WITH] <number>; the bool records whether WITH was written.
        if self.parse_keywords(&[Keyword::START]) {
            if self.parse_keywords(&[Keyword::WITH]) {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
            }
        }
        // CACHE <number>
        if self.parse_keywords(&[Keyword::CACHE]) {
            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
        }
        // NO CYCLE / CYCLE — note the bool flags the *NO* variant, so
        // `Cycle(true)` means "NO CYCLE". `NO CYCLE` must be checked first so
        // the bare-`CYCLE` branch cannot shadow it.
        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(true));
        } else if self.parse_keywords(&[Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(false));
        }

        Ok(sequence_options)
    }
12170
    /// The index (within the token stream) of the next token to be processed,
    /// as advanced/rewound by `next_token`/`prev_token`.
    pub fn index(&self) -> usize {
        self.index
    }
12175
12176 pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
12177 let ident = self.parse_identifier(false)?;
12178 self.expect_keyword(Keyword::AS)?;
12179
12180 let window_expr = if self.consume_token(&Token::LParen) {
12181 NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
12182 } else if self.dialect.supports_window_clause_named_window_reference() {
12183 NamedWindowExpr::NamedWindow(self.parse_identifier(false)?)
12184 } else {
12185 return self.expected("(", self.peek_token());
12186 };
12187
12188 Ok(NamedWindowDefinition(ident, window_expr))
12189 }
12190
12191 pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
12192 let name = self.parse_object_name(false)?;
12193 let params = self.parse_optional_procedure_parameters()?;
12194 self.expect_keyword(Keyword::AS)?;
12195 self.expect_keyword(Keyword::BEGIN)?;
12196 let statements = self.parse_statements()?;
12197 self.expect_keyword(Keyword::END)?;
12198 Ok(Statement::CreateProcedure {
12199 name,
12200 or_alter,
12201 params,
12202 body: statements,
12203 })
12204 }
12205
    /// Parse the body of a window specification — everything between the
    /// parentheses of `OVER ( ... )` (the `(` has already been consumed):
    /// an optional base window name, `PARTITION BY`, `ORDER BY`, and a frame
    /// clause. Also consumes the closing `)`.
    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
        // A leading non-keyword word is the name of a base window this spec
        // extends, e.g. `OVER (w ORDER BY x)`.
        // NOTE(review): `parse_optional_indent` presumably parses an optional
        // identifier (the name reads like a typo of "ident") — confirm at its
        // definition.
        let window_name = match self.peek_token().token {
            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
                self.parse_optional_indent()?
            }
            _ => None,
        };

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Anything remaining before `)` must be a window frame clause.
        let window_frame = if !self.consume_token(&Token::RParen) {
            let window_frame = self.parse_window_frame()?;
            self.expect_token(&Token::RParen)?;
            Some(window_frame)
        } else {
            None
        };
        Ok(WindowSpec {
            window_name,
            partition_by,
            order_by,
            window_frame,
        })
    }
12239
    /// Parse the remainder of a PostgreSQL-style `CREATE TYPE <name> AS
    /// (<attr> <type> [COLLATE c], ...)` composite-type definition.
    /// `CREATE TYPE name AS` with no attribute list, or with an empty `()`,
    /// yields a composite type with no attributes.
    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;
        self.expect_keyword(Keyword::AS)?;

        let mut attributes = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            // No attribute list, or an empty `()`.
            return Ok(Statement::CreateType {
                name,
                representation: UserDefinedTypeRepresentation::Composite { attributes },
            });
        }

        loop {
            let attr_name = self.parse_identifier(false)?;
            let attr_data_type = self.parse_data_type()?;
            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            attributes.push(UserDefinedTypeCompositeAttributeDef {
                name: attr_name,
                data_type: attr_data_type,
                collation: attr_collation,
            });
            // A comma may separate attributes; consuming it first also lets a
            // trailing comma precede the closing paren.
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                // End of the attribute list.
                break;
            } else if !comma {
                return self.expected("',' or ')' after attribute definition", self.peek_token());
            }
        }

        Ok(Statement::CreateType {
            name,
            representation: UserDefinedTypeRepresentation::Composite { attributes },
        })
    }
12279
12280 fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
12281 self.expect_token(&Token::LParen)?;
12282 let partitions = self.parse_comma_separated(|p| p.parse_identifier(false))?;
12283 self.expect_token(&Token::RParen)?;
12284 Ok(partitions)
12285 }
12286
12287 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
12288 if dialect_of!(self is MySqlDialect | GenericDialect) {
12289 if self.parse_keyword(Keyword::FIRST) {
12290 Ok(Some(MySQLColumnPosition::First))
12291 } else if self.parse_keyword(Keyword::AFTER) {
12292 let ident = self.parse_identifier(false)?;
12293 Ok(Some(MySQLColumnPosition::After(ident)))
12294 } else {
12295 Ok(None)
12296 }
12297 } else {
12298 Ok(None)
12299 }
12300 }
12301
    /// Consume the parser and return the token stream it was constructed
    /// with (tokens together with their source locations).
    pub fn into_tokens(self) -> Vec<TokenWithLocation> {
        self.tokens
    }
12306
12307 fn peek_sub_query(&mut self) -> bool {
12309 if self
12310 .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
12311 .is_some()
12312 {
12313 self.prev_token();
12314 return true;
12315 }
12316 false
12317 }
12318}
12319
impl Word {
    /// Convert a tokenizer [`Word`] into an AST [`Ident`], preserving the
    /// word's text and quote style.
    pub fn to_ident(&self) -> Ident {
        Ident {
            value: self.value.clone(),
            quote_style: self.quote_style,
        }
    }
}
12328
12329#[cfg(test)]
12330mod tests {
12331 use crate::test_utils::{all_dialects, TestedDialects};
12332
12333 use super::*;
12334
12335 #[test]
12336 fn test_prev_index() {
12337 let sql = "SELECT version";
12338 all_dialects().run_parser_method(sql, |parser| {
12339 assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
12340 assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
12341 parser.prev_token();
12342 assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
12343 assert_eq!(parser.next_token(), Token::make_word("version", None));
12344 parser.prev_token();
12345 assert_eq!(parser.peek_token(), Token::make_word("version", None));
12346 assert_eq!(parser.next_token(), Token::make_word("version", None));
12347 assert_eq!(parser.peek_token(), Token::EOF);
12348 parser.prev_token();
12349 assert_eq!(parser.next_token(), Token::make_word("version", None));
12350 assert_eq!(parser.next_token(), Token::EOF);
12351 assert_eq!(parser.next_token(), Token::EOF);
12352 parser.prev_token();
12353 });
12354 }
12355
12356 #[test]
12357 fn test_peek_tokens() {
12358 all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
12359 assert!(matches!(
12360 parser.peek_tokens(),
12361 [Token::Word(Word {
12362 keyword: Keyword::SELECT,
12363 ..
12364 })]
12365 ));
12366
12367 assert!(matches!(
12368 parser.peek_tokens(),
12369 [
12370 Token::Word(Word {
12371 keyword: Keyword::SELECT,
12372 ..
12373 }),
12374 Token::Word(_),
12375 Token::Word(Word {
12376 keyword: Keyword::AS,
12377 ..
12378 }),
12379 ]
12380 ));
12381
12382 for _ in 0..4 {
12383 parser.next_token();
12384 }
12385
12386 assert!(matches!(
12387 parser.peek_tokens(),
12388 [
12389 Token::Word(Word {
12390 keyword: Keyword::FROM,
12391 ..
12392 }),
12393 Token::Word(_),
12394 Token::EOF,
12395 Token::EOF,
12396 ]
12397 ))
12398 })
12399 }
12400
12401 #[cfg(test)]
12402 mod test_parse_data_type {
12403 use crate::ast::{
12404 CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
12405 };
12406 use crate::dialect::{AnsiDialect, GenericDialect};
12407 use crate::test_utils::TestedDialects;
12408
12409 macro_rules! test_parse_data_type {
12410 ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
12411 $dialect.run_parser_method(&*$input, |parser| {
12412 let data_type = parser.parse_data_type().unwrap();
12413 assert_eq!($expected_type, data_type);
12414 assert_eq!($input.to_string(), data_type.to_string());
12415 });
12416 }};
12417 }
12418
12419 #[test]
12420 fn test_ansii_character_string_types() {
12421 let dialect =
12423 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
12424
12425 test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));
12426
12427 test_parse_data_type!(
12428 dialect,
12429 "CHARACTER(20)",
12430 DataType::Character(Some(CharacterLength::IntegerLength {
12431 length: 20,
12432 unit: None
12433 }))
12434 );
12435
12436 test_parse_data_type!(
12437 dialect,
12438 "CHARACTER(20 CHARACTERS)",
12439 DataType::Character(Some(CharacterLength::IntegerLength {
12440 length: 20,
12441 unit: Some(CharLengthUnits::Characters)
12442 }))
12443 );
12444
12445 test_parse_data_type!(
12446 dialect,
12447 "CHARACTER(20 OCTETS)",
12448 DataType::Character(Some(CharacterLength::IntegerLength {
12449 length: 20,
12450 unit: Some(CharLengthUnits::Octets)
12451 }))
12452 );
12453
12454 test_parse_data_type!(dialect, "CHAR", DataType::Char(None));
12455
12456 test_parse_data_type!(
12457 dialect,
12458 "CHAR(20)",
12459 DataType::Char(Some(CharacterLength::IntegerLength {
12460 length: 20,
12461 unit: None
12462 }))
12463 );
12464
12465 test_parse_data_type!(
12466 dialect,
12467 "CHAR(20 CHARACTERS)",
12468 DataType::Char(Some(CharacterLength::IntegerLength {
12469 length: 20,
12470 unit: Some(CharLengthUnits::Characters)
12471 }))
12472 );
12473
12474 test_parse_data_type!(
12475 dialect,
12476 "CHAR(20 OCTETS)",
12477 DataType::Char(Some(CharacterLength::IntegerLength {
12478 length: 20,
12479 unit: Some(CharLengthUnits::Octets)
12480 }))
12481 );
12482
12483 test_parse_data_type!(
12484 dialect,
12485 "CHARACTER VARYING(20)",
12486 DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
12487 length: 20,
12488 unit: None
12489 }))
12490 );
12491
12492 test_parse_data_type!(
12493 dialect,
12494 "CHARACTER VARYING(20 CHARACTERS)",
12495 DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
12496 length: 20,
12497 unit: Some(CharLengthUnits::Characters)
12498 }))
12499 );
12500
12501 test_parse_data_type!(
12502 dialect,
12503 "CHARACTER VARYING(20 OCTETS)",
12504 DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
12505 length: 20,
12506 unit: Some(CharLengthUnits::Octets)
12507 }))
12508 );
12509
12510 test_parse_data_type!(
12511 dialect,
12512 "CHAR VARYING(20)",
12513 DataType::CharVarying(Some(CharacterLength::IntegerLength {
12514 length: 20,
12515 unit: None
12516 }))
12517 );
12518
12519 test_parse_data_type!(
12520 dialect,
12521 "CHAR VARYING(20 CHARACTERS)",
12522 DataType::CharVarying(Some(CharacterLength::IntegerLength {
12523 length: 20,
12524 unit: Some(CharLengthUnits::Characters)
12525 }))
12526 );
12527
12528 test_parse_data_type!(
12529 dialect,
12530 "CHAR VARYING(20 OCTETS)",
12531 DataType::CharVarying(Some(CharacterLength::IntegerLength {
12532 length: 20,
12533 unit: Some(CharLengthUnits::Octets)
12534 }))
12535 );
12536
12537 test_parse_data_type!(
12538 dialect,
12539 "VARCHAR(20)",
12540 DataType::Varchar(Some(CharacterLength::IntegerLength {
12541 length: 20,
12542 unit: None
12543 }))
12544 );
12545 }
12546
    #[test]
    fn test_ansii_character_large_object_types() {
        // ANSI character large-object types: CHARACTER LARGE OBJECT,
        // CHAR LARGE OBJECT and CLOB, each with an optional length in
        // parentheses. Round-tripped through the generic and ANSI dialects.
        let dialect =
            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

        test_parse_data_type!(
            dialect,
            "CHARACTER LARGE OBJECT",
            DataType::CharacterLargeObject(None)
        );
        test_parse_data_type!(
            dialect,
            "CHARACTER LARGE OBJECT(20)",
            DataType::CharacterLargeObject(Some(20))
        );

        test_parse_data_type!(
            dialect,
            "CHAR LARGE OBJECT",
            DataType::CharLargeObject(None)
        );
        test_parse_data_type!(
            dialect,
            "CHAR LARGE OBJECT(20)",
            DataType::CharLargeObject(Some(20))
        );

        // CLOB is the abbreviated spelling and maps to its own variant.
        test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
        test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
    }
12578
    #[test]
    fn test_parse_custom_types() {
        // Type names the parser does not recognize (here: GEOMETRY) fall
        // back to DataType::Custom, which carries the raw name plus any
        // parenthesized modifiers as plain strings.
        let dialect =
            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

        // No modifiers.
        test_parse_data_type!(
            dialect,
            "GEOMETRY",
            DataType::Custom(ObjectName(vec!["GEOMETRY".into()]), vec![])
        );

        // A single modifier.
        test_parse_data_type!(
            dialect,
            "GEOMETRY(POINT)",
            DataType::Custom(
                ObjectName(vec!["GEOMETRY".into()]),
                vec!["POINT".to_string()]
            )
        );

        // Multiple modifiers; numeric modifiers are kept as strings too.
        test_parse_data_type!(
            dialect,
            "GEOMETRY(POINT, 4326)",
            DataType::Custom(
                ObjectName(vec!["GEOMETRY".into()]),
                vec!["POINT".to_string(), "4326".to_string()]
            )
        );
    }
12608
    #[test]
    fn test_ansii_exact_numeric_types() {
        // ANSI exact numeric types NUMERIC, DECIMAL and DEC, each in three
        // forms: bare, with a precision, and with precision plus scale.
        let dialect =
            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

        test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));

        test_parse_data_type!(
            dialect,
            "NUMERIC(2)",
            DataType::Numeric(ExactNumberInfo::Precision(2))
        );

        test_parse_data_type!(
            dialect,
            "NUMERIC(2,10)",
            DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
        );

        test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));

        test_parse_data_type!(
            dialect,
            "DECIMAL(2)",
            DataType::Decimal(ExactNumberInfo::Precision(2))
        );

        test_parse_data_type!(
            dialect,
            "DECIMAL(2,10)",
            DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
        );

        // DEC is the short spelling and maps to its own variant.
        test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));

        test_parse_data_type!(
            dialect,
            "DEC(2)",
            DataType::Dec(ExactNumberInfo::Precision(2))
        );

        test_parse_data_type!(
            dialect,
            "DEC(2,10)",
            DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
        );
    }
12657
    #[test]
    fn test_ansii_date_type() {
        // ANSI date/time types: DATE, TIME and TIMESTAMP, with an optional
        // precision argument and optional WITH/WITHOUT TIME ZONE suffix.
        let dialect =
            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

        test_parse_data_type!(dialect, "DATE", DataType::Date);

        // TIME in every precision/timezone combination.
        test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));

        test_parse_data_type!(
            dialect,
            "TIME(6)",
            DataType::Time(Some(6), TimezoneInfo::None)
        );

        test_parse_data_type!(
            dialect,
            "TIME WITH TIME ZONE",
            DataType::Time(None, TimezoneInfo::WithTimeZone)
        );

        test_parse_data_type!(
            dialect,
            "TIME(6) WITH TIME ZONE",
            DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
        );

        test_parse_data_type!(
            dialect,
            "TIME WITHOUT TIME ZONE",
            DataType::Time(None, TimezoneInfo::WithoutTimeZone)
        );

        test_parse_data_type!(
            dialect,
            "TIME(6) WITHOUT TIME ZONE",
            DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
        );

        // TIMESTAMP variants, including large precision values.
        test_parse_data_type!(
            dialect,
            "TIMESTAMP",
            DataType::Timestamp(None, TimezoneInfo::None)
        );

        test_parse_data_type!(
            dialect,
            "TIMESTAMP(22)",
            DataType::Timestamp(Some(22), TimezoneInfo::None)
        );

        test_parse_data_type!(
            dialect,
            "TIMESTAMP(22) WITH TIME ZONE",
            DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
        );

        test_parse_data_type!(
            dialect,
            "TIMESTAMP(33) WITHOUT TIME ZONE",
            DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
        );
    }
12722 }
12723
#[test]
fn test_parse_schema_name() {
    // Round-trip helper: `input` must parse to `expected` and serialize
    // back to exactly the same SQL text, across every tested dialect.
    fn check(input: String, expected: SchemaName) {
        all_dialects().run_parser_method(&input, |parser| {
            let parsed = parser.parse_schema_name().unwrap();
            assert_eq!(parsed, expected);
            assert_eq!(parsed.to_string(), input);
        });
    }

    let dummy_name = ObjectName(vec![Ident::new("dummy_name")]);
    let dummy_authorization = Ident::new("dummy_authorization");

    // `<name>`
    check(
        format!("{dummy_name}"),
        SchemaName::Simple(dummy_name.clone()),
    );
    // `AUTHORIZATION <owner>`
    check(
        format!("AUTHORIZATION {dummy_authorization}"),
        SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
    );
    // `<name> AUTHORIZATION <owner>`
    check(
        format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
        SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
    );
}
12756
#[test]
fn mysql_parse_index_table_constraint() {
    // Round-trip helper for MySQL-style inline INDEX/KEY table constraints:
    // `input` must parse to `expected` and display back as the same SQL.
    fn check(dialect: &TestedDialects, input: &str, expected: TableConstraint) {
        dialect.run_parser_method(input, |parser| {
            let parsed = parser.parse_optional_table_constraint().unwrap().unwrap();
            assert_eq!(parsed, expected);
            assert_eq!(parsed.to_string(), input);
        });
    }

    let dialect =
        TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);

    // Bare INDEX on a single column.
    check(
        &dialect,
        "INDEX (c1)",
        TableConstraint::Index {
            display_as_key: false,
            name: None,
            index_type: None,
            columns: vec![Ident::new("c1")],
        },
    );

    // KEY is a synonym; `display_as_key` records which spelling was used.
    check(
        &dialect,
        "KEY (c1)",
        TableConstraint::Index {
            display_as_key: true,
            name: None,
            index_type: None,
            columns: vec![Ident::new("c1")],
        },
    );

    // Quoted index name plus a multi-column list.
    check(
        &dialect,
        "INDEX 'index' (c1, c2)",
        TableConstraint::Index {
            display_as_key: false,
            name: Some(Ident::with_quote('\'', "index")),
            index_type: None,
            columns: vec![Ident::new("c1"), Ident::new("c2")],
        },
    );

    // USING BTREE / USING HASH, without an index name.
    check(
        &dialect,
        "INDEX USING BTREE (c1)",
        TableConstraint::Index {
            display_as_key: false,
            name: None,
            index_type: Some(IndexType::BTree),
            columns: vec![Ident::new("c1")],
        },
    );
    check(
        &dialect,
        "INDEX USING HASH (c1)",
        TableConstraint::Index {
            display_as_key: false,
            name: None,
            index_type: Some(IndexType::Hash),
            columns: vec![Ident::new("c1")],
        },
    );

    // USING BTREE / USING HASH, with an index name.
    check(
        &dialect,
        "INDEX idx_name USING BTREE (c1)",
        TableConstraint::Index {
            display_as_key: false,
            name: Some(Ident::new("idx_name")),
            index_type: Some(IndexType::BTree),
            columns: vec![Ident::new("c1")],
        },
    );
    check(
        &dialect,
        "INDEX idx_name USING HASH (c1)",
        TableConstraint::Index {
            display_as_key: false,
            name: Some(Ident::new("idx_name")),
            index_type: Some(IndexType::Hash),
            columns: vec![Ident::new("c1")],
        },
    );
}
12851
#[test]
fn test_tokenizer_error_loc() {
    // An unterminated string literal fails at the tokenizer level, and the
    // error message carries the line/column of the offending quote.
    let result = Parser::parse_sql(&GenericDialect, "foo '");
    let expected =
        "Unterminated string literal at Line: 1, Column: 5".to_string();
    assert_eq!(result, Err(ParserError::TokenizerError(expected)));
}
12863
#[test]
fn test_parser_error_loc() {
    // A syntactically invalid SELECT yields a parser error that names the
    // unexpected token and its line/column.
    let result = Parser::parse_sql(&GenericDialect, "SELECT this is a syntax error");
    let expected =
        "Expected: [NOT] NULL or TRUE|FALSE or [NOT] DISTINCT FROM after IS, found: a at Line: 1, Column: 16";
    assert_eq!(result, Err(ParserError::ParserError(expected.to_string())));
}
12876
#[test]
fn test_nested_explain_error() {
    // EXPLAIN cannot wrap another EXPLAIN; the parser rejects the statement.
    let result = Parser::parse_sql(&GenericDialect, "EXPLAIN EXPLAIN SELECT 1");
    let expected = ParserError::ParserError("Explain must be root of the plan".to_string());
    assert_eq!(result, Err(expected));
}
12888
#[test]
fn test_parse_multipart_identifier_positive() {
    let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);

    // Quoted middle part: the doubled quote inside the delimiters collapses
    // to a single embedded quote in the parsed identifier value.
    dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
        let parts = parser.parse_multipart_identifier().unwrap();
        assert_eq!(
            parts,
            vec![
                Ident::new("CATALOG"),
                Ident::with_quote('"', "F(o)o. \"bar"),
                Ident::new("table"),
            ]
        );
    });

    // Whitespace around the period separator is tolerated.
    dialect.run_parser_method("CATALOG . table", |parser| {
        let parts = parser.parse_multipart_identifier().unwrap();
        assert_eq!(parts, vec![Ident::new("CATALOG"), Ident::new("table")]);
    });
}
12929
#[test]
fn test_parse_multipart_identifier_negative() {
    // Each malformed input must fail with exactly the given error message,
    // across every tested dialect.
    fn check_err(input: &str, expected_err: &str) {
        all_dialects().run_parser_method(input, |parser| {
            let err = parser.parse_multipart_identifier().unwrap_err();
            assert_eq!(err.to_string(), expected_err);
        });
    }

    check_err("", "sql parser error: Empty input when parsing identifier");
    check_err(
        "*schema.table",
        "sql parser error: Unexpected token in identifier: *",
    );
    check_err(
        "schema.table*",
        "sql parser error: Unexpected token in identifier: *",
    );
    check_err(
        "schema.table.",
        "sql parser error: Trailing period in identifier",
    );
    check_err(
        "schema.*",
        "sql parser error: Unexpected token following period in identifier: *",
    );
}
12966
#[test]
fn test_mysql_partition_selection() {
    // `PARTITION (p0, p2)` after a table name selects specific partitions;
    // verify the partition identifiers survive parsing intact.
    let sql = "SELECT * FROM employees PARTITION (p0, p2)";
    let expected = vec!["p0", "p2"];

    let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
    assert_eq!(ast.len(), 1);

    // Every pattern level panics on mismatch. Previously the two inner
    // `if let`s had no `else` arm, so an unexpected AST shape silently
    // skipped the assertions and the test passed vacuously.
    if let Statement::Query(query) = &ast[0] {
        if let SetExpr::Select(select) = &*query.body {
            assert_eq!(select.from.len(), 1);
            let from: &TableWithJoins = &select.from[0];
            if let TableFactor::Table { partitions, .. } = &from.relation {
                let actual: Vec<&str> = partitions
                    .iter()
                    .map(|ident| ident.value.as_str())
                    .collect();
                assert_eq!(expected, actual);
            } else {
                panic!("expected a TableFactor::Table carrying partitions");
            }
        } else {
            panic!("expected a plain SELECT query body");
        }
    } else {
        panic!("fail to parse mysql partition selection");
    }
}
12991
#[test]
fn test_replace_into_placeholders() {
    // `&a` is not a valid value expression, so this REPLACE must not parse.
    let result = Parser::parse_sql(&GenericDialect {}, "REPLACE INTO t (a) VALUES (&a)");
    assert!(result.is_err());
}
12998
#[test]
fn test_replace_into_set() {
    // The `REPLACE ... SET` form must fail to parse under the MySQL dialect.
    let result = Parser::parse_sql(&MySqlDialect {}, "REPLACE INTO t SET a='1'");
    assert!(result.is_err());
}
13008
#[test]
fn test_replace_into_set_placeholder() {
    // A bare placeholder after SET is not a valid assignment list.
    let result = Parser::parse_sql(&GenericDialect {}, "REPLACE INTO t SET ?");
    assert!(result.is_err());
}
13015
#[test]
fn test_replace_incomplete() {
    // A lone REPLACE keyword with no statement body must fail to parse.
    let result = Parser::parse_sql(&MySqlDialect {}, "REPLACE");
    assert!(result.is_err());
}
13022}