#[cfg(not(feature = "std"))]
use alloc::{
    boxed::Box,
    format,
    string::{String, ToString},
    vec,
    vec::Vec,
};
use core::fmt;

use log::debug;

use IsLateral::*;
use IsOptional::*;

use crate::ast::helpers::stmt_create_table::CreateTableBuilder;
use crate::ast::*;
use crate::dialect::*;
use crate::keywords::{self, Keyword};
use crate::tokenizer::*;

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    TokenizerError(String),
    ParserError(String),
    RecursionLimitExceeded,
}

macro_rules! parser_err {
    ($MSG:expr) => {
        Err(ParserError::ParserError($MSG.to_string()))
    };
}

macro_rules! return_ok_if_some {
    ($e:expr) => {{
        if let Some(v) = $e {
            return Ok(v);
        }
    }};
}
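
/// Implementation of [`RecursionCounter`] when the `std` feature is enabled:
/// a shared atomic counter that is decremented on entry to a recursive call
/// and restored when the returned guard is dropped.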
#[cfg(feature = "std")]
mod recursion {
    use core::sync::atomic::{AtomicUsize, Ordering};
    use std::rc::Rc;

    use super::ParserError;

    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<AtomicUsize>,
    }

    impl RecursionCounter {
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(remaining_depth.into()),
            }
        }
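
        /// Decrements the remaining depth by one. Returns
        /// [`ParserError::RecursionLimitExceeded`] if the depth is already
        /// exhausted, otherwise a [`DepthGuard`] that restores the depth when
        /// it is dropped.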
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            let old_value = self.remaining_depth.fetch_sub(1, Ordering::SeqCst);
            if old_value == 0 {
                Err(ParserError::RecursionLimitExceeded)
            } else {
                Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
            }
        }
    }
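
    /// Guard returned by [`RecursionCounter::try_decrease`]; adds the
    /// consumed depth back when dropped.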
    pub struct DepthGuard {
        remaining_depth: Rc<AtomicUsize>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<AtomicUsize>) -> Self {
            Self { remaining_depth }
        }
    }
    impl Drop for DepthGuard {
        fn drop(&mut self) {
            self.remaining_depth.fetch_add(1, Ordering::SeqCst);
        }
    }
}

#[cfg(not(feature = "std"))]
mod recursion {
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    pub struct DepthGuard {}
}

use recursion::RecursionCounter;

#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}

pub enum IsLateral {
    Lateral,
    NotLateral,
}

pub enum WildcardExpr {
    Expr(Expr),
    QualifiedWildcard(ObjectName),
    Wildcard,
}

impl From<WildcardExpr> for FunctionArgExpr {
    fn from(wildcard_expr: WildcardExpr) -> Self {
        match wildcard_expr {
            WildcardExpr::Expr(expr) => Self::Expr(expr),
            WildcardExpr::QualifiedWildcard(prefix) => Self::QualifiedWildcard(prefix),
            WildcardExpr::Wildcard => Self::Wildcard,
        }
    }
}

impl From<TokenizerError> for ParserError {
    fn from(e: TokenizerError) -> Self {
        ParserError::TokenizerError(e.to_string())
    }
}

impl fmt::Display for ParserError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "sql parser error: {}",
            match self {
                ParserError::TokenizerError(s) => s,
                ParserError::ParserError(s) => s,
                ParserError::RecursionLimitExceeded => "recursion limit exceeded",
            }
        )
    }
}

#[cfg(feature = "std")]
impl std::error::Error for ParserError {}
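
/// Default recursion limit used by [`Parser::new`]; can be overridden with
/// [`Parser::with_recursion_limit`].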
const DEFAULT_REMAINING_DEPTH: usize = 50;

#[derive(Default)]
pub struct ParserOptions {
    pub trailing_commas: bool,
}
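
/// SQL parser: consumes the tokens produced by the [`Tokenizer`] for a
/// specific [`Dialect`] and builds the corresponding AST nodes.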
pub struct Parser<'a> {
    tokens: Vec<TokenWithLocation>,
    index: usize,
    dialect: &'a dyn Dialect,
    options: ParserOptions,
    recursion_counter: RecursionCounter,
}

impl<'a> Parser<'a> {
    pub fn new(dialect: &'a dyn Dialect) -> Self {
        Self {
            tokens: vec![],
            index: 0,
            dialect,
            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
            options: ParserOptions::default(),
        }
    }
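
    /// Specify the maximum recursion depth allowed while parsing, replacing
    /// the default of [`DEFAULT_REMAINING_DEPTH`]. Returns `self` for chaining.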
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }

    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }

    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithLocation>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }

    pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
        let tokens_with_locations: Vec<TokenWithLocation> = tokens
            .into_iter()
            .map(|token| TokenWithLocation {
                token,
                location: Location { line: 0, column: 0 },
            })
            .collect();
        self.with_tokens_with_locations(tokens_with_locations)
    }
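
    /// Tokenize the given SQL string with this parser's dialect and load the
    /// resulting tokens, returning an error if tokenization fails.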
    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
        debug!("Parsing sql '{}'...", sql);
        let mut tokenizer = Tokenizer::new(self.dialect, sql);
        let tokens = tokenizer.tokenize()?;
        Ok(self.with_tokens(tokens))
    }
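
    /// Parse semicolon-separated statements until EOF, requiring a delimiter
    /// between statements while tolerating empty statements.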
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // ignore empty statements (between successive statement delimiters)
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            if self.peek_token() == Token::EOF {
                break;
            }
            if expecting_statement_delimiter {
                return self.expected("end of statement", self.peek_token());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
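
    /// Convenience entry point: parse a SQL string for the given dialect and
    /// return the resulting statements.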
    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
    }
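
    /// Parse a single top-level statement (such as SELECT, INSERT, CREATE,
    /// etc.), stopping before the statement separator, if any. The dialect is
    /// given the first chance to handle it via [`Dialect::parse_statement`].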
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;

        // allow the dialect to intercept statement parsing
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => Ok(self.parse_kill()?),
                Keyword::DESCRIBE => Ok(self.parse_explain(true)?),
                Keyword::EXPLAIN => Ok(self.parse_explain(false)?),
                Keyword::ANALYZE => Ok(self.parse_analyze()?),
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES => {
                    self.prev_token();
                    Ok(Statement::Query(Box::new(self.parse_query()?)))
                }
                Keyword::TRUNCATE => Ok(self.parse_truncate()?),
                Keyword::MSCK => Ok(self.parse_msck()?),
                Keyword::CREATE => Ok(self.parse_create()?),
                Keyword::CACHE => Ok(self.parse_cache_table()?),
                Keyword::DROP => Ok(self.parse_drop()?),
                Keyword::DISCARD => Ok(self.parse_discard()?),
                Keyword::DECLARE => Ok(self.parse_declare()?),
                Keyword::FETCH => Ok(self.parse_fetch_statement()?),
                Keyword::DELETE => Ok(self.parse_delete()?),
                Keyword::INSERT => Ok(self.parse_insert()?),
                Keyword::UNCACHE => Ok(self.parse_uncache_table()?),
                Keyword::UPDATE => Ok(self.parse_update()?),
                Keyword::ALTER => Ok(self.parse_alter()?),
                Keyword::COPY => Ok(self.parse_copy()?),
                Keyword::CLOSE => Ok(self.parse_close()?),
                Keyword::SET => Ok(self.parse_set()?),
                Keyword::SHOW => Ok(self.parse_show()?),
                Keyword::USE => Ok(self.parse_use()?),
                Keyword::GRANT => Ok(self.parse_grant()?),
                Keyword::REVOKE => Ok(self.parse_revoke()?),
                Keyword::START => Ok(self.parse_start_transaction()?),
                Keyword::BEGIN => Ok(self.parse_begin()?),
                Keyword::SAVEPOINT => Ok(self.parse_savepoint()?),
                Keyword::COMMIT => Ok(self.parse_commit()?),
                Keyword::ROLLBACK => Ok(self.parse_rollback()?),
                Keyword::ASSERT => Ok(self.parse_assert()?),
                Keyword::DEALLOCATE => Ok(self.parse_deallocate()?),
                Keyword::EXECUTE => Ok(self.parse_execute()?),
                Keyword::PREPARE => Ok(self.parse_prepare()?),
                Keyword::MERGE => Ok(self.parse_merge()?),
                _ => self.expected("an SQL statement", next_token),
            },
            Token::LParen => {
                self.prev_token();
                Ok(Statement::Query(Box::new(self.parse_query()?)))
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }

    pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword(Keyword::TABLE)?;
        let table_name = self.parse_object_name()?;
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword(Keyword::PARTITIONS)?;
                Ok(pa)
            })
            .unwrap_or_default();
        Ok(Statement::Msck {
            repair,
            table_name,
            partition_action,
        })
    }

    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::TABLE)?;
        let table_name = self.parse_object_name()?;
        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }
        Ok(Statement::Truncate {
            table_name,
            partitions,
        })
    }

    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::TABLE)?;
        let table_name = self.parse_object_name()?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword(Keyword::COLUMNS)?;

                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(Parser::parse_identifier)
                        })
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Statement::Analyze {
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }

    pub fn parse_wildcard_expr(&mut self) -> Result<WildcardExpr, ParserError> {
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if self.peek_token().token == Token::Period => {
                let mut id_parts: Vec<Ident> = vec![w.to_ident()];

                while self.consume_token(&Token::Period) {
                    let next_token = self.next_token();
                    match next_token.token {
                        Token::Word(w) => id_parts.push(w.to_ident()),
                        Token::Mul => {
                            return Ok(WildcardExpr::QualifiedWildcard(ObjectName(id_parts)));
                        }
                        _ => {
                            return self.expected("an identifier or a '*' after '.'", next_token);
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(WildcardExpr::Wildcard);
            }
            _ => (),
        };

        self.index = index;
        self.parse_expr().map(WildcardExpr::Expr)
    }
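
    /// Parse a new expression, guarding against excessive recursion.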
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;
        self.parse_subexpr(0)
    }
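
    /// Parse an expression, consuming infix operators for as long as their
    /// precedence exceeds `precedence` (precedence climbing).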
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;
        debug!("prefix: {:?}", expr);
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {:?}", next_precedence);

            if precedence >= next_precedence {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }

    pub fn parse_interval_expr(&mut self) -> Result<Expr, ParserError> {
        let precedence = 0;
        let mut expr = self.parse_prefix()?;

        loop {
            let next_precedence = self.get_next_interval_precedence()?;

            if precedence >= next_precedence {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }

        Ok(expr)
    }

    pub fn get_next_interval_precedence(&self) -> Result<u8, ParserError> {
        let token = self.peek_token();

        match token.token {
            Token::Word(w) if w.keyword == Keyword::AND => Ok(0),
            Token::Word(w) if w.keyword == Keyword::OR => Ok(0),
            Token::Word(w) if w.keyword == Keyword::XOR => Ok(0),
            _ => self.get_next_precedence(),
        }
    }

    pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
        let condition = self.parse_expr()?;
        let message = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Statement::Assert { condition, message })
    }

    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier()?;
        Ok(Statement::Savepoint { name })
    }
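
    /// Parse an expression prefix: literals, identifiers, unary operators,
    /// typed strings, parenthesized expressions or subqueries, and
    /// keyword-introduced forms such as CASE, CAST, and EXISTS.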
    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
        // allow the dialect to intercept prefix parsing
        if let Some(prefix) = self.dialect.parse_prefix(self) {
            return prefix;
        }

        // Speculatively parse a data type followed by a string literal as a
        // typed string (e.g. `DATE '2020-01-01'`), backtracking via
        // `maybe_parse` if that fails.
        return_ok_if_some!(self.maybe_parse(|parser| {
            match parser.parse_data_type()? {
                DataType::Interval => parser.parse_interval(),
                // A custom data type here is most likely just an identifier,
                // so fail the speculative parse and fall through below.
                DataType::Custom(..) => parser_err!("dummy"),
                data_type => Ok(Expr::TypedString {
                    data_type,
                    value: parser.parse_literal_string()?,
                }),
            }
        }));

        let next_token = self.next_token();
        let expr = match next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::TRUE | Keyword::FALSE | Keyword::NULL => {
                    self.prev_token();
                    Ok(Expr::Value(self.parse_value()?))
                }
                Keyword::CURRENT_CATALOG
                | Keyword::CURRENT_USER
                | Keyword::SESSION_USER
                | Keyword::USER
                    if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
                {
                    Ok(Expr::Function(Function {
                        name: ObjectName(vec![w.to_ident()]),
                        args: vec![],
                        over: None,
                        distinct: false,
                        special: true,
                    }))
                }
                Keyword::CURRENT_TIMESTAMP
                | Keyword::CURRENT_TIME
                | Keyword::CURRENT_DATE
                | Keyword::LOCALTIME
                | Keyword::LOCALTIMESTAMP => {
                    self.parse_time_functions(ObjectName(vec![w.to_ident()]))
                }
                Keyword::CASE => self.parse_case_expr(),
                Keyword::CAST => self.parse_cast_expr(),
                Keyword::TRY_CAST => self.parse_try_cast_expr(),
                Keyword::SAFE_CAST => self.parse_safe_cast_expr(),
                Keyword::EXISTS => self.parse_exists_expr(false),
                Keyword::EXTRACT => self.parse_extract_expr(),
                Keyword::CEIL => self.parse_ceil_floor_expr(true),
                Keyword::FLOOR => self.parse_ceil_floor_expr(false),
                Keyword::POSITION => self.parse_position_expr(),
                Keyword::SUBSTRING => self.parse_substring_expr(),
                Keyword::OVERLAY => self.parse_overlay_expr(),
                Keyword::TRIM => self.parse_trim_expr(),
                Keyword::INTERVAL => self.parse_interval(),
                Keyword::LISTAGG => self.parse_listagg_expr(),
                Keyword::ARRAY if self.peek_token() == Token::LBracket => {
                    self.expect_token(&Token::LBracket)?;
                    self.parse_array_expr(true)
                }
                Keyword::ARRAY
                    if self.peek_token() == Token::LParen
                        && !dialect_of!(self is ClickHouseDialect) =>
                {
                    self.expect_token(&Token::LParen)?;
                    self.parse_array_subquery()
                }
                Keyword::ARRAY_AGG => self.parse_array_agg_expr(),
                Keyword::NOT => self.parse_not(),
                Keyword::MATCH if dialect_of!(self is MySqlDialect | GenericDialect) => {
                    self.parse_match_against()
                }
                _ => match self.peek_token().token {
                    Token::LParen | Token::Period => {
                        let mut id_parts: Vec<Ident> = vec![w.to_ident()];
                        while self.consume_token(&Token::Period) {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Word(w) => id_parts.push(w.to_ident()),
                                _ => {
                                    return self
                                        .expected("an identifier or a '*' after '.'", next_token);
                                }
                            }
                        }

                        if self.consume_token(&Token::LParen) {
                            self.prev_token();
                            self.parse_function(ObjectName(id_parts))
                        } else {
                            Ok(Expr::CompoundIdentifier(id_parts))
                        }
                    }
                    Token::SingleQuotedString(_)
                    | Token::DoubleQuotedString(_)
                    | Token::HexStringLiteral(_)
                        if w.value.starts_with('_') =>
                    {
                        Ok(Expr::IntroducedString {
                            introducer: w.value,
                            value: self.parse_introduced_string_value()?,
                        })
                    }
                    _ => Ok(Expr::Identifier(w.to_ident())),
                },
            },
            Token::LBracket => self.parse_array_expr(false),
            tok @ Token::Minus | tok @ Token::Plus => {
                let op = if tok == Token::Plus {
                    UnaryOperator::Plus
                } else {
                    UnaryOperator::Minus
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(self.parse_subexpr(Self::PLUS_MINUS_PREC)?),
                })
            }
            tok @ Token::DoubleExclamationMark
            | tok @ Token::PGSquareRoot
            | tok @ Token::PGCubeRoot
            | tok @ Token::AtSign
            | tok @ Token::Tilde
                if dialect_of!(self is PostgreSqlDialect) =>
            {
                let op = match tok {
                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
                    Token::AtSign => UnaryOperator::PGAbs,
                    Token::Tilde => UnaryOperator::PGBitwiseNot,
                    _ => unreachable!(),
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(self.parse_subexpr(Self::PLUS_MINUS_PREC)?),
                })
            }
            Token::EscapedStringLiteral(_) if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::Number(_, _)
            | Token::SingleQuotedString(_)
            | Token::DoubleQuotedString(_)
            | Token::DollarQuotedString(_)
            | Token::SingleQuotedByteStringLiteral(_)
            | Token::DoubleQuotedByteStringLiteral(_)
            | Token::RawStringLiteral(_)
            | Token::NationalStringLiteral(_)
            | Token::HexStringLiteral(_) => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::LParen => {
                let expr =
                    if self.parse_keyword(Keyword::SELECT) || self.parse_keyword(Keyword::WITH) {
                        self.prev_token();
                        Expr::Subquery(Box::new(self.parse_query()?))
                    } else {
                        let exprs = self.parse_comma_separated(Parser::parse_expr)?;
                        match exprs.len() {
                            0 => unreachable!(), // parse_comma_separated returns at least one expr
                            1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
                            _ => Expr::Tuple(exprs),
                        }
                    };
                self.expect_token(&Token::RParen)?;
                if !self.consume_token(&Token::Period) {
                    Ok(expr)
                } else {
                    let tok = self.next_token();
                    let key = match tok.token {
                        Token::Word(word) => word.to_ident(),
                        _ => return parser_err!(format!("Expected identifier, found: {tok}")),
                    };
                    Ok(Expr::CompositeAccess {
                        expr: Box::new(expr),
                        key,
                    })
                }
            }
            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            _ => self.expected("an expression:", next_token),
        }?;

        if self.parse_keyword(Keyword::COLLATE) {
            Ok(Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name()?,
            })
        } else {
            Ok(expr)
        }
    }

    pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let distinct = self.parse_all_or_distinct()?;
        let args = self.parse_optional_args()?;
        let over = if self.parse_keyword(Keyword::OVER) {
            self.expect_token(&Token::LParen)?;
            let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
                self.parse_comma_separated(Parser::parse_expr)?
            } else {
                vec![]
            };
            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                self.parse_comma_separated(Parser::parse_order_by_expr)?
            } else {
                vec![]
            };
            let window_frame = if !self.consume_token(&Token::RParen) {
                let window_frame = self.parse_window_frame()?;
                self.expect_token(&Token::RParen)?;
                Some(window_frame)
            } else {
                None
            };

            Some(WindowSpec {
                partition_by,
                order_by,
                window_frame,
            })
        } else {
            None
        };
        Ok(Expr::Function(Function {
            name,
            args,
            over,
            distinct,
            special: false,
        }))
    }

    pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
        let args = if self.consume_token(&Token::LParen) {
            self.parse_optional_args()?
        } else {
            vec![]
        };
        Ok(Expr::Function(Function {
            name,
            args,
            over: None,
            distinct: false,
            special: false,
        }))
    }

    pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::ROWS => Ok(WindowFrameUnits::Rows),
                Keyword::RANGE => Ok(WindowFrameUnits::Range),
                Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
                _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
            },
            _ => self.expected("ROWS, RANGE, GROUPS", next_token),
        }
    }

    pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
        let units = self.parse_window_frame_units()?;
        let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
            let start_bound = self.parse_window_frame_bound()?;
            self.expect_keyword(Keyword::AND)?;
            let end_bound = Some(self.parse_window_frame_bound()?);
            (start_bound, end_bound)
        } else {
            (self.parse_window_frame_bound()?, None)
        };
        Ok(WindowFrame {
            units,
            start_bound,
            end_bound,
        })
    }

    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
            Ok(WindowFrameBound::CurrentRow)
        } else {
            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
                None
            } else {
                Some(Box::new(match self.peek_token().token {
                    Token::SingleQuotedString(_) => self.parse_interval()?,
                    _ => self.parse_expr()?,
                }))
            };
            if self.parse_keyword(Keyword::PRECEDING) {
                Ok(WindowFrameBound::Preceding(rows))
            } else if self.parse_keyword(Keyword::FOLLOWING) {
                Ok(WindowFrameBound::Following(rows))
            } else {
                self.expected("PRECEDING or FOLLOWING", self.peek_token())
            }
        }
    }

    fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
                self.expect_token(&Token::RParen)?;
                Ok(Expr::GroupingSets(result))
            } else if self.parse_keyword(Keyword::CUBE) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
                self.expect_token(&Token::RParen)?;
                Ok(Expr::Cube(result))
            } else if self.parse_keyword(Keyword::ROLLUP) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
                self.expect_token(&Token::RParen)?;
                Ok(Expr::Rollup(result))
            } else {
                self.parse_expr()
            }
        } else {
            self.parse_expr()
        }
    }

    fn parse_tuple(
        &mut self,
        lift_singleton: bool,
        allow_empty: bool,
    ) -> Result<Vec<Expr>, ParserError> {
        if lift_singleton {
            if self.consume_token(&Token::LParen) {
                let result = if allow_empty && self.consume_token(&Token::RParen) {
                    vec![]
                } else {
                    let result = self.parse_comma_separated(Parser::parse_expr)?;
                    self.expect_token(&Token::RParen)?;
                    result
                };
                Ok(result)
            } else {
                Ok(vec![self.parse_expr()?])
            }
        } else {
            self.expect_token(&Token::LParen)?;
            let result = if allow_empty && self.consume_token(&Token::RParen) {
                vec![]
            } else {
                let result = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                result
            };
            Ok(result)
        }
    }

    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
        let mut operand = None;
        if !self.parse_keyword(Keyword::WHEN) {
            operand = Some(Box::new(self.parse_expr()?));
            self.expect_keyword(Keyword::WHEN)?;
        }
        let mut conditions = vec![];
        let mut results = vec![];
        loop {
            conditions.push(self.parse_expr()?);
            self.expect_keyword(Keyword::THEN)?;
            results.push(self.parse_expr()?);
            if !self.parse_keyword(Keyword::WHEN) {
                break;
            }
        }
        let else_result = if self.parse_keyword(Keyword::ELSE) {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };
        self.expect_keyword(Keyword::END)?;
        Ok(Expr::Case {
            operand,
            conditions,
            results,
            else_result,
        })
    }

    pub fn parse_cast_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        self.expect_keyword(Keyword::AS)?;
        let data_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Cast {
            expr: Box::new(expr),
            data_type,
        })
    }

    pub fn parse_try_cast_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        self.expect_keyword(Keyword::AS)?;
        let data_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::TryCast {
            expr: Box::new(expr),
            data_type,
        })
    }

    pub fn parse_safe_cast_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        self.expect_keyword(Keyword::AS)?;
        let data_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::SafeCast {
            expr: Box::new(expr),
            data_type,
        })
    }

    pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let exists_node = Expr::Exists {
            negated,
            subquery: Box::new(self.parse_query()?),
        };
        self.expect_token(&Token::RParen)?;
        Ok(exists_node)
    }

    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let field = self.parse_date_time_field()?;
        self.expect_keyword(Keyword::FROM)?;
        let expr = self.parse_expr()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Extract {
            field,
            expr: Box::new(expr),
        })
    }

    pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        let mut field = DateTimeField::NoDateTime;
        let keyword_to = self.parse_keyword(Keyword::TO);
        if keyword_to {
            field = self.parse_date_time_field()?;
        }
        self.expect_token(&Token::RParen)?;
        if is_ceil {
            Ok(Expr::Ceil {
                expr: Box::new(expr),
                field,
            })
        } else {
            Ok(Expr::Floor {
                expr: Box::new(expr),
                field,
            })
        }
    }

    pub fn parse_position_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;

        let expr = self.parse_subexpr(Self::BETWEEN_PREC)?;
        if self.parse_keyword(Keyword::IN) {
            let from = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Position {
                expr: Box::new(expr),
                r#in: Box::new(from),
            })
        } else {
            parser_err!("Position function must include IN keyword".to_string())
        }
    }

    pub fn parse_substring_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        let mut from_expr = None;
        if self.parse_keyword(Keyword::FROM) || self.consume_token(&Token::Comma) {
            from_expr = Some(self.parse_expr()?);
        }

        let mut to_expr = None;
        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
            to_expr = Some(self.parse_expr()?);
        }
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Substring {
            expr: Box::new(expr),
            substring_from: from_expr.map(Box::new),
            substring_for: to_expr.map(Box::new),
        })
    }

    pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        self.expect_keyword(Keyword::PLACING)?;
        let what_expr = self.parse_expr()?;
        self.expect_keyword(Keyword::FROM)?;
        let from_expr = self.parse_expr()?;
        let mut for_expr = None;
        if self.parse_keyword(Keyword::FOR) {
            for_expr = Some(self.parse_expr()?);
        }
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Overlay {
            expr: Box::new(expr),
            overlay_what: Box::new(what_expr),
            overlay_from: Box::new(from_expr),
            overlay_for: for_expr.map(Box::new),
        })
    }

    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let mut trim_where = None;
        if let Token::Word(word) = self.peek_token().token {
            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING]
                .iter()
                .any(|d| word.keyword == *d)
            {
                trim_where = Some(self.parse_trim_where()?);
            }
        }
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::FROM) {
            let trim_what = Box::new(expr);
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: Some(trim_what),
            })
        } else {
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: None,
            })
        }
    }

    pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::BOTH => Ok(TrimWhereField::Both),
                Keyword::LEADING => Ok(TrimWhereField::Leading),
                Keyword::TRAILING => Ok(TrimWhereField::Trailing),
                _ => self.expected("trim_where field", next_token)?,
            },
            _ => self.expected("trim_where field", next_token),
        }
    }

    pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
        if self.peek_token().token == Token::RBracket {
            let _ = self.next_token(); // consume the closing `]`
            Ok(Expr::Array(Array {
                elem: vec![],
                named,
            }))
        } else {
            let exprs = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RBracket)?;
            Ok(Expr::Array(Array { elem: exprs, named }))
        }
    }

    pub fn parse_array_subquery(&mut self) -> Result<Expr, ParserError> {
        let query = self.parse_query()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::ArraySubquery(Box::new(query)))
    }

    pub fn parse_listagg_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let distinct = self.parse_all_or_distinct()?;
        let expr = Box::new(self.parse_expr()?);
        let separator = if self.consume_token(&Token::Comma) {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };
        let on_overflow = if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Some(ListAggOnOverflow::Error)
            } else {
                self.expect_keyword(Keyword::TRUNCATE)?;
                let filler = match self.peek_token().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token(),
                    )?,
                };
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
                }
                self.expect_keyword(Keyword::COUNT)?;
                Some(ListAggOnOverflow::Truncate { filler, with_count })
            }
        } else {
            None
        };
        self.expect_token(&Token::RParen)?;
        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by_expr = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by_expr
        } else {
            vec![]
        };
        Ok(Expr::ListAgg(ListAgg {
            distinct,
            expr,
            separator,
            on_overflow,
            within_group,
        }))
    }

    pub fn parse_array_agg_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let distinct = self.parse_keyword(Keyword::DISTINCT);
        let expr = Box::new(self.parse_expr()?);
        if !self.dialect.supports_within_after_array_aggregation() {
            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                let order_by_expr = self.parse_order_by_expr()?;
                Some(Box::new(order_by_expr))
            } else {
                None
            };
            let limit = if self.parse_keyword(Keyword::LIMIT) {
                self.parse_limit()?.map(Box::new)
            } else {
                None
            };
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::ArrayAgg(ArrayAgg {
                distinct,
                expr,
                order_by,
                limit,
                within_group: false,
            }));
        }
        self.expect_token(&Token::RParen)?;
        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by_expr = self.parse_order_by_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(Box::new(order_by_expr))
        } else {
            None
        };

        Ok(Expr::ArrayAgg(ArrayAgg {
            distinct,
            expr,
            order_by: within_group,
            limit: None,
            within_group: true,
        }))
    }

    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::WEEK => Ok(DateTimeField::Week),
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                _ => self.expected("date/time field", next_token),
            },
            _ => self.expected("date/time field", next_token),
        }
    }

    pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
        match self.peek_token().token {
            Token::Word(w) => match w.keyword {
                Keyword::EXISTS => {
                    let negated = true;
                    let _ = self.parse_keyword(Keyword::EXISTS);
                    self.parse_exists_expr(negated)
                }
                _ => Ok(Expr::UnaryOp {
                    op: UnaryOperator::Not,
                    expr: Box::new(self.parse_subexpr(Self::UNARY_NOT_PREC)?),
                }),
            },
            _ => Ok(Expr::UnaryOp {
                op: UnaryOperator::Not,
                expr: Box::new(self.parse_subexpr(Self::UNARY_NOT_PREC)?),
            }),
        }
    }

    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
        let columns = self.parse_parenthesized_column_list(Mandatory, false)?;

        self.expect_keyword(Keyword::AGAINST)?;

        self.expect_token(&Token::LParen)?;

        let match_value = self.parse_value()?;

        let in_natural_language_mode_keywords = &[
            Keyword::IN,
            Keyword::NATURAL,
            Keyword::LANGUAGE,
            Keyword::MODE,
        ];

        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];

        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];

        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
            if self.parse_keywords(with_query_expansion_keywords) {
                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
            } else {
                Some(SearchModifier::InNaturalLanguageMode)
            }
        } else if self.parse_keywords(in_boolean_mode_keywords) {
            Some(SearchModifier::InBooleanMode)
        } else if self.parse_keywords(with_query_expansion_keywords) {
            Some(SearchModifier::WithQueryExpansion)
        } else {
            None
        };

        self.expect_token(&Token::RParen)?;

        Ok(Expr::MatchAgainst {
            columns,
            match_value,
            opt_search_modifier,
        })
    }

    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        let value = self.parse_interval_expr()?;

        let leading_field = match self.peek_token().token {
            Token::Word(kw)
                if [
                    Keyword::YEAR,
                    Keyword::MONTH,
                    Keyword::WEEK,
                    Keyword::DAY,
                    Keyword::HOUR,
                    Keyword::MINUTE,
                    Keyword::SECOND,
                    Keyword::CENTURY,
                    Keyword::DECADE,
                    Keyword::DOW,
                    Keyword::DOY,
                    Keyword::EPOCH,
                    Keyword::ISODOW,
                    Keyword::ISOYEAR,
                    Keyword::JULIAN,
                    Keyword::MICROSECOND,
                    Keyword::MICROSECONDS,
                    Keyword::MILLENIUM,
                    Keyword::MILLENNIUM,
                    Keyword::MILLISECOND,
                    Keyword::MILLISECONDS,
                    Keyword::NANOSECOND,
                    Keyword::NANOSECONDS,
                    Keyword::QUARTER,
                    Keyword::TIMEZONE,
                    Keyword::TIMEZONE_HOUR,
                    Keyword::TIMEZONE_MINUTE,
                ]
                .iter()
                .any(|d| kw.keyword == *d) =>
            {
                Some(self.parse_date_time_field()?)
            }
            _ => None,
        };

        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        })
    }
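
    /// Parse an operator following an already-parsed expression, producing
    /// binary operations, IS/IN/BETWEEN/LIKE forms, `::` casts, and
    /// array/JSON access. The dialect is consulted first via
    /// [`Dialect::parse_infix`].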
    pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
        if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
            return infix;
        }

        let tok = self.next_token();

        let regular_binary_operator = match &tok.token {
            Token::Spaceship => Some(BinaryOperator::Spaceship),
            Token::DoubleEq => Some(BinaryOperator::Eq),
            Token::Eq => Some(BinaryOperator::Eq),
            Token::Neq => Some(BinaryOperator::NotEq),
            Token::Gt => Some(BinaryOperator::Gt),
            Token::GtEq => Some(BinaryOperator::GtEq),
            Token::Lt => Some(BinaryOperator::Lt),
            Token::LtEq => Some(BinaryOperator::LtEq),
            Token::Plus => Some(BinaryOperator::Plus),
            Token::Minus => Some(BinaryOperator::Minus),
            Token::Mul => Some(BinaryOperator::Multiply),
            Token::Mod => Some(BinaryOperator::Modulo),
            Token::StringConcat => Some(BinaryOperator::StringConcat),
            Token::Pipe => Some(BinaryOperator::BitwiseOr),
            Token::Caret => {
                if dialect_of!(self is PostgreSqlDialect) {
                    Some(BinaryOperator::PGExp)
                } else {
                    Some(BinaryOperator::BitwiseXor)
                }
            }
            Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
            Token::Div => Some(BinaryOperator::Divide),
            Token::ShiftLeft if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Some(BinaryOperator::PGBitwiseShiftLeft)
            }
            Token::ShiftRight if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Some(BinaryOperator::PGBitwiseShiftRight)
            }
            Token::Sharp if dialect_of!(self is PostgreSqlDialect) => {
                Some(BinaryOperator::PGBitwiseXor)
            }
            Token::Tilde => Some(BinaryOperator::PGRegexMatch),
            Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
            Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
            Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
            Token::Word(w) => match w.keyword {
                Keyword::AND => Some(BinaryOperator::And),
                Keyword::OR => Some(BinaryOperator::Or),
                Keyword::XOR => Some(BinaryOperator::Xor),
                Keyword::OPERATOR if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                    self.expect_token(&Token::LParen)?;
                    let mut idents = vec![];
                    loop {
                        idents.push(self.next_token().to_string());
                        if !self.consume_token(&Token::Period) {
                            break;
                        }
                    }
                    self.expect_token(&Token::RParen)?;
                    Some(BinaryOperator::PGCustomBinaryOperator(idents))
                }
                _ => None,
            },
            _ => None,
        };

        if let Some(op) = regular_binary_operator {
            if let Some(keyword) = self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL]) {
                self.expect_token(&Token::LParen)?;
                let right = self.parse_subexpr(precedence)?;
                self.expect_token(&Token::RParen)?;

                let right = match keyword {
                    Keyword::ALL => Box::new(Expr::AllOp(Box::new(right))),
                    Keyword::ANY => Box::new(Expr::AnyOp(Box::new(right))),
                    _ => unreachable!(),
                };

                Ok(Expr::BinaryOp {
                    left: Box::new(expr),
                    op,
                    right,
                })
            } else {
                Ok(Expr::BinaryOp {
                    left: Box::new(expr),
                    op,
                    right: Box::new(self.parse_subexpr(precedence)?),
                })
            }
        } else if let Token::Word(w) = &tok.token {
            match w.keyword {
                Keyword::IS => {
                    if self.parse_keyword(Keyword::NULL) {
                        Ok(Expr::IsNull(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
                        Ok(Expr::IsNotNull(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::TRUE]) {
                        Ok(Expr::IsTrue(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
                        Ok(Expr::IsNotTrue(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::FALSE]) {
                        Ok(Expr::IsFalse(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
                        Ok(Expr::IsNotFalse(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
                        Ok(Expr::IsUnknown(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
                        Ok(Expr::IsNotUnknown(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
                        let expr2 = self.parse_expr()?;
                        Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
                    {
                        let expr2 = self.parse_expr()?;
                        Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
                    } else {
                        self.expected(
                            "[NOT] NULL or TRUE|FALSE or [NOT] DISTINCT FROM after IS",
                            self.peek_token(),
                        )
                    }
                }
                Keyword::AT => {
                    if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE]) {
                        let time_zone = self.next_token();
                        match time_zone.token {
                            Token::SingleQuotedString(time_zone) => {
                                log::trace!("Peek token: {:?}", self.peek_token());
                                Ok(Expr::AtTimeZone {
                                    timestamp: Box::new(expr),
                                    time_zone,
                                })
                            }
                            _ => self.expected(
                                "Expected Token::SingleQuotedString after AT TIME ZONE",
                                time_zone,
                            ),
                        }
                    } else {
                        self.expected("Expected Token::Word after AT", tok)
                    }
                }
                Keyword::NOT
                | Keyword::IN
                | Keyword::BETWEEN
                | Keyword::LIKE
                | Keyword::ILIKE
                | Keyword::SIMILAR => {
                    self.prev_token();
                    let negated = self.parse_keyword(Keyword::NOT);
                    if self.parse_keyword(Keyword::IN) {
                        self.parse_in(expr, negated)
                    } else if self.parse_keyword(Keyword::BETWEEN) {
                        self.parse_between(expr, negated)
                    } else if self.parse_keyword(Keyword::LIKE) {
                        Ok(Expr::Like {
                            negated,
                            expr: Box::new(expr),
                            pattern: Box::new(self.parse_subexpr(Self::LIKE_PREC)?),
                            escape_char: self.parse_escape_char()?,
                        })
                    } else if self.parse_keyword(Keyword::ILIKE) {
                        Ok(Expr::ILike {
                            negated,
                            expr: Box::new(expr),
                            pattern: Box::new(self.parse_subexpr(Self::LIKE_PREC)?),
                            escape_char: self.parse_escape_char()?,
                        })
                    } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
                        Ok(Expr::SimilarTo {
                            negated,
                            expr: Box::new(expr),
                            pattern: Box::new(self.parse_subexpr(Self::LIKE_PREC)?),
                            escape_char: self.parse_escape_char()?,
                        })
                    } else {
                        self.expected("IN or BETWEEN after NOT", self.peek_token())
                    }
                }
                _ => parser_err!(format!("No infix parser for token {:?}", tok.token)),
            }
        } else if Token::DoubleColon == tok {
            self.parse_pg_cast(expr)
        } else if Token::ExclamationMark == tok {
            Ok(Expr::UnaryOp {
                op: UnaryOperator::PGPostfixFactorial,
                expr: Box::new(expr),
            })
        } else if Token::LBracket == tok {
            if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
                return self.parse_array_index(expr);
            }
            self.parse_map_access(expr)
        } else if Token::Colon == tok {
            Ok(Expr::JsonAccess {
                left: Box::new(expr),
                operator: JsonOperator::Colon,
                right: Box::new(Expr::Value(self.parse_value()?)),
            })
        } else if Token::Arrow == tok
            || Token::LongArrow == tok
            || Token::HashArrow == tok
            || Token::HashLongArrow == tok
            || Token::AtArrow == tok
            || Token::ArrowAt == tok
            || Token::HashMinus == tok
            || Token::AtQuestion == tok
            || Token::AtAt == tok
        {
            let operator = match tok.token {
                Token::Arrow => JsonOperator::Arrow,
                Token::LongArrow => JsonOperator::LongArrow,
                Token::HashArrow => JsonOperator::HashArrow,
                Token::HashLongArrow => JsonOperator::HashLongArrow,
                Token::AtArrow => JsonOperator::AtArrow,
                Token::ArrowAt => JsonOperator::ArrowAt,
                Token::HashMinus => JsonOperator::HashMinus,
                Token::AtQuestion => JsonOperator::AtQuestion,
                Token::AtAt => JsonOperator::AtAt,
                _ => unreachable!(),
            };
            Ok(Expr::JsonAccess {
                left: Box::new(expr),
                operator,
                right: Box::new(self.parse_expr()?),
            })
        } else {
            parser_err!(format!("No infix parser for token {:?}", tok.token))
        }
    }

    pub fn parse_escape_char(&mut self) -> Result<Option<char>, ParserError> {
        if self.parse_keyword(Keyword::ESCAPE) {
            Ok(Some(self.parse_literal_char()?))
        } else {
            Ok(None)
        }
    }

    pub fn parse_array_index(&mut self, expr: Expr) -> Result<Expr, ParserError> {
        let index = self.parse_expr()?;
        self.expect_token(&Token::RBracket)?;
        let mut indexes: Vec<Expr> = vec![index];
        while self.consume_token(&Token::LBracket) {
            let index = self.parse_expr()?;
            self.expect_token(&Token::RBracket)?;
            indexes.push(index);
        }
        Ok(Expr::ArrayIndex {
            obj: Box::new(expr),
            indexes,
        })
    }

    pub fn parse_map_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
        let key = self.parse_map_key()?;
        let tok = self.consume_token(&Token::RBracket);
        debug!("Tok: {}", tok);
        let mut key_parts: Vec<Expr> = vec![key];
        while self.consume_token(&Token::LBracket) {
            let key = self.parse_map_key()?;
            let tok = self.consume_token(&Token::RBracket);
            debug!("Tok: {}", tok);
            key_parts.push(key);
        }
        match expr {
            e @ Expr::Identifier(_) | e @ Expr::CompoundIdentifier(_) => Ok(Expr::MapAccess {
                column: Box::new(e),
                keys: key_parts,
            }),
            _ => Ok(expr),
        }
    }

    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // `expr IN UNNEST(array_expr)`, as supported by BigQuery
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        let in_op = if self.parse_keyword(Keyword::SELECT) || self.parse_keyword(Keyword::WITH) {
            self.prev_token();
            Expr::InSubquery {
                expr: Box::new(expr),
                subquery: Box::new(self.parse_query()?),
                negated,
            }
        } else {
            Expr::InList {
                expr: Box::new(expr),
                list: self.parse_comma_separated(Parser::parse_expr)?,
                negated,
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }

    pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        let low = self.parse_subexpr(Self::BETWEEN_PREC)?;
        self.expect_keyword(Keyword::AND)?;
        let high = self.parse_subexpr(Self::BETWEEN_PREC)?;
        Ok(Expr::Between {
            expr: Box::new(expr),
            negated,
            low: Box::new(low),
            high: Box::new(high),
        })
    }

    pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
        Ok(Expr::Cast {
            expr: Box::new(expr),
            data_type: self.parse_data_type()?,
        })
    }
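
    // Relative precedence of operators and expression forms, consulted by
    // `get_next_precedence` and the prefix/infix parsers; higher values bind
    // more tightly.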
    const PLUS_MINUS_PREC: u8 = 30;
    const XOR_PREC: u8 = 24;
    const TIME_ZONE_PREC: u8 = 20;
    const BETWEEN_PREC: u8 = 20;
    const LIKE_PREC: u8 = 19;
    const IS_PREC: u8 = 17;
    const UNARY_NOT_PREC: u8 = 15;
    const AND_PREC: u8 = 10;
    const OR_PREC: u8 = 5;
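
    /// Return the precedence of the next operator in the token stream, or 0
    /// if the next token does not continue the current expression.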
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        if let Some(precedence) = self.dialect.get_next_precedence(self) {
            return precedence;
        }

        let token = self.peek_token();
        debug!("get_next_precedence() {:?}", token);
        let token_0 = self.peek_nth_token(0);
        let token_1 = self.peek_nth_token(1);
        let token_2 = self.peek_nth_token(2);
        debug!("0: {token_0} 1: {token_1} 2: {token_2}");
        match token.token {
            Token::Word(w) if w.keyword == Keyword::OR => Ok(Self::OR_PREC),
            Token::Word(w) if w.keyword == Keyword::AND => Ok(Self::AND_PREC),
            Token::Word(w) if w.keyword == Keyword::XOR => Ok(Self::XOR_PREC),

            Token::Word(w) if w.keyword == Keyword::AT => {
                match (self.peek_nth_token(1).token, self.peek_nth_token(2).token) {
                    (Token::Word(w), Token::Word(w2))
                        if w.keyword == Keyword::TIME && w2.keyword == Keyword::ZONE =>
                    {
                        Ok(Self::TIME_ZONE_PREC)
                    }
                    _ => Ok(0),
                }
            }

            Token::Word(w) if w.keyword == Keyword::NOT => match self.peek_nth_token(1).token {
                Token::Word(w) if w.keyword == Keyword::IN => Ok(Self::BETWEEN_PREC),
                Token::Word(w) if w.keyword == Keyword::BETWEEN => Ok(Self::BETWEEN_PREC),
                Token::Word(w) if w.keyword == Keyword::LIKE => Ok(Self::LIKE_PREC),
                Token::Word(w) if w.keyword == Keyword::ILIKE => Ok(Self::LIKE_PREC),
                Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(Self::LIKE_PREC),
                _ => Ok(0),
            },
            Token::Word(w) if w.keyword == Keyword::IS => Ok(Self::IS_PREC),
            Token::Word(w) if w.keyword == Keyword::IN => Ok(Self::BETWEEN_PREC),
            Token::Word(w) if w.keyword == Keyword::BETWEEN => Ok(Self::BETWEEN_PREC),
            Token::Word(w) if w.keyword == Keyword::LIKE => Ok(Self::LIKE_PREC),
            Token::Word(w) if w.keyword == Keyword::ILIKE => Ok(Self::LIKE_PREC),
            Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(Self::LIKE_PREC),
            Token::Word(w) if w.keyword == Keyword::OPERATOR => Ok(Self::BETWEEN_PREC),
            Token::Eq
            | Token::Lt
            | Token::LtEq
            | Token::Neq
            | Token::Gt
            | Token::GtEq
            | Token::DoubleEq
            | Token::Tilde
            | Token::TildeAsterisk
            | Token::ExclamationMarkTilde
            | Token::ExclamationMarkTildeAsterisk
            | Token::Spaceship => Ok(20),
            Token::Pipe => Ok(21),
            Token::Caret | Token::Sharp | Token::ShiftRight | Token::ShiftLeft => Ok(22),
            Token::Ampersand => Ok(23),
            Token::Plus | Token::Minus => Ok(Self::PLUS_MINUS_PREC),
            Token::Mul | Token::Div | Token::Mod | Token::StringConcat => Ok(40),
            Token::DoubleColon => Ok(50),
            Token::Colon => Ok(50),
            Token::ExclamationMark => Ok(50),
            Token::LBracket
            | Token::LongArrow
            | Token::Arrow
            | Token::HashArrow
            | Token::HashLongArrow
            | Token::AtArrow
            | Token::ArrowAt
            | Token::HashMinus
            | Token::AtQuestion
            | Token::AtAt => Ok(50),
            _ => Ok(0),
        }
    }
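
    /// Return the first non-whitespace token that has not yet been processed
    /// (or [`Token::EOF`] if the stream is exhausted) without advancing.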
    pub fn peek_token(&self) -> TokenWithLocation {
        self.peek_nth_token(0)
    }

    pub fn peek_nth_token(&self, mut n: usize) -> TokenWithLocation {
        let mut index = self.index;
        loop {
            index += 1;
            match self.tokens.get(index - 1) {
                Some(TokenWithLocation {
                    token: Token::Whitespace(_),
                    location: _,
                }) => continue,
                non_whitespace => {
                    if n == 0 {
                        return non_whitespace.cloned().unwrap_or(TokenWithLocation {
                            token: Token::EOF,
                            location: Location { line: 0, column: 0 },
                        });
                    }
                    n -= 1;
                }
            }
        }
    }
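
    /// Return the first non-whitespace token that has not yet been processed
    /// (or [`Token::EOF`]) and advance past it.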
    pub fn next_token(&mut self) -> TokenWithLocation {
        loop {
            self.index += 1;
            match self.tokens.get(self.index - 1) {
                Some(TokenWithLocation {
                    token: Token::Whitespace(_),
                    location: _,
                }) => continue,
                token => {
                    return token
                        .cloned()
                        .unwrap_or_else(|| TokenWithLocation::wrap(Token::EOF))
                }
            }
        }
    }

    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithLocation> {
        self.index += 1;
        self.tokens.get(self.index - 1)
    }
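
    /// Push back the last consumed token so that it can be processed again;
    /// panics if there is no previous token.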
    pub fn prev_token(&mut self) {
        loop {
            assert!(self.index > 0);
            self.index -= 1;
            if let Some(TokenWithLocation {
                token: Token::Whitespace(_),
                location: _,
            }) = self.tokens.get(self.index)
            {
                continue;
            }
            return;
        }
    }

    pub fn expected<T>(&self, expected: &str, found: TokenWithLocation) -> Result<T, ParserError> {
        parser_err!(format!("Expected {expected}, found: {found}"))
    }

    #[must_use]
    pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
        match self.peek_token().token {
            Token::Word(w) if expected == w.keyword => {
                self.next_token();
                true
            }
            _ => false,
        }
    }

    #[must_use]
    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
        let index = self.index;
        for &keyword in keywords {
            if !self.parse_keyword(keyword) {
                self.index = index;
                return false;
            }
        }
        true
    }

    #[must_use]
    pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
        match self.peek_token().token {
            Token::Word(w) => {
                keywords
                    .iter()
                    .find(|keyword| **keyword == w.keyword)
                    .map(|keyword| {
                        self.next_token();
                        *keyword
                    })
            }
            _ => None,
        }
    }

    pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
        if let Some(keyword) = self.parse_one_of_keywords(keywords) {
            Ok(keyword)
        } else {
            let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
            self.expected(
                &format!("one of {}", keywords.join(" or ")),
                self.peek_token(),
            )
        }
    }

    pub fn expect_keyword(&mut self, expected: Keyword) -> Result<(), ParserError> {
        if self.parse_keyword(expected) {
            Ok(())
        } else {
            self.expected(format!("{:?}", &expected).as_str(), self.peek_token())
        }
    }

    pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
        for &kw in expected {
            self.expect_keyword(kw)?;
        }
        Ok(())
    }

2219 #[must_use]
2221 pub fn consume_token(&mut self, expected: &Token) -> bool {
2222 if self.peek_token() == *expected {
2223 self.next_token();
2224 true
2225 } else {
2226 false
2227 }
2228 }
2229
2230 pub fn expect_token(&mut self, expected: &Token) -> Result<(), ParserError> {
2232 if self.consume_token(expected) {
2233 Ok(())
2234 } else {
2235 self.expected(&expected.to_string(), self.peek_token())
2236 }
2237 }
2238
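/// Parse the comma-separated list of projected select items, temporarily
/// allowing trailing commas for dialects (such as BigQuery) that accept them.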
2239 pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
2241 let old_value = self.options.trailing_commas;
2248 self.options.trailing_commas |= dialect_of!(self is BigQueryDialect);
2249
2250 let ret = self.parse_comma_separated(|p| p.parse_select_item());
2251 self.options.trailing_commas = old_value;
2252
2253 ret
2254 }
2255
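/// Parse a comma-separated list of one or more items accepted by `f`.
/// When trailing commas are allowed, a comma followed by a reserved keyword
/// or a closing token (`)`, `;`, `]`, `}`, EOF) terminates the list.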
2256 pub fn parse_comma_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
2258 where
2259 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
2260 {
2261 let mut values = vec![];
2262 loop {
2263 values.push(f(self)?);
2264 if !self.consume_token(&Token::Comma) {
2265 break;
2266 } else if self.options.trailing_commas {
2267 match self.peek_token().token {
2268 Token::Word(kw)
2269 if keywords::RESERVED_FOR_COLUMN_ALIAS
2270 .iter()
2271 .any(|d| kw.keyword == *d) =>
2272 {
2273 break;
2274 }
2275 Token::RParen
2276 | Token::SemiColon
2277 | Token::EOF
2278 | Token::RBracket
2279 | Token::RBrace => break,
2280 _ => continue,
2281 }
2282 }
2283 }
2284 Ok(values)
2285 }
2286
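/// Run the parser function `f`, restoring the token index (and returning
/// `None`) if it fails.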
2287 #[must_use]
2290 fn maybe_parse<T, F>(&mut self, mut f: F) -> Option<T>
2291 where
2292 F: FnMut(&mut Parser) -> Result<T, ParserError>,
2293 {
2294 let index = self.index;
2295 if let Ok(t) = f(self) {
2296 Some(t)
2297 } else {
2298 self.index = index;
2299 None
2300 }
2301 }
2302
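/// Parse an optional `ALL` or `DISTINCT`; returns `true` if `DISTINCT` was
/// parsed and errors if both keywords are present.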
2303 pub fn parse_all_or_distinct(&mut self) -> Result<bool, ParserError> {
2306 let all = self.parse_keyword(Keyword::ALL);
2307 let distinct = self.parse_keyword(Keyword::DISTINCT);
2308 if all && distinct {
2309 parser_err!("Cannot specify both ALL and DISTINCT")
2310 } else {
2311 Ok(distinct)
2312 }
2313 }
2314
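/// Parse a SQL `CREATE` statement, dispatching on the object type
/// (`TABLE`, `VIEW`, `FUNCTION`, `INDEX`, `SCHEMA`, `DATABASE`, `ROLE`,
/// `SEQUENCE`, ...) that follows.
///
/// A minimal usage sketch (assuming the crate's public `Parser::parse_sql`
/// entry point and the `GenericDialect`):
///
/// ```ignore
/// use sqlparser::dialect::GenericDialect;
/// use sqlparser::parser::Parser;
///
/// let statements = Parser::parse_sql(&GenericDialect {}, "CREATE TABLE t (a INT)")?;
/// ```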
2315 pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
2317 let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
2318 let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
2319 let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
2320 let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
2321 let global: Option<bool> = if global {
2322 Some(true)
2323 } else if local {
2324 Some(false)
2325 } else {
2326 None
2327 };
2328 let temporary = self
2329 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
2330 .is_some();
2331 if self.parse_keyword(Keyword::TABLE) {
2332 self.parse_create_table(or_replace, temporary, global, transient)
2333 } else if self.parse_keyword(Keyword::MATERIALIZED) || self.parse_keyword(Keyword::VIEW) {
2334 self.prev_token();
2335 self.parse_create_view(or_replace)
2336 } else if self.parse_keyword(Keyword::EXTERNAL) {
2337 self.parse_create_external_table(or_replace)
2338 } else if self.parse_keyword(Keyword::FUNCTION) {
2339 self.parse_create_function(or_replace, temporary)
2340 } else if or_replace {
2341 self.expected(
2342 "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
2343 self.peek_token(),
2344 )
2345 } else if self.parse_keyword(Keyword::INDEX) {
2346 self.parse_create_index(false)
2347 } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
2348 self.parse_create_index(true)
2349 } else if self.parse_keyword(Keyword::VIRTUAL) {
2350 self.parse_create_virtual_table()
2351 } else if self.parse_keyword(Keyword::SCHEMA) {
2352 self.parse_create_schema()
2353 } else if self.parse_keyword(Keyword::DATABASE) {
2354 self.parse_create_database()
2355 } else if self.parse_keyword(Keyword::ROLE) {
2356 self.parse_create_role()
2357 } else if self.parse_keyword(Keyword::SEQUENCE) {
2358 self.parse_create_sequence(temporary)
2359 } else {
2360 self.expected("an object type after CREATE", self.peek_token())
2361 }
2362 }
2363
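/// Parse a `CACHE [flag] TABLE <table_name> [OPTIONS(...)] [[AS] <query>]`
/// statement (Spark SQL-style table caching).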
2364 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
2366 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
2367 if self.parse_keyword(Keyword::TABLE) {
2368 let table_name = self.parse_object_name()?;
2369 if self.peek_token().token != Token::EOF {
2370 if let Token::Word(word) = self.peek_token().token {
2371 if word.keyword == Keyword::OPTIONS {
2372 options = self.parse_options(Keyword::OPTIONS)?
2373 }
2374 };
2375
2376 if self.peek_token().token != Token::EOF {
2377 let (a, q) = self.parse_as_query()?;
2378 has_as = a;
2379 query = Some(q);
2380 }
2381
2382 Ok(Statement::Cache {
2383 table_flag,
2384 table_name,
2385 has_as,
2386 options,
2387 query,
2388 })
2389 } else {
2390 Ok(Statement::Cache {
2391 table_flag,
2392 table_name,
2393 has_as,
2394 options,
2395 query,
2396 })
2397 }
2398 } else {
2399 table_flag = Some(self.parse_object_name()?);
2400 if self.parse_keyword(Keyword::TABLE) {
2401 let table_name = self.parse_object_name()?;
2402 if self.peek_token() != Token::EOF {
2403 if let Token::Word(word) = self.peek_token().token {
2404 if word.keyword == Keyword::OPTIONS {
2405 options = self.parse_options(Keyword::OPTIONS)?
2406 }
2407 };
2408
2409 if self.peek_token() != Token::EOF {
2410 let (a, q) = self.parse_as_query()?;
2411 has_as = a;
2412 query = Some(q);
2413 }
2414
2415 Ok(Statement::Cache {
2416 table_flag,
2417 table_name,
2418 has_as,
2419 options,
2420 query,
2421 })
2422 } else {
2423 Ok(Statement::Cache {
2424 table_flag,
2425 table_name,
2426 has_as,
2427 options,
2428 query,
2429 })
2430 }
2431 } else {
2432 if self.peek_token() == Token::EOF {
2433 self.prev_token();
2434 }
2435 self.expected("a `TABLE` keyword", self.peek_token())
2436 }
2437 }
2438 }
2439
2440 pub fn parse_as_query(&mut self) -> Result<(bool, Query), ParserError> {
2442 match self.peek_token().token {
2443 Token::Word(word) => match word.keyword {
2444 Keyword::AS => {
2445 self.next_token();
2446 Ok((true, self.parse_query()?))
2447 }
2448 _ => Ok((false, self.parse_query()?)),
2449 },
2450 _ => self.expected("a QUERY statement", self.peek_token()),
2451 }
2452 }
2453
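/// Parse an `UNCACHE TABLE [IF EXISTS] <table_name>` statement.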
2454 pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
2456 let has_table = self.parse_keyword(Keyword::TABLE);
2457 if has_table {
2458 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
2459 let table_name = self.parse_object_name()?;
2460 if self.peek_token().token == Token::EOF {
2461 Ok(Statement::UNCache {
2462 table_name,
2463 if_exists,
2464 })
2465 } else {
2466 self.expected("an `EOF`", self.peek_token())
2467 }
2468 } else {
2469 self.expected("a `TABLE` keyword", self.peek_token())
2470 }
2471 }
2472
2473 pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
2475 self.expect_keyword(Keyword::TABLE)?;
2476 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
2477 let table_name = self.parse_object_name()?;
2478 self.expect_keyword(Keyword::USING)?;
2479 let module_name = self.parse_identifier()?;
2480 let module_args = self.parse_parenthesized_column_list(Optional, false)?;
2485 Ok(Statement::CreateVirtualTable {
2486 name: table_name,
2487 if_not_exists,
2488 module_name,
2489 module_args,
2490 })
2491 }
2492
2493 pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
2494 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
2495
2496 let schema_name = self.parse_schema_name()?;
2497
2498 Ok(Statement::CreateSchema {
2499 schema_name,
2500 if_not_exists,
2501 })
2502 }
2503
2504 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
2505 if self.parse_keyword(Keyword::AUTHORIZATION) {
2506 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
2507 } else {
2508 let name = self.parse_object_name()?;
2509
2510 if self.parse_keyword(Keyword::AUTHORIZATION) {
2511 Ok(SchemaName::NamedAuthorization(
2512 name,
2513 self.parse_identifier()?,
2514 ))
2515 } else {
2516 Ok(SchemaName::Simple(name))
2517 }
2518 }
2519 }
2520
2521 pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
2522 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
2523 let db_name = self.parse_object_name()?;
2524 let mut location = None;
2525 let mut managed_location = None;
2526 loop {
2527 match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
2528 Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
2529 Some(Keyword::MANAGEDLOCATION) => {
2530 managed_location = Some(self.parse_literal_string()?)
2531 }
2532 _ => break,
2533 }
2534 }
2535 Ok(Statement::CreateDatabase {
2536 db_name,
2537 if_not_exists: ine,
2538 location,
2539 managed_location,
2540 })
2541 }
2542
2543 pub fn parse_optional_create_function_using(
2544 &mut self,
2545 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
2546 if !self.parse_keyword(Keyword::USING) {
2547 return Ok(None);
2548 };
2549 let keyword =
2550 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
2551
2552 let uri = self.parse_literal_string()?;
2553
2554 match keyword {
2555 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
2556 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
2557 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
2558 _ => self.expected(
2559 "JAR, FILE or ARCHIVE, got {:?}",
2560 TokenWithLocation::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
2561 ),
2562 }
2563 }
2564
2565 pub fn parse_create_function(
2566 &mut self,
2567 or_replace: bool,
2568 temporary: bool,
2569 ) -> Result<Statement, ParserError> {
2570 if dialect_of!(self is HiveDialect) {
2571 let name = self.parse_object_name()?;
2572 self.expect_keyword(Keyword::AS)?;
2573 let class_name = self.parse_function_definition()?;
2574 let params = CreateFunctionBody {
2575 as_: Some(class_name),
2576 using: self.parse_optional_create_function_using()?,
2577 ..Default::default()
2578 };
2579
2580 Ok(Statement::CreateFunction {
2581 or_replace,
2582 temporary,
2583 name,
2584 args: None,
2585 return_type: None,
2586 params,
2587 })
2588 } else if dialect_of!(self is PostgreSqlDialect) {
2589 let name = self.parse_object_name()?;
2590 self.expect_token(&Token::LParen)?;
2591 let args = if self.consume_token(&Token::RParen) {
2592 self.prev_token();
2593 None
2594 } else {
2595 Some(self.parse_comma_separated(Parser::parse_function_arg)?)
2596 };
2597
2598 self.expect_token(&Token::RParen)?;
2599
2600 let return_type = if self.parse_keyword(Keyword::RETURNS) {
2601 Some(self.parse_data_type()?)
2602 } else {
2603 None
2604 };
2605
2606 let params = self.parse_create_function_body()?;
2607
2608 Ok(Statement::CreateFunction {
2609 or_replace,
2610 temporary,
2611 name,
2612 args,
2613 return_type,
2614 params,
2615 })
2616 } else {
2617 self.prev_token();
2618 self.expected("an object type after CREATE", self.peek_token())
2619 }
2620 }
2621
2622 fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
2623 let mode = if self.parse_keyword(Keyword::IN) {
2624 Some(ArgMode::In)
2625 } else if self.parse_keyword(Keyword::OUT) {
2626 Some(ArgMode::Out)
2627 } else if self.parse_keyword(Keyword::INOUT) {
2628 Some(ArgMode::InOut)
2629 } else {
2630 None
2631 };
2632
2633 let mut name = None;
2635 let mut data_type = self.parse_data_type()?;
2636 if let DataType::Custom(n, _) = &data_type {
2637 name = Some(n.0[0].clone());
2639 data_type = self.parse_data_type()?;
2640 }
2641
2642 let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
2643 {
2644 Some(self.parse_expr()?)
2645 } else {
2646 None
2647 };
2648 Ok(OperateFunctionArg {
2649 mode,
2650 name,
2651 data_type,
2652 default_expr,
2653 })
2654 }
2655
2656 fn parse_create_function_body(&mut self) -> Result<CreateFunctionBody, ParserError> {
2657 let mut body = CreateFunctionBody::default();
2658 loop {
2659 fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
2660 if field.is_some() {
2661 return Err(ParserError::ParserError(format!(
2662 "{name} specified more than once",
2663 )));
2664 }
2665 Ok(())
2666 }
2667 if self.parse_keyword(Keyword::AS) {
2668 ensure_not_set(&body.as_, "AS")?;
2669 body.as_ = Some(self.parse_function_definition()?);
2670 } else if self.parse_keyword(Keyword::LANGUAGE) {
2671 ensure_not_set(&body.language, "LANGUAGE")?;
2672 body.language = Some(self.parse_identifier()?);
2673 } else if self.parse_keyword(Keyword::IMMUTABLE) {
2674 ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
2675 body.behavior = Some(FunctionBehavior::Immutable);
2676 } else if self.parse_keyword(Keyword::STABLE) {
2677 ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
2678 body.behavior = Some(FunctionBehavior::Stable);
2679 } else if self.parse_keyword(Keyword::VOLATILE) {
2680 ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
2681 body.behavior = Some(FunctionBehavior::Volatile);
2682 } else if self.parse_keyword(Keyword::RETURN) {
2683 ensure_not_set(&body.return_, "RETURN")?;
2684 body.return_ = Some(self.parse_expr()?);
2685 } else {
2686 return Ok(body);
2687 }
2688 }
2689 }
2690
2691 pub fn parse_create_external_table(
2692 &mut self,
2693 or_replace: bool,
2694 ) -> Result<Statement, ParserError> {
2695 self.expect_keyword(Keyword::TABLE)?;
2696 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
2697 let table_name = self.parse_object_name()?;
2698 let (columns, constraints) = self.parse_columns()?;
2699
2700 let hive_distribution = self.parse_hive_distribution()?;
2701 let hive_formats = self.parse_hive_formats()?;
2702
2703 let file_format = if let Some(ff) = &hive_formats.storage {
2704 match ff {
2705 HiveIOFormat::FileFormat { format } => Some(*format),
2706 _ => None,
2707 }
2708 } else {
2709 None
2710 };
2711 let location = hive_formats.location.clone();
2712 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
2713 Ok(CreateTableBuilder::new(table_name)
2714 .columns(columns)
2715 .constraints(constraints)
2716 .hive_distribution(hive_distribution)
2717 .hive_formats(Some(hive_formats))
2718 .table_properties(table_properties)
2719 .or_replace(or_replace)
2720 .if_not_exists(if_not_exists)
2721 .external(true)
2722 .file_format(file_format)
2723 .location(location)
2724 .build())
2725 }
2726
2727 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
2728 let next_token = self.next_token();
2729 match &next_token.token {
2730 Token::Word(w) => match w.keyword {
2731 Keyword::AVRO => Ok(FileFormat::AVRO),
2732 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
2733 Keyword::ORC => Ok(FileFormat::ORC),
2734 Keyword::PARQUET => Ok(FileFormat::PARQUET),
2735 Keyword::RCFILE => Ok(FileFormat::RCFILE),
2736 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
2737 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
2738 _ => self.expected("fileformat", next_token),
2739 },
2740 _ => self.expected("fileformat", next_token),
2741 }
2742 }
2743
2744 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
2745 let next_token = self.next_token();
2746 match &next_token.token {
2747 Token::Word(w) => match w.keyword {
2748 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
2749 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
2750 Keyword::JSON => Ok(AnalyzeFormat::JSON),
2751 _ => self.expected("TEXT, GRAPHVIZ or JSON", next_token),
2752 },
2753 _ => self.expected("TEXT, GRAPHVIZ or JSON", next_token),
2754 }
2755 }
2756
2757 pub fn parse_create_view(&mut self, or_replace: bool) -> Result<Statement, ParserError> {
2758 let materialized = self.parse_keyword(Keyword::MATERIALIZED);
2759 self.expect_keyword(Keyword::VIEW)?;
2760 let name = self.parse_object_name()?;
2763 let columns = self.parse_parenthesized_column_list(Optional, false)?;
2764 let with_options = self.parse_options(Keyword::WITH)?;
2765
2766 let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
2767 self.expect_keyword(Keyword::BY)?;
2768 self.parse_parenthesized_column_list(Optional, false)?
2769 } else {
2770 vec![]
2771 };
2772
2773 self.expect_keyword(Keyword::AS)?;
2774 let query = Box::new(self.parse_query()?);
2775 Ok(Statement::CreateView {
2777 name,
2778 columns,
2779 query,
2780 materialized,
2781 or_replace,
2782 with_options,
2783 cluster_by,
2784 })
2785 }
2786
2787 pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
2788 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
2789 let names = self.parse_comma_separated(Parser::parse_object_name)?;
2790
2791 let _ = self.parse_keyword(Keyword::WITH);
let optional_keywords = if dialect_of!(self is MsSqlDialect) {
2794 vec![Keyword::AUTHORIZATION]
2795 } else if dialect_of!(self is PostgreSqlDialect) {
2796 vec![
2797 Keyword::LOGIN,
2798 Keyword::NOLOGIN,
2799 Keyword::INHERIT,
2800 Keyword::NOINHERIT,
2801 Keyword::BYPASSRLS,
2802 Keyword::NOBYPASSRLS,
2803 Keyword::PASSWORD,
2804 Keyword::CREATEDB,
2805 Keyword::NOCREATEDB,
2806 Keyword::CREATEROLE,
2807 Keyword::NOCREATEROLE,
2808 Keyword::SUPERUSER,
2809 Keyword::NOSUPERUSER,
2810 Keyword::REPLICATION,
2811 Keyword::NOREPLICATION,
2812 Keyword::CONNECTION,
2813 Keyword::VALID,
2814 Keyword::IN,
2815 Keyword::ROLE,
2816 Keyword::ADMIN,
2817 Keyword::USER,
2818 ]
2819 } else {
2820 vec![]
2821 };
2822
2823 let mut authorization_owner = None;
2825 let mut login = None;
2827 let mut inherit = None;
2828 let mut bypassrls = None;
2829 let mut password = None;
2830 let mut create_db = None;
2831 let mut create_role = None;
2832 let mut superuser = None;
2833 let mut replication = None;
2834 let mut connection_limit = None;
2835 let mut valid_until = None;
2836 let mut in_role = vec![];
2837 let mut in_group = vec![];
2838 let mut role = vec![];
2839 let mut user = vec![];
2840 let mut admin = vec![];
2841
2842 while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
2843 match keyword {
2844 Keyword::AUTHORIZATION => {
2845 if authorization_owner.is_some() {
2846 parser_err!("Found multiple AUTHORIZATION")
2847 } else {
2848 authorization_owner = Some(self.parse_object_name()?);
2849 Ok(())
2850 }
2851 }
2852 Keyword::LOGIN | Keyword::NOLOGIN => {
2853 if login.is_some() {
2854 parser_err!("Found multiple LOGIN or NOLOGIN")
2855 } else {
2856 login = Some(keyword == Keyword::LOGIN);
2857 Ok(())
2858 }
2859 }
2860 Keyword::INHERIT | Keyword::NOINHERIT => {
2861 if inherit.is_some() {
2862 parser_err!("Found multiple INHERIT or NOINHERIT")
2863 } else {
2864 inherit = Some(keyword == Keyword::INHERIT);
2865 Ok(())
2866 }
2867 }
2868 Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
2869 if bypassrls.is_some() {
2870 parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS")
2871 } else {
2872 bypassrls = Some(keyword == Keyword::BYPASSRLS);
2873 Ok(())
2874 }
2875 }
2876 Keyword::CREATEDB | Keyword::NOCREATEDB => {
2877 if create_db.is_some() {
2878 parser_err!("Found multiple CREATEDB or NOCREATEDB")
2879 } else {
2880 create_db = Some(keyword == Keyword::CREATEDB);
2881 Ok(())
2882 }
2883 }
2884 Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
2885 if create_role.is_some() {
2886 parser_err!("Found multiple CREATEROLE or NOCREATEROLE")
2887 } else {
2888 create_role = Some(keyword == Keyword::CREATEROLE);
2889 Ok(())
2890 }
2891 }
2892 Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
2893 if superuser.is_some() {
2894 parser_err!("Found multiple SUPERUSER or NOSUPERUSER")
2895 } else {
2896 superuser = Some(keyword == Keyword::SUPERUSER);
2897 Ok(())
2898 }
2899 }
2900 Keyword::REPLICATION | Keyword::NOREPLICATION => {
2901 if replication.is_some() {
2902 parser_err!("Found multiple REPLICATION or NOREPLICATION")
2903 } else {
2904 replication = Some(keyword == Keyword::REPLICATION);
2905 Ok(())
2906 }
2907 }
2908 Keyword::PASSWORD => {
2909 if password.is_some() {
2910 parser_err!("Found multiple PASSWORD")
2911 } else {
2912 password = if self.parse_keyword(Keyword::NULL) {
2913 Some(Password::NullPassword)
2914 } else {
2915 Some(Password::Password(Expr::Value(self.parse_value()?)))
2916 };
2917 Ok(())
2918 }
2919 }
2920 Keyword::CONNECTION => {
2921 self.expect_keyword(Keyword::LIMIT)?;
2922 if connection_limit.is_some() {
2923 parser_err!("Found multiple CONNECTION LIMIT")
2924 } else {
2925 connection_limit = Some(Expr::Value(self.parse_number_value()?));
2926 Ok(())
2927 }
2928 }
2929 Keyword::VALID => {
2930 self.expect_keyword(Keyword::UNTIL)?;
2931 if valid_until.is_some() {
2932 parser_err!("Found multiple VALID UNTIL")
2933 } else {
2934 valid_until = Some(Expr::Value(self.parse_value()?));
2935 Ok(())
2936 }
2937 }
2938 Keyword::IN => {
2939 if self.parse_keyword(Keyword::ROLE) {
2940 if !in_role.is_empty() {
2941 parser_err!("Found multiple IN ROLE")
2942 } else {
2943 in_role = self.parse_comma_separated(Parser::parse_identifier)?;
2944 Ok(())
2945 }
2946 } else if self.parse_keyword(Keyword::GROUP) {
2947 if !in_group.is_empty() {
2948 parser_err!("Found multiple IN GROUP")
2949 } else {
2950 in_group = self.parse_comma_separated(Parser::parse_identifier)?;
2951 Ok(())
2952 }
2953 } else {
2954 self.expected("ROLE or GROUP after IN", self.peek_token())
2955 }
2956 }
2957 Keyword::ROLE => {
2958 if !role.is_empty() {
2959 parser_err!("Found multiple ROLE")
2960 } else {
2961 role = self.parse_comma_separated(Parser::parse_identifier)?;
2962 Ok(())
2963 }
2964 }
2965 Keyword::USER => {
2966 if !user.is_empty() {
2967 parser_err!("Found multiple USER")
2968 } else {
2969 user = self.parse_comma_separated(Parser::parse_identifier)?;
2970 Ok(())
2971 }
2972 }
2973 Keyword::ADMIN => {
2974 if !admin.is_empty() {
2975 parser_err!("Found multiple ADMIN")
2976 } else {
2977 admin = self.parse_comma_separated(Parser::parse_identifier)?;
2978 Ok(())
2979 }
2980 }
2981 _ => break,
2982 }?
2983 }
2984
2985 Ok(Statement::CreateRole {
2986 names,
2987 if_not_exists,
2988 login,
2989 inherit,
2990 bypassrls,
2991 password,
2992 create_db,
2993 create_role,
2994 replication,
2995 superuser,
2996 connection_limit,
2997 valid_until,
2998 in_role,
2999 in_group,
3000 role,
3001 user,
3002 admin,
3003 authorization_owner,
3004 })
3005 }
3006
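/// Parse a `DROP` statement for a table, view, index, role, schema,
/// sequence, or function.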
3007 pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
3008 let object_type = if self.parse_keyword(Keyword::TABLE) {
3009 ObjectType::Table
3010 } else if self.parse_keyword(Keyword::VIEW) {
3011 ObjectType::View
3012 } else if self.parse_keyword(Keyword::INDEX) {
3013 ObjectType::Index
3014 } else if self.parse_keyword(Keyword::ROLE) {
3015 ObjectType::Role
3016 } else if self.parse_keyword(Keyword::SCHEMA) {
3017 ObjectType::Schema
3018 } else if self.parse_keyword(Keyword::SEQUENCE) {
3019 ObjectType::Sequence
3020 } else if self.parse_keyword(Keyword::FUNCTION) {
3021 return self.parse_drop_function();
3022 } else {
3023 return self.expected(
3024 "TABLE, VIEW, INDEX, ROLE, SCHEMA, FUNCTION or SEQUENCE after DROP",
3025 self.peek_token(),
3026 );
3027 };
3028 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
3031 let names = self.parse_comma_separated(Parser::parse_object_name)?;
3032 let cascade = self.parse_keyword(Keyword::CASCADE);
3033 let restrict = self.parse_keyword(Keyword::RESTRICT);
3034 let purge = self.parse_keyword(Keyword::PURGE);
3035 if cascade && restrict {
3036 return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP");
3037 }
3038 if object_type == ObjectType::Role && (cascade || restrict || purge) {
3039 return parser_err!("Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE");
3040 }
3041 Ok(Statement::Drop {
3042 object_type,
3043 if_exists,
3044 names,
3045 cascade,
3046 restrict,
3047 purge,
3048 })
3049 }
3050
3051 fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
3056 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
3057 let func_desc = self.parse_comma_separated(Parser::parse_drop_function_desc)?;
3058 let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
3059 Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
3060 Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
3061 _ => None,
3062 };
3063 Ok(Statement::DropFunction {
3064 if_exists,
3065 func_desc,
3066 option,
3067 })
3068 }
3069
3070 fn parse_drop_function_desc(&mut self) -> Result<DropFunctionDesc, ParserError> {
3071 let name = self.parse_object_name()?;
3072
3073 let args = if self.consume_token(&Token::LParen) {
3074 if self.consume_token(&Token::RParen) {
3075 None
3076 } else {
3077 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
3078 self.expect_token(&Token::RParen)?;
3079 Some(args)
3080 }
3081 } else {
3082 None
3083 };
3084
3085 Ok(DropFunctionDesc { name, args })
3086 }
3087
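/// Parse a `DECLARE <name> [BINARY] [ASENSITIVE | INSENSITIVE]
/// [[NO] SCROLL] CURSOR [WITH | WITHOUT HOLD] FOR <query>` statement.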
3088 pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
3093 let name = self.parse_identifier()?;
3094
3095 let binary = self.parse_keyword(Keyword::BINARY);
3096 let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
3097 Some(true)
3098 } else if self.parse_keyword(Keyword::ASENSITIVE) {
3099 Some(false)
3100 } else {
3101 None
3102 };
3103 let scroll = if self.parse_keyword(Keyword::SCROLL) {
3104 Some(true)
3105 } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
3106 Some(false)
3107 } else {
3108 None
3109 };
3110
3111 self.expect_keyword(Keyword::CURSOR)?;
3112
3113 let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
3114 Some(keyword) => {
3115 self.expect_keyword(Keyword::HOLD)?;
3116
3117 match keyword {
3118 Keyword::WITH => Some(true),
3119 Keyword::WITHOUT => Some(false),
3120 _ => unreachable!(),
3121 }
3122 }
3123 None => None,
3124 };
3125
3126 self.expect_keyword(Keyword::FOR)?;
3127
3128 let query = self.parse_query()?;
3129
3130 Ok(Statement::Declare {
3131 name,
3132 binary,
3133 sensitive,
3134 scroll,
3135 hold,
3136 query: Box::new(query),
3137 })
3138 }
3139
3140 pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
3142 let direction = if self.parse_keyword(Keyword::NEXT) {
3143 FetchDirection::Next
3144 } else if self.parse_keyword(Keyword::PRIOR) {
3145 FetchDirection::Prior
3146 } else if self.parse_keyword(Keyword::FIRST) {
3147 FetchDirection::First
3148 } else if self.parse_keyword(Keyword::LAST) {
3149 FetchDirection::Last
3150 } else if self.parse_keyword(Keyword::ABSOLUTE) {
3151 FetchDirection::Absolute {
3152 limit: self.parse_number_value()?,
3153 }
3154 } else if self.parse_keyword(Keyword::RELATIVE) {
3155 FetchDirection::Relative {
3156 limit: self.parse_number_value()?,
3157 }
3158 } else if self.parse_keyword(Keyword::FORWARD) {
3159 if self.parse_keyword(Keyword::ALL) {
3160 FetchDirection::ForwardAll
3161 } else {
3162 FetchDirection::Forward {
3163 limit: Some(self.parse_number_value()?),
3165 }
3166 }
3167 } else if self.parse_keyword(Keyword::BACKWARD) {
3168 if self.parse_keyword(Keyword::ALL) {
3169 FetchDirection::BackwardAll
3170 } else {
3171 FetchDirection::Backward {
3172 limit: Some(self.parse_number_value()?),
3174 }
3175 }
3176 } else if self.parse_keyword(Keyword::ALL) {
3177 FetchDirection::All
3178 } else {
3179 FetchDirection::Count {
3180 limit: self.parse_number_value()?,
3181 }
3182 };
3183
3184 self.expect_one_of_keywords(&[Keyword::FROM, Keyword::IN])?;
3185
3186 let name = self.parse_identifier()?;
3187
3188 let into = if self.parse_keyword(Keyword::INTO) {
3189 Some(self.parse_object_name()?)
3190 } else {
3191 None
3192 };
3193
3194 Ok(Statement::Fetch {
3195 name,
3196 direction,
3197 into,
3198 })
3199 }
3200
3201 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
3202 let object_type = if self.parse_keyword(Keyword::ALL) {
3203 DiscardObject::ALL
3204 } else if self.parse_keyword(Keyword::PLANS) {
3205 DiscardObject::PLANS
3206 } else if self.parse_keyword(Keyword::SEQUENCES) {
3207 DiscardObject::SEQUENCES
3208 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
3209 DiscardObject::TEMP
3210 } else {
3211 return self.expected(
3212 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
3213 self.peek_token(),
3214 );
3215 };
3216 Ok(Statement::Discard { object_type })
3217 }
3218
3219 pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
3220 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
3221 let index_name = self.parse_object_name()?;
3222 self.expect_keyword(Keyword::ON)?;
3223 let table_name = self.parse_object_name()?;
3224 let using = if self.parse_keyword(Keyword::USING) {
3225 Some(self.parse_identifier()?)
3226 } else {
3227 None
3228 };
3229 self.expect_token(&Token::LParen)?;
3230 let columns = self.parse_comma_separated(Parser::parse_order_by_expr)?;
3231 self.expect_token(&Token::RParen)?;
3232 Ok(Statement::CreateIndex {
3233 name: index_name,
3234 table_name,
3235 using,
3236 columns,
3237 unique,
3238 if_not_exists,
3239 })
3240 }
3241
3242 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
3244 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
3245 self.expect_token(&Token::LParen)?;
3246 let columns = self.parse_comma_separated(Parser::parse_column_def)?;
3247 self.expect_token(&Token::RParen)?;
3248 Ok(HiveDistributionStyle::PARTITIONED { columns })
3249 } else {
3250 Ok(HiveDistributionStyle::NONE)
3251 }
3252 }
3253
3254 pub fn parse_hive_formats(&mut self) -> Result<HiveFormat, ParserError> {
3255 let mut hive_format = HiveFormat::default();
3256 loop {
3257 match self.parse_one_of_keywords(&[Keyword::ROW, Keyword::STORED, Keyword::LOCATION]) {
3258 Some(Keyword::ROW) => {
3259 hive_format.row_format = Some(self.parse_row_format()?);
3260 }
3261 Some(Keyword::STORED) => {
3262 self.expect_keyword(Keyword::AS)?;
3263 if self.parse_keyword(Keyword::INPUTFORMAT) {
3264 let input_format = self.parse_expr()?;
3265 self.expect_keyword(Keyword::OUTPUTFORMAT)?;
3266 let output_format = self.parse_expr()?;
3267 hive_format.storage = Some(HiveIOFormat::IOF {
3268 input_format,
3269 output_format,
3270 });
3271 } else {
3272 let format = self.parse_file_format()?;
3273 hive_format.storage = Some(HiveIOFormat::FileFormat { format });
3274 }
3275 }
3276 Some(Keyword::LOCATION) => {
3277 hive_format.location = Some(self.parse_literal_string()?);
3278 }
3279 _ => break,
3281 }
3282 }
3283
3284 Ok(hive_format)
3285 }
3286
3287 pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
3288 self.expect_keyword(Keyword::FORMAT)?;
3289 match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
3290 Some(Keyword::SERDE) => {
3291 let class = self.parse_literal_string()?;
3292 Ok(HiveRowFormat::SERDE { class })
3293 }
3294 _ => Ok(HiveRowFormat::DELIMITED),
3295 }
3296 }
3297
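/// Parse the body of a `CREATE TABLE` statement; the leading modifiers
/// (`OR REPLACE`, `TEMPORARY`, `GLOBAL`/`LOCAL`, `TRANSIENT`) and the
/// `TABLE` keyword have already been consumed by the caller.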
3298 pub fn parse_create_table(
3299 &mut self,
3300 or_replace: bool,
3301 temporary: bool,
3302 global: Option<bool>,
3303 transient: bool,
3304 ) -> Result<Statement, ParserError> {
3305 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
3306 let table_name = self.parse_object_name()?;
3307
3308 let on_cluster = if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
3310 let next_token = self.next_token();
3311 match next_token.token {
3312 Token::SingleQuotedString(s) => Some(s),
3313 Token::Word(s) => Some(s.to_string()),
3314 _ => self.expected("identifier or cluster literal", next_token)?,
3315 }
3316 } else {
3317 None
3318 };
3319
3320 let like = if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
3321 self.parse_object_name().ok()
3322 } else {
3323 None
3324 };
3325
3326 let clone = if self.parse_keyword(Keyword::CLONE) {
3327 self.parse_object_name().ok()
3328 } else {
3329 None
3330 };
3331
3332 let (columns, constraints) = self.parse_columns()?;
3334
3335 let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);
3337
3338 let hive_distribution = self.parse_hive_distribution()?;
3339 let hive_formats = self.parse_hive_formats()?;
3340 let with_options = self.parse_options(Keyword::WITH)?;
3342 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
3343
3344 let engine = if self.parse_keyword(Keyword::ENGINE) {
3345 self.expect_token(&Token::Eq)?;
3346 let next_token = self.next_token();
3347 match next_token.token {
3348 Token::Word(w) => Some(w.value),
3349 _ => self.expected("identifier", next_token)?,
3350 }
3351 } else {
3352 None
3353 };
3354
3355 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
3356 if self.consume_token(&Token::LParen) {
3357 let columns = if self.peek_token() != Token::RParen {
3358 self.parse_comma_separated(Parser::parse_identifier)?
3359 } else {
3360 vec![]
3361 };
3362 self.expect_token(&Token::RParen)?;
3363 Some(columns)
3364 } else {
3365 Some(vec![self.parse_identifier()?])
3366 }
3367 } else {
3368 None
3369 };
3370
3371 let query = if self.parse_keyword(Keyword::AS) {
3373 Some(Box::new(self.parse_query()?))
3374 } else {
3375 None
3376 };
3377
3378 let default_charset = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
3379 self.expect_token(&Token::Eq)?;
3380 let next_token = self.next_token();
3381 match next_token.token {
3382 Token::Word(w) => Some(w.value),
3383 _ => self.expected("identifier", next_token)?,
3384 }
3385 } else {
3386 None
3387 };
3388
3389 let collation = if self.parse_keywords(&[Keyword::COLLATE]) {
3390 self.expect_token(&Token::Eq)?;
3391 let next_token = self.next_token();
3392 match next_token.token {
3393 Token::Word(w) => Some(w.value),
3394 _ => self.expected("identifier", next_token)?,
3395 }
3396 } else {
3397 None
3398 };
3399
3400 let on_commit: Option<OnCommit> =
3401 if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT, Keyword::DELETE, Keyword::ROWS])
3402 {
3403 Some(OnCommit::DeleteRows)
3404 } else if self.parse_keywords(&[
3405 Keyword::ON,
3406 Keyword::COMMIT,
3407 Keyword::PRESERVE,
3408 Keyword::ROWS,
3409 ]) {
3410 Some(OnCommit::PreserveRows)
3411 } else if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT, Keyword::DROP]) {
3412 Some(OnCommit::Drop)
3413 } else {
3414 None
3415 };
3416
3417 Ok(CreateTableBuilder::new(table_name)
3418 .temporary(temporary)
3419 .columns(columns)
3420 .constraints(constraints)
3421 .with_options(with_options)
3422 .table_properties(table_properties)
3423 .or_replace(or_replace)
3424 .if_not_exists(if_not_exists)
3425 .transient(transient)
3426 .hive_distribution(hive_distribution)
3427 .hive_formats(Some(hive_formats))
3428 .global(global)
3429 .query(query)
3430 .without_rowid(without_rowid)
3431 .like(like)
3432 .clone_clause(clone)
3433 .engine(engine)
3434 .order_by(order_by)
3435 .default_charset(default_charset)
3436 .collation(collation)
3437 .on_commit(on_commit)
3438 .on_cluster(on_cluster)
3439 .build())
3440 }
3441
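/// Parse a parenthesized, comma-separated list of column definitions and
/// table constraints; returns empty lists if no opening parenthesis follows.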
3442 pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
3443 let mut columns = vec![];
3444 let mut constraints = vec![];
3445 if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
3446 return Ok((columns, constraints));
3447 }
3448
3449 loop {
3450 if let Some(constraint) = self.parse_optional_table_constraint()? {
3451 constraints.push(constraint);
3452 } else if let Token::Word(_) = self.peek_token().token {
3453 columns.push(self.parse_column_def()?);
3454 } else {
3455 return self.expected("column name or constraint definition", self.peek_token());
3456 }
3457 let comma = self.consume_token(&Token::Comma);
3458 if self.consume_token(&Token::RParen) {
3459 break;
3461 } else if !comma {
3462 return self.expected("',' or ')' after column definition", self.peek_token());
3463 }
3464 }
3465
3466 Ok((columns, constraints))
3467 }
3468
3469 pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
3470 let name = self.parse_identifier()?;
3471 let data_type = self.parse_data_type()?;
3472 let collation = if self.parse_keyword(Keyword::COLLATE) {
3473 Some(self.parse_object_name()?)
3474 } else {
3475 None
3476 };
3477 let mut options = vec![];
3478 loop {
3479 if self.parse_keyword(Keyword::CONSTRAINT) {
3480 let name = Some(self.parse_identifier()?);
3481 if let Some(option) = self.parse_optional_column_option()? {
3482 options.push(ColumnOptionDef { name, option });
3483 } else {
3484 return self.expected(
3485 "constraint details after CONSTRAINT <name>",
3486 self.peek_token(),
3487 );
3488 }
3489 } else if let Some(option) = self.parse_optional_column_option()? {
3490 options.push(ColumnOptionDef { name: None, option });
3491 } else {
3492 break;
3493 };
3494 }
3495 Ok(ColumnDef {
3496 name,
3497 data_type,
3498 collation,
3499 options,
3500 })
3501 }
3502
3503 pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
3504 if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
3505 Ok(Some(ColumnOption::CharacterSet(self.parse_object_name()?)))
3506 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3507 Ok(Some(ColumnOption::NotNull))
3508 } else if self.parse_keywords(&[Keyword::COMMENT]) {
3509 let next_token = self.next_token();
3510 match next_token.token {
3511 Token::SingleQuotedString(value, ..) => Ok(Some(ColumnOption::Comment(value))),
3512 _ => self.expected("string", next_token),
3513 }
3514 } else if self.parse_keyword(Keyword::NULL) {
3515 Ok(Some(ColumnOption::Null))
3516 } else if self.parse_keyword(Keyword::DEFAULT) {
3517 Ok(Some(ColumnOption::Default(self.parse_expr()?)))
3518 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
3519 Ok(Some(ColumnOption::Unique { is_primary: true }))
3520 } else if self.parse_keyword(Keyword::UNIQUE) {
3521 Ok(Some(ColumnOption::Unique { is_primary: false }))
3522 } else if self.parse_keyword(Keyword::REFERENCES) {
3523 let foreign_table = self.parse_object_name()?;
3524 let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
3527 let mut on_delete = None;
3528 let mut on_update = None;
3529 loop {
3530 if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
3531 on_delete = Some(self.parse_referential_action()?);
3532 } else if on_update.is_none()
3533 && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
3534 {
3535 on_update = Some(self.parse_referential_action()?);
3536 } else {
3537 break;
3538 }
3539 }
3540 Ok(Some(ColumnOption::ForeignKey {
3541 foreign_table,
3542 referred_columns,
3543 on_delete,
3544 on_update,
3545 }))
3546 } else if self.parse_keyword(Keyword::CHECK) {
3547 self.expect_token(&Token::LParen)?;
3548 let expr = self.parse_expr()?;
3549 self.expect_token(&Token::RParen)?;
3550 Ok(Some(ColumnOption::Check(expr)))
3551 } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
3552 && dialect_of!(self is MySqlDialect | GenericDialect)
3553 {
3554 Ok(Some(ColumnOption::DialectSpecific(vec![
3556 Token::make_keyword("AUTO_INCREMENT"),
3557 ])))
3558 } else if self.parse_keyword(Keyword::AUTOINCREMENT)
3559 && dialect_of!(self is SQLiteDialect | GenericDialect)
3560 {
3561 Ok(Some(ColumnOption::DialectSpecific(vec![
3563 Token::make_keyword("AUTOINCREMENT"),
3564 ])))
3565 } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
3566 && dialect_of!(self is MySqlDialect | GenericDialect)
3567 {
3568 let expr = self.parse_expr()?;
3569 Ok(Some(ColumnOption::OnUpdate(expr)))
3570 } else if self.parse_keyword(Keyword::GENERATED) {
3571 self.parse_optional_column_option_generated()
3572 } else {
3573 Ok(None)
3574 }
3575 }
3576 fn parse_optional_column_option_generated(
3577 &mut self,
3578 ) -> Result<Option<ColumnOption>, ParserError> {
3579 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
3580 let mut sequence_options = vec![];
3581 if self.expect_token(&Token::LParen).is_ok() {
3582 sequence_options = self.parse_create_sequence_options()?;
3583 self.expect_token(&Token::RParen)?;
3584 }
3585 Ok(Some(ColumnOption::Generated {
3586 generated_as: GeneratedAs::Always,
3587 sequence_options: Some(sequence_options),
3588 generation_expr: None,
3589 }))
3590 } else if self.parse_keywords(&[
3591 Keyword::BY,
3592 Keyword::DEFAULT,
3593 Keyword::AS,
3594 Keyword::IDENTITY,
3595 ]) {
3596 let mut sequence_options = vec![];
3597 if self.expect_token(&Token::LParen).is_ok() {
3598 sequence_options = self.parse_create_sequence_options()?;
3599 self.expect_token(&Token::RParen)?;
3600 }
3601 Ok(Some(ColumnOption::Generated {
3602 generated_as: GeneratedAs::ByDefault,
3603 sequence_options: Some(sequence_options),
3604 generation_expr: None,
3605 }))
3606 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
3607 if self.expect_token(&Token::LParen).is_ok() {
3608 let expr = self.parse_expr()?;
3609 self.expect_token(&Token::RParen)?;
3610 let _ = self.parse_keywords(&[Keyword::STORED]);
3611 Ok(Some(ColumnOption::Generated {
3612 generated_as: GeneratedAs::ExpStored,
3613 sequence_options: None,
3614 generation_expr: Some(expr),
3615 }))
3616 } else {
3617 Ok(None)
3618 }
3619 } else {
3620 Ok(None)
3621 }
3622 }
3623
3624 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
3625 if self.parse_keyword(Keyword::RESTRICT) {
3626 Ok(ReferentialAction::Restrict)
3627 } else if self.parse_keyword(Keyword::CASCADE) {
3628 Ok(ReferentialAction::Cascade)
3629 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
3630 Ok(ReferentialAction::SetNull)
3631 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
3632 Ok(ReferentialAction::NoAction)
3633 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
3634 Ok(ReferentialAction::SetDefault)
3635 } else {
3636 self.expected(
3637 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
3638 self.peek_token(),
3639 )
3640 }
3641 }
3642
3643 pub fn parse_optional_table_constraint(
3644 &mut self,
3645 ) -> Result<Option<TableConstraint>, ParserError> {
3646 let name = if self.parse_keyword(Keyword::CONSTRAINT) {
3647 Some(self.parse_identifier()?)
3648 } else {
3649 None
3650 };
3651
3652 let next_token = self.next_token();
3653 match next_token.token {
3654 Token::Word(w) if w.keyword == Keyword::PRIMARY || w.keyword == Keyword::UNIQUE => {
3655 let is_primary = w.keyword == Keyword::PRIMARY;
3656 if is_primary {
3657 self.expect_keyword(Keyword::KEY)?;
3658 }
3659 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
3660 Ok(Some(TableConstraint::Unique {
3661 name,
3662 columns,
3663 is_primary,
3664 }))
3665 }
3666 Token::Word(w) if w.keyword == Keyword::FOREIGN => {
3667 self.expect_keyword(Keyword::KEY)?;
3668 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
3669 self.expect_keyword(Keyword::REFERENCES)?;
3670 let foreign_table = self.parse_object_name()?;
3671 let referred_columns = self.parse_parenthesized_column_list(Mandatory, false)?;
3672 let mut on_delete = None;
3673 let mut on_update = None;
3674 loop {
3675 if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
3676 on_delete = Some(self.parse_referential_action()?);
3677 } else if on_update.is_none()
3678 && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
3679 {
3680 on_update = Some(self.parse_referential_action()?);
3681 } else {
3682 break;
3683 }
3684 }
3685 Ok(Some(TableConstraint::ForeignKey {
3686 name,
3687 columns,
3688 foreign_table,
3689 referred_columns,
3690 on_delete,
3691 on_update,
3692 }))
3693 }
3694 Token::Word(w) if w.keyword == Keyword::CHECK => {
3695 self.expect_token(&Token::LParen)?;
3696 let expr = Box::new(self.parse_expr()?);
3697 self.expect_token(&Token::RParen)?;
3698 Ok(Some(TableConstraint::Check { name, expr }))
3699 }
3700 Token::Word(w)
3701 if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
3702 && dialect_of!(self is GenericDialect | MySqlDialect) =>
3703 {
3704 let display_as_key = w.keyword == Keyword::KEY;
3705
3706 let name = match self.peek_token().token {
3707 Token::Word(word) if word.keyword == Keyword::USING => None,
3708 _ => self.maybe_parse(|parser| parser.parse_identifier()),
3709 };
3710
3711 let index_type = if self.parse_keyword(Keyword::USING) {
3712 Some(self.parse_index_type()?)
3713 } else {
3714 None
3715 };
3716 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
3717
3718 Ok(Some(TableConstraint::Index {
3719 display_as_key,
3720 name,
3721 index_type,
3722 columns,
3723 }))
3724 }
3725 Token::Word(w)
3726 if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
3727 && dialect_of!(self is GenericDialect | MySqlDialect) =>
3728 {
3729 if let Some(name) = name {
3730 return self.expected(
3731 "FULLTEXT or SPATIAL option without constraint name",
3732 TokenWithLocation {
3733 token: Token::make_keyword(&name.to_string()),
3734 location: next_token.location,
3735 },
3736 );
3737 }
3738
3739 let fulltext = w.keyword == Keyword::FULLTEXT;
3740
3741 let index_type_display = if self.parse_keyword(Keyword::KEY) {
3742 KeyOrIndexDisplay::Key
3743 } else if self.parse_keyword(Keyword::INDEX) {
3744 KeyOrIndexDisplay::Index
3745 } else {
3746 KeyOrIndexDisplay::None
3747 };
3748
3749 let opt_index_name = self.maybe_parse(|parser| parser.parse_identifier());
3750
3751 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
3752
3753 Ok(Some(TableConstraint::FulltextOrSpatial {
3754 fulltext,
3755 index_type_display,
3756 opt_index_name,
3757 columns,
3758 }))
3759 }
3760 _ => {
3761 if name.is_some() {
3762 self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
3763 } else {
3764 self.prev_token();
3765 Ok(None)
3766 }
3767 }
3768 }
3769 }
3770
3771 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
3772 if self.parse_keyword(keyword) {
3773 self.expect_token(&Token::LParen)?;
3774 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
3775 self.expect_token(&Token::RParen)?;
3776 Ok(options)
3777 } else {
3778 Ok(vec![])
3779 }
3780 }
3781
3782 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
3783 if self.parse_keyword(Keyword::BTREE) {
3784 Ok(IndexType::BTree)
3785 } else if self.parse_keyword(Keyword::HASH) {
3786 Ok(IndexType::Hash)
3787 } else {
3788 self.expected("index type {BTREE | HASH}", self.peek_token())
3789 }
3790 }
3791
3792 pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
3793 let name = self.parse_identifier()?;
3794 self.expect_token(&Token::Eq)?;
3795 let value = self.parse_value()?;
3796 Ok(SqlOption { name, value })
3797 }
3798
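/// Parse an `ALTER TABLE ...` or `ALTER INDEX ...` statement.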
3799 pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
3800 let object_type = self.expect_one_of_keywords(&[Keyword::TABLE, Keyword::INDEX])?;
3801 match object_type {
3802 Keyword::TABLE => {
3803 let _ = self.parse_keyword(Keyword::ONLY);
let table_name = self.parse_object_name()?;
3805 let operation = if self.parse_keyword(Keyword::ADD) {
3806 if let Some(constraint) = self.parse_optional_table_constraint()? {
3807 AlterTableOperation::AddConstraint(constraint)
3808 } else {
3809 let if_not_exists =
3810 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
3811 if self.parse_keyword(Keyword::PARTITION) {
3812 self.expect_token(&Token::LParen)?;
3813 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
3814 self.expect_token(&Token::RParen)?;
3815 AlterTableOperation::AddPartitions {
3816 if_not_exists,
3817 new_partitions: partitions,
3818 }
3819 } else {
3820 let column_keyword = self.parse_keyword(Keyword::COLUMN);
3821
3822 let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | GenericDialect)
3823 {
3824 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
3825 || if_not_exists
3826 } else {
3827 false
3828 };
3829
3830 let column_def = self.parse_column_def()?;
3831 AlterTableOperation::AddColumn {
3832 column_keyword,
3833 if_not_exists,
3834 column_def,
3835 }
3836 }
3837 }
3838 } else if self.parse_keyword(Keyword::RENAME) {
3839 if dialect_of!(self is PostgreSqlDialect)
3840 && self.parse_keyword(Keyword::CONSTRAINT)
3841 {
3842 let old_name = self.parse_identifier()?;
3843 self.expect_keyword(Keyword::TO)?;
3844 let new_name = self.parse_identifier()?;
3845 AlterTableOperation::RenameConstraint { old_name, new_name }
3846 } else if self.parse_keyword(Keyword::TO) {
3847 let table_name = self.parse_object_name()?;
3848 AlterTableOperation::RenameTable { table_name }
3849 } else {
3850 let _ = self.parse_keyword(Keyword::COLUMN);
let old_column_name = self.parse_identifier()?;
3852 self.expect_keyword(Keyword::TO)?;
3853 let new_column_name = self.parse_identifier()?;
3854 AlterTableOperation::RenameColumn {
3855 old_column_name,
3856 new_column_name,
3857 }
3858 }
3859 } else if self.parse_keyword(Keyword::DROP) {
3860 if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
3861 self.expect_token(&Token::LParen)?;
3862 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
3863 self.expect_token(&Token::RParen)?;
3864 AlterTableOperation::DropPartitions {
3865 partitions,
3866 if_exists: true,
3867 }
3868 } else if self.parse_keyword(Keyword::PARTITION) {
3869 self.expect_token(&Token::LParen)?;
3870 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
3871 self.expect_token(&Token::RParen)?;
3872 AlterTableOperation::DropPartitions {
3873 partitions,
3874 if_exists: false,
3875 }
3876 } else if self.parse_keyword(Keyword::CONSTRAINT) {
3877 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
3878 let name = self.parse_identifier()?;
3879 let cascade = self.parse_keyword(Keyword::CASCADE);
3880 AlterTableOperation::DropConstraint {
3881 if_exists,
3882 name,
3883 cascade,
3884 }
3885 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
3886 && dialect_of!(self is MySqlDialect | GenericDialect)
3887 {
3888 AlterTableOperation::DropPrimaryKey
3889 } else {
3890 let _ = self.parse_keyword(Keyword::COLUMN);
let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
3892 let column_name = self.parse_identifier()?;
3893 let cascade = self.parse_keyword(Keyword::CASCADE);
3894 AlterTableOperation::DropColumn {
3895 column_name,
3896 if_exists,
3897 cascade,
3898 }
3899 }
3900 } else if self.parse_keyword(Keyword::PARTITION) {
3901 self.expect_token(&Token::LParen)?;
3902 let before = self.parse_comma_separated(Parser::parse_expr)?;
3903 self.expect_token(&Token::RParen)?;
3904 self.expect_keyword(Keyword::RENAME)?;
3905 self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
3906 self.expect_token(&Token::LParen)?;
3907 let renames = self.parse_comma_separated(Parser::parse_expr)?;
3908 self.expect_token(&Token::RParen)?;
3909 AlterTableOperation::RenamePartitions {
3910 old_partitions: before,
3911 new_partitions: renames,
3912 }
3913 } else if self.parse_keyword(Keyword::CHANGE) {
3914 let _ = self.parse_keyword(Keyword::COLUMN);
let old_name = self.parse_identifier()?;
3916 let new_name = self.parse_identifier()?;
3917 let data_type = self.parse_data_type()?;
3918 let mut options = vec![];
3919 while let Some(option) = self.parse_optional_column_option()? {
3920 options.push(option);
3921 }
3922
3923 AlterTableOperation::ChangeColumn {
3924 old_name,
3925 new_name,
3926 data_type,
3927 options,
3928 }
3929 } else if self.parse_keyword(Keyword::ALTER) {
3930 let _ = self.parse_keyword(Keyword::COLUMN);
let column_name = self.parse_identifier()?;
3932 let is_postgresql = dialect_of!(self is PostgreSqlDialect);
3933
3934 let op = if self.parse_keywords(&[Keyword::SET, Keyword::NOT, Keyword::NULL]) {
3935 AlterColumnOperation::SetNotNull {}
3936 } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
3937 AlterColumnOperation::DropNotNull {}
3938 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
3939 AlterColumnOperation::SetDefault {
3940 value: self.parse_expr()?,
3941 }
3942 } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
3943 AlterColumnOperation::DropDefault {}
3944 } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE])
3945 || (is_postgresql && self.parse_keyword(Keyword::TYPE))
3946 {
3947 let data_type = self.parse_data_type()?;
3948 let using = if is_postgresql && self.parse_keyword(Keyword::USING) {
3949 Some(self.parse_expr()?)
3950 } else {
3951 None
3952 };
3953 AlterColumnOperation::SetDataType { data_type, using }
3954 } else {
3955 return self.expected(
3956 "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE after ALTER COLUMN",
3957 self.peek_token(),
3958 );
3959 };
3960 AlterTableOperation::AlterColumn { column_name, op }
3961 } else if self.parse_keyword(Keyword::SWAP) {
3962 self.expect_keyword(Keyword::WITH)?;
3963 let table_name = self.parse_object_name()?;
3964 AlterTableOperation::SwapWith { table_name }
3965 } else {
3966 return self.expected(
3967 "ADD, RENAME, PARTITION, SWAP or DROP after ALTER TABLE",
3968 self.peek_token(),
3969 );
3970 };
3971 Ok(Statement::AlterTable {
3972 name: table_name,
3973 operation,
3974 })
3975 }
3976 Keyword::INDEX => {
3977 let index_name = self.parse_object_name()?;
3978 let operation = if self.parse_keyword(Keyword::RENAME) {
3979 if self.parse_keyword(Keyword::TO) {
3980 let index_name = self.parse_object_name()?;
3981 AlterIndexOperation::RenameIndex { index_name }
3982 } else {
3983 return self.expected("TO after RENAME", self.peek_token());
3984 }
3985 } else {
3986 return self.expected("RENAME after ALTER INDEX", self.peek_token());
3987 };
3988
3989 Ok(Statement::AlterIndex {
3990 name: index_name,
3991 operation,
3992 })
3993 }
3994 _ => unreachable!(),
3996 }
3997 }
3998
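/// Parse a PostgreSQL-style `COPY <table> [(columns)] FROM | TO <target>`
/// statement, accepting both the parenthesized option list and the legacy
/// per-keyword option syntax.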
3999 pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
4001 let table_name = self.parse_object_name()?;
4002 let columns = self.parse_parenthesized_column_list(Optional, false)?;
4003 let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
4004 Some(Keyword::FROM) => false,
4005 Some(Keyword::TO) => true,
4006 _ => self.expected("FROM or TO", self.peek_token())?,
4007 };
4008 let target = if self.parse_keyword(Keyword::STDIN) {
4009 CopyTarget::Stdin
4010 } else if self.parse_keyword(Keyword::STDOUT) {
4011 CopyTarget::Stdout
4012 } else if self.parse_keyword(Keyword::PROGRAM) {
4013 CopyTarget::Program {
4014 command: self.parse_literal_string()?,
4015 }
4016 } else {
4017 CopyTarget::File {
4018 filename: self.parse_literal_string()?,
4019 }
4020 };
4021 let _ = self.parse_keyword(Keyword::WITH);
let mut options = vec![];
4023 if self.consume_token(&Token::LParen) {
4024 options = self.parse_comma_separated(Parser::parse_copy_option)?;
4025 self.expect_token(&Token::RParen)?;
4026 }
4027 let mut legacy_options = vec![];
4028 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option()) {
4029 legacy_options.push(opt);
4030 }
4031 let values = if let CopyTarget::Stdin = target {
4032 self.expect_token(&Token::SemiColon)?;
4033 self.parse_tsv()
4034 } else {
4035 vec![]
4036 };
4037 Ok(Statement::Copy {
4038 table_name,
4039 columns,
4040 to,
4041 target,
4042 options,
4043 legacy_options,
4044 values,
4045 })
4046 }
4047
4048 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
4049 let cursor = if self.parse_keyword(Keyword::ALL) {
4050 CloseCursor::All
4051 } else {
4052 let name = self.parse_identifier()?;
4053
4054 CloseCursor::Specific { name }
4055 };
4056
4057 Ok(Statement::Close { cursor })
4058 }
4059
4060 fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
4061 let ret = match self.parse_one_of_keywords(&[
4062 Keyword::FORMAT,
4063 Keyword::FREEZE,
4064 Keyword::DELIMITER,
4065 Keyword::NULL,
4066 Keyword::HEADER,
4067 Keyword::QUOTE,
4068 Keyword::ESCAPE,
4069 Keyword::FORCE_QUOTE,
4070 Keyword::FORCE_NOT_NULL,
4071 Keyword::FORCE_NULL,
4072 Keyword::ENCODING,
4073 ]) {
4074 Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
4075 Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
4076 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
4077 Some(Keyword::FALSE)
4078 )),
4079 Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
4080 Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
4081 Some(Keyword::HEADER) => CopyOption::Header(!matches!(
4082 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
4083 Some(Keyword::FALSE)
4084 )),
4085 Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
4086 Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
4087 Some(Keyword::FORCE_QUOTE) => {
4088 CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
4089 }
4090 Some(Keyword::FORCE_NOT_NULL) => {
4091 CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
4092 }
4093 Some(Keyword::FORCE_NULL) => {
4094 CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
4095 }
4096 Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
4097 _ => self.expected("option", self.peek_token())?,
4098 };
4099 Ok(ret)
4100 }
4101
4102 fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
4103 let ret = match self.parse_one_of_keywords(&[
4104 Keyword::BINARY,
4105 Keyword::DELIMITER,
4106 Keyword::NULL,
4107 Keyword::CSV,
4108 ]) {
4109 Some(Keyword::BINARY) => CopyLegacyOption::Binary,
4110 Some(Keyword::DELIMITER) => {
4111 let _ = self.parse_keyword(Keyword::AS); // the AS keyword is optional
4112 CopyLegacyOption::Delimiter(self.parse_literal_char()?)
4113 }
4114 Some(Keyword::NULL) => {
4115 let _ = self.parse_keyword(Keyword::AS); // the AS keyword is optional
4116 CopyLegacyOption::Null(self.parse_literal_string()?)
4117 }
4118 Some(Keyword::CSV) => CopyLegacyOption::Csv({
4119 let mut opts = vec![];
4120 while let Some(opt) =
4121 self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())
4122 {
4123 opts.push(opt);
4124 }
4125 opts
4126 }),
4127 _ => self.expected("option", self.peek_token())?,
4128 };
4129 Ok(ret)
4130 }
4131
4132 fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
4133 let ret = match self.parse_one_of_keywords(&[
4134 Keyword::HEADER,
4135 Keyword::QUOTE,
4136 Keyword::ESCAPE,
4137 Keyword::FORCE,
4138 ]) {
4139 Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
4140 Some(Keyword::QUOTE) => {
4141 let _ = self.parse_keyword(Keyword::AS); // the AS keyword is optional
4142 CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
4143 }
4144 Some(Keyword::ESCAPE) => {
4145 let _ = self.parse_keyword(Keyword::AS); // the AS keyword is optional
4146 CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
4147 }
4148 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
4149 CopyLegacyCsvOption::ForceNotNull(
4150 self.parse_comma_separated(Parser::parse_identifier)?,
4151 )
4152 }
4153 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
4154 CopyLegacyCsvOption::ForceQuote(
4155 self.parse_comma_separated(Parser::parse_identifier)?,
4156 )
4157 }
4158 _ => self.expected("csv option", self.peek_token())?,
4159 };
4160 Ok(ret)
4161 }
4162
4163 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
4164 let s = self.parse_literal_string()?;
4165 if s.len() != 1 {
4166 return parser_err!(format!("Expected a char, found {s:?}"));
4167 }
4168 Ok(s.chars().next().unwrap())
4169 }
4170
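/// Read the tab-separated values that follow `COPY ... FROM STDIN`.
/// Values are split on tabs and newlines, `\N` denotes a NULL value, and
/// `\.` terminates the data.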
4171 pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
4174 self.parse_tab_value()
4175 }
4176
4177 pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
4178 let mut values = vec![];
4179 let mut content = String::from("");
4180 while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
4181 match t {
4182 Token::Whitespace(Whitespace::Tab) => {
4183 values.push(Some(content.to_string()));
4184 content.clear();
4185 }
4186 Token::Whitespace(Whitespace::Newline) => {
4187 values.push(Some(content.to_string()));
4188 content.clear();
4189 }
4190 Token::Backslash => {
4191 if self.consume_token(&Token::Period) {
4192 return values;
4193 }
4194 if let Token::Word(w) = self.next_token().token {
4195 if w.value == "N" {
4196 values.push(None);
4197 }
4198 }
4199 }
4200 _ => {
4201 content.push_str(&t.to_string());
4202 }
4203 }
4204 }
4205 values
4206 }
4207
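/// Parse a literal value: a number, a string (in any of the supported
/// quoting styles), `TRUE`/`FALSE`, `NULL`, or a placeholder such as
/// `:name` or `@name`.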
4208 pub fn parse_value(&mut self) -> Result<Value, ParserError> {
4210 let next_token = self.next_token();
4211 let location = next_token.location;
4212 match next_token.token {
4213 Token::Word(w) => match w.keyword {
4214 Keyword::TRUE => Ok(Value::Boolean(true)),
4215 Keyword::FALSE => Ok(Value::Boolean(false)),
4216 Keyword::NULL => Ok(Value::Null),
4217 Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
4218 Some('"') => Ok(Value::DoubleQuotedString(w.value)),
4219 Some('\'') => Ok(Value::SingleQuotedString(w.value)),
4220 _ => self.expected(
4221 "A value?",
4222 TokenWithLocation {
4223 token: Token::Word(w),
4224 location,
4225 },
4226 )?,
4227 },
4228 Keyword::NoKeyword | Keyword::LOCATION | Keyword::TYPE
4229 if dialect_of!(self is SnowflakeDialect | GenericDialect) => {
4230 Ok(Value::UnQuotedString(w.value))
4231 }
4232 _ => self.expected(
4233 "a concrete value",
4234 TokenWithLocation {
4235 token: Token::Word(w),
4236 location,
4237 },
4238 ),
4239 },
4240 Token::Number(ref n, l) => match n.parse() {
4244 Ok(n) => Ok(Value::Number(n, l)),
4245 Err(e) => parser_err!(format!("Could not parse '{n}' as number: {e}")),
4246 },
4247 Token::SingleQuotedString(ref s) => Ok(Value::SingleQuotedString(s.to_string())),
4248 Token::DoubleQuotedString(ref s) => Ok(Value::DoubleQuotedString(s.to_string())),
4249 Token::DollarQuotedString(ref s) => Ok(Value::DollarQuotedString(s.clone())),
4250 Token::SingleQuotedByteStringLiteral(ref s) => {
4251 Ok(Value::SingleQuotedByteStringLiteral(s.clone()))
4252 }
4253 Token::DoubleQuotedByteStringLiteral(ref s) => {
4254 Ok(Value::DoubleQuotedByteStringLiteral(s.clone()))
4255 }
4256 Token::RawStringLiteral(ref s) => Ok(Value::RawStringLiteral(s.clone())),
4257 Token::NationalStringLiteral(ref s) => Ok(Value::NationalStringLiteral(s.to_string())),
4258 Token::EscapedStringLiteral(ref s) => Ok(Value::EscapedStringLiteral(s.to_string())),
4259 Token::HexStringLiteral(ref s) => Ok(Value::HexStringLiteral(s.to_string())),
4260 Token::Placeholder(ref s) => Ok(Value::Placeholder(s.to_string())),
4261 tok @ Token::Colon | tok @ Token::AtSign => {
4262 let ident = self.parse_identifier()?;
4263 let placeholder = tok.to_string() + &ident.value;
4264 Ok(Value::Placeholder(placeholder))
4265 }
4266 unexpected => self.expected(
4267 "a value",
4268 TokenWithLocation {
4269 token: unexpected,
4270 location,
4271 },
4272 ),
4273 }
4274 }
4275
4276 pub fn parse_number_value(&mut self) -> Result<Value, ParserError> {
4277 match self.parse_value()? {
4278 v @ Value::Number(_, _) => Ok(v),
4279 v @ Value::Placeholder(_) => Ok(v),
4280 _ => {
4281 self.prev_token();
4282 self.expected("literal number", self.peek_token())
4283 }
4284 }
4285 }
4286
4287 fn parse_introduced_string_value(&mut self) -> Result<Value, ParserError> {
4288 let next_token = self.next_token();
4289 let location = next_token.location;
4290 match next_token.token {
4291 Token::SingleQuotedString(ref s) => Ok(Value::SingleQuotedString(s.to_string())),
4292 Token::DoubleQuotedString(ref s) => Ok(Value::DoubleQuotedString(s.to_string())),
4293 Token::HexStringLiteral(ref s) => Ok(Value::HexStringLiteral(s.to_string())),
4294 unexpected => self.expected(
4295 "a string value",
4296 TokenWithLocation {
4297 token: unexpected,
4298 location,
4299 },
4300 ),
4301 }
4302 }
4303
4304 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
4306 let next_token = self.next_token();
4307 match next_token.token {
4308 Token::Number(s, _) => s.parse::<u64>().map_err(|e| {
4309 ParserError::ParserError(format!("Could not parse '{s}' as u64: {e}"))
4310 }),
4311 _ => self.expected("literal int", next_token),
4312 }
4313 }
4314
4315 pub fn parse_function_definition(&mut self) -> Result<FunctionDefinition, ParserError> {
4316 let peek_token = self.peek_token();
4317 match peek_token.token {
4318 Token::DollarQuotedString(value) if dialect_of!(self is PostgreSqlDialect) => {
4319 self.next_token();
4320 Ok(FunctionDefinition::DoubleDollarDef(value.value))
4321 }
4322 _ => Ok(FunctionDefinition::SingleQuotedDef(
4323 self.parse_literal_string()?,
4324 )),
4325 }
4326 }
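/// Parse a literal string: a single- or double-quoted string, an unquoted
/// non-keyword word, or (for PostgreSQL-flavoured dialects) an escaped
/// string literal.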
4327 pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
4329 let next_token = self.next_token();
4330 match next_token.token {
4331 Token::Word(Word { value, keyword, .. }) if keyword == Keyword::NoKeyword => Ok(value),
4332 Token::SingleQuotedString(s) => Ok(s),
4333 Token::DoubleQuotedString(s) => Ok(s),
4334 Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
4335 Ok(s)
4336 }
4337 _ => self.expected("literal string", next_token),
4338 }
4339 }
4340
4341 pub fn parse_map_key(&mut self) -> Result<Expr, ParserError> {
4343 let next_token = self.next_token();
4344 match next_token.token {
4345 Token::Word(Word { value, keyword, .. })
4347 if (dialect_of!(self is BigQueryDialect) && keyword == Keyword::OFFSET) =>
4348 {
4349 self.parse_function(ObjectName(vec![Ident::new(value)]))
4350 }
4351 Token::Word(Word { value, keyword, .. }) if (keyword == Keyword::NoKeyword) => {
4352 if self.peek_token() == Token::LParen {
4353 return self.parse_function(ObjectName(vec![Ident::new(value)]));
4354 }
4355 Ok(Expr::Value(Value::SingleQuotedString(value)))
4356 }
4357 Token::SingleQuotedString(s) => Ok(Expr::Value(Value::SingleQuotedString(s))),
4358 #[cfg(not(feature = "bigdecimal"))]
4359 Token::Number(s, _) => Ok(Expr::Value(Value::Number(s, false))),
4360 #[cfg(feature = "bigdecimal")]
4361 Token::Number(s, _) => Ok(Expr::Value(Value::Number(s.parse().unwrap(), false))),
4362 _ => self.expected("literal string, number or function", next_token),
4363 }
4364 }
4365
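/// Parse a SQL data type, e.g. `INT UNSIGNED`, `VARCHAR(20)`,
/// `NUMERIC(10, 2)` or `TIMESTAMP WITH TIME ZONE`. Names that are not
/// recognised keywords become `DataType::Custom`, and any trailing `[]`
/// pairs wrap the result in `DataType::Array`.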
4366 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
4368 let next_token = self.next_token();
4369 let mut data = match next_token.token {
4370 Token::Word(w) => match w.keyword {
4371 Keyword::BOOLEAN => Ok(DataType::Boolean),
4372 Keyword::FLOAT => Ok(DataType::Float(self.parse_optional_precision()?)),
4373 Keyword::REAL => Ok(DataType::Real),
4374 Keyword::DOUBLE => {
4375 if self.parse_keyword(Keyword::PRECISION) {
4376 Ok(DataType::DoublePrecision)
4377 } else {
4378 Ok(DataType::Double)
4379 }
4380 }
4381 Keyword::TINYINT => {
4382 let optional_precision = self.parse_optional_precision();
4383 if self.parse_keyword(Keyword::UNSIGNED) {
4384 Ok(DataType::UnsignedTinyInt(optional_precision?))
4385 } else {
4386 Ok(DataType::TinyInt(optional_precision?))
4387 }
4388 }
4389 Keyword::SMALLINT => {
4390 let optional_precision = self.parse_optional_precision();
4391 if self.parse_keyword(Keyword::UNSIGNED) {
4392 Ok(DataType::UnsignedSmallInt(optional_precision?))
4393 } else {
4394 Ok(DataType::SmallInt(optional_precision?))
4395 }
4396 }
4397 Keyword::MEDIUMINT => {
4398 let optional_precision = self.parse_optional_precision();
4399 if self.parse_keyword(Keyword::UNSIGNED) {
4400 Ok(DataType::UnsignedMediumInt(optional_precision?))
4401 } else {
4402 Ok(DataType::MediumInt(optional_precision?))
4403 }
4404 }
4405 Keyword::INT => {
4406 let optional_precision = self.parse_optional_precision();
4407 if self.parse_keyword(Keyword::UNSIGNED) {
4408 Ok(DataType::UnsignedInt(optional_precision?))
4409 } else {
4410 Ok(DataType::Int(optional_precision?))
4411 }
4412 }
4413 Keyword::INTEGER => {
4414 let optional_precision = self.parse_optional_precision();
4415 if self.parse_keyword(Keyword::UNSIGNED) {
4416 Ok(DataType::UnsignedInteger(optional_precision?))
4417 } else {
4418 Ok(DataType::Integer(optional_precision?))
4419 }
4420 }
4421 Keyword::BIGINT => {
4422 let optional_precision = self.parse_optional_precision();
4423 if self.parse_keyword(Keyword::UNSIGNED) {
4424 Ok(DataType::UnsignedBigInt(optional_precision?))
4425 } else {
4426 Ok(DataType::BigInt(optional_precision?))
4427 }
4428 }
4429 Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
4430 Keyword::NVARCHAR => Ok(DataType::Nvarchar(self.parse_optional_precision()?)),
4431 Keyword::CHARACTER => {
4432 if self.parse_keyword(Keyword::VARYING) {
4433 Ok(DataType::CharacterVarying(
4434 self.parse_optional_character_length()?,
4435 ))
4436 } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
4437 Ok(DataType::CharacterLargeObject(
4438 self.parse_optional_precision()?,
4439 ))
4440 } else {
4441 Ok(DataType::Character(self.parse_optional_character_length()?))
4442 }
4443 }
4444 Keyword::CHAR => {
4445 if self.parse_keyword(Keyword::VARYING) {
4446 Ok(DataType::CharVarying(
4447 self.parse_optional_character_length()?,
4448 ))
4449 } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
4450 Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
4451 } else {
4452 Ok(DataType::Char(self.parse_optional_character_length()?))
4453 }
4454 }
4455 Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
4456 Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
4457 Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_precision()?)),
4458 Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
4459 Keyword::UUID => Ok(DataType::Uuid),
4460 Keyword::DATE => Ok(DataType::Date),
4461 Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
4462 Keyword::TIMESTAMP => {
4463 let precision = self.parse_optional_precision()?;
4464 let tz = if self.parse_keyword(Keyword::WITH) {
4465 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
4466 TimezoneInfo::WithTimeZone
4467 } else if self.parse_keyword(Keyword::WITHOUT) {
4468 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
4469 TimezoneInfo::WithoutTimeZone
4470 } else {
4471 TimezoneInfo::None
4472 };
4473 Ok(DataType::Timestamp(precision, tz))
4474 }
4475 Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
4476 self.parse_optional_precision()?,
4477 TimezoneInfo::Tz,
4478 )),
4479 Keyword::TIME => {
4480 let precision = self.parse_optional_precision()?;
4481 let tz = if self.parse_keyword(Keyword::WITH) {
4482 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
4483 TimezoneInfo::WithTimeZone
4484 } else if self.parse_keyword(Keyword::WITHOUT) {
4485 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
4486 TimezoneInfo::WithoutTimeZone
4487 } else {
4488 TimezoneInfo::None
4489 };
4490 Ok(DataType::Time(precision, tz))
4491 }
4492 Keyword::TIMETZ => Ok(DataType::Time(
4493 self.parse_optional_precision()?,
4494 TimezoneInfo::Tz,
4495 )),
4496 Keyword::INTERVAL => Ok(DataType::Interval),
4500 Keyword::JSON => Ok(DataType::JSON),
4501 Keyword::REGCLASS => Ok(DataType::Regclass),
4502 Keyword::STRING => Ok(DataType::String),
4503 Keyword::TEXT => Ok(DataType::Text),
4504 Keyword::BYTEA => Ok(DataType::Bytea),
4505 Keyword::NUMERIC => Ok(DataType::Numeric(
4506 self.parse_exact_number_optional_precision_scale()?,
4507 )),
4508 Keyword::DECIMAL => Ok(DataType::Decimal(
4509 self.parse_exact_number_optional_precision_scale()?,
4510 )),
4511 Keyword::DEC => Ok(DataType::Dec(
4512 self.parse_exact_number_optional_precision_scale()?,
4513 )),
4514 Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
4515 self.parse_exact_number_optional_precision_scale()?,
4516 )),
4517 Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
4518 self.parse_exact_number_optional_precision_scale()?,
4519 )),
4520 Keyword::ENUM => Ok(DataType::Enum(self.parse_string_values()?)),
4521 Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
4522 Keyword::ARRAY => {
4523 if dialect_of!(self is SnowflakeDialect) {
4524 Ok(DataType::Array(None))
4525 } else {
4526 self.expect_token(&Token::Lt)?;
4530 let inside_type = self.parse_data_type()?;
4531 self.expect_token(&Token::Gt)?;
4532 Ok(DataType::Array(Some(Box::new(inside_type))))
4533 }
4534 }
4535 _ => {
4536 self.prev_token();
4537 let type_name = self.parse_object_name()?;
4538 if let Some(modifiers) = self.parse_optional_type_modifiers()? {
4539 Ok(DataType::Custom(type_name, modifiers))
4540 } else {
4541 Ok(DataType::Custom(type_name, vec![]))
4542 }
4543 }
4544 },
4545 _ => self.expected("a data type name", next_token),
4546 }?;
4547
4548 while self.consume_token(&Token::LBracket) {
4551 self.expect_token(&Token::RBracket)?;
4552 data = DataType::Array(Some(Box::new(data)))
4553 }
4554 Ok(data)
4555 }
4556
4557 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
4558 self.expect_token(&Token::LParen)?;
4559 let mut values = Vec::new();
4560 loop {
4561 let next_token = self.next_token();
4562 match next_token.token {
4563 Token::SingleQuotedString(value) => values.push(value),
4564 _ => self.expected("a string", next_token)?,
4565 }
4566 let next_token = self.next_token();
4567 match next_token.token {
4568 Token::Comma => (),
4569 Token::RParen => break,
4570 _ => self.expected(", or }", next_token)?,
4571 }
4572 }
4573 Ok(values)
4574 }
4575
4576 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
4578 let ident = self.parse_identifier()?;
4579 self.expect_keyword(Keyword::AS)?;
4580 let alias = self.parse_identifier()?;
4581 Ok(IdentWithAlias { ident, alias })
4582 }
4583
4584 pub fn parse_optional_alias(
4588 &mut self,
4589 reserved_kwds: &[Keyword],
4590 ) -> Result<Option<Ident>, ParserError> {
4591 let after_as = self.parse_keyword(Keyword::AS);
4592 let next_token = self.next_token();
4593 match next_token.token {
4594 Token::Word(w) if after_as || !reserved_kwds.contains(&w.keyword) => {
4600 Ok(Some(w.to_ident()))
4601 }
4602 Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
4615 Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
4617 _ => {
4618 if after_as {
4619 return self.expected("an identifier after AS", next_token);
4620 }
4621 self.prev_token();
4622 Ok(None)
4623 }
4624 }
4625 }
4626
4627 pub fn parse_optional_table_alias(
4632 &mut self,
4633 reserved_kwds: &[Keyword],
4634 ) -> Result<Option<TableAlias>, ParserError> {
4635 match self.parse_optional_alias(reserved_kwds)? {
4636 Some(name) => {
4637 let columns = self.parse_parenthesized_column_list(Optional, false)?;
4638 Ok(Some(TableAlias { name, columns }))
4639 }
4640 None => Ok(None),
4641 }
4642 }
4643
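/// Parse a possibly qualified, possibly quoted object name, e.g. `foo` or
/// `myschema."table"`, as a period-separated list of identifiers.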
4644 pub fn parse_object_name(&mut self) -> Result<ObjectName, ParserError> {
4647 let mut idents = vec![];
4648 loop {
4649 idents.push(self.parse_identifier()?);
4650 if !self.consume_token(&Token::Period) {
4651 break;
4652 }
4653 }
4654 Ok(ObjectName(idents))
4655 }
4656
4657 pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
4659 let mut idents = vec![];
4660 loop {
4661 match self.peek_token().token {
4662 Token::Word(w) => {
4663 idents.push(w.to_ident());
4664 }
4665 Token::EOF | Token::Eq => break,
4666 _ => {}
4667 }
4668 self.next_token();
4669 }
4670 Ok(idents)
4671 }
4672
4673 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
4675 let next_token = self.next_token();
4676 match next_token.token {
4677 Token::Word(w) => Ok(w.to_ident()),
4678 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
4679 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
4680 _ => self.expected("identifier", next_token),
4681 }
4682 }
4683
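/// Parse a parenthesized, comma-separated list of column identifiers.
/// When `optional` is `Optional` a missing list yields an empty `Vec`;
/// when `allow_empty` is true the degenerate `()` form is accepted.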
4684 pub fn parse_parenthesized_column_list(
4686 &mut self,
4687 optional: IsOptional,
4688 allow_empty: bool,
4689 ) -> Result<Vec<Ident>, ParserError> {
4690 if self.consume_token(&Token::LParen) {
4691 if allow_empty && self.peek_token().token == Token::RParen {
4692 self.next_token();
4693 Ok(vec![])
4694 } else {
4695 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
4696 self.expect_token(&Token::RParen)?;
4697 Ok(cols)
4698 }
4699 } else if optional == Optional {
4700 Ok(vec![])
4701 } else {
4702 self.expected("a list of columns in parentheses", self.peek_token())
4703 }
4704 }
4705
4706 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
4707 self.expect_token(&Token::LParen)?;
4708 let n = self.parse_literal_uint()?;
4709 self.expect_token(&Token::RParen)?;
4710 Ok(n)
4711 }
4712
4713 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
4714 if self.consume_token(&Token::LParen) {
4715 let n = self.parse_literal_uint()?;
4716 self.expect_token(&Token::RParen)?;
4717 Ok(Some(n))
4718 } else {
4719 Ok(None)
4720 }
4721 }
4722
4723 pub fn parse_optional_character_length(
4724 &mut self,
4725 ) -> Result<Option<CharacterLength>, ParserError> {
4726 if self.consume_token(&Token::LParen) {
4727 let character_length = self.parse_character_length()?;
4728 self.expect_token(&Token::RParen)?;
4729 Ok(Some(character_length))
4730 } else {
4731 Ok(None)
4732 }
4733 }
4734
4735 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
4736 let length = self.parse_literal_uint()?;
4737 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
4738 Some(CharLengthUnits::Characters)
4739 } else if self.parse_keyword(Keyword::OCTETS) {
4740 Some(CharLengthUnits::Octets)
4741 } else {
4742 None
4743 };
4744
4745 Ok(CharacterLength { length, unit })
4746 }
4747
4748 pub fn parse_optional_precision_scale(
4749 &mut self,
4750 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
4751 if self.consume_token(&Token::LParen) {
4752 let n = self.parse_literal_uint()?;
4753 let scale = if self.consume_token(&Token::Comma) {
4754 Some(self.parse_literal_uint()?)
4755 } else {
4756 None
4757 };
4758 self.expect_token(&Token::RParen)?;
4759 Ok((Some(n), scale))
4760 } else {
4761 Ok((None, None))
4762 }
4763 }
4764
4765 pub fn parse_exact_number_optional_precision_scale(
4766 &mut self,
4767 ) -> Result<ExactNumberInfo, ParserError> {
4768 if self.consume_token(&Token::LParen) {
4769 let precision = self.parse_literal_uint()?;
4770 let scale = if self.consume_token(&Token::Comma) {
4771 Some(self.parse_literal_uint()?)
4772 } else {
4773 None
4774 };
4775
4776 self.expect_token(&Token::RParen)?;
4777
4778 match scale {
4779 None => Ok(ExactNumberInfo::Precision(precision)),
4780 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
4781 }
4782 } else {
4783 Ok(ExactNumberInfo::None)
4784 }
4785 }
4786
4787 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
4788 if self.consume_token(&Token::LParen) {
4789 let mut modifiers = Vec::new();
4790 loop {
4791 let next_token = self.next_token();
4792 match next_token.token {
4793 Token::Word(w) => modifiers.push(w.to_string()),
4794 Token::Number(n, _) => modifiers.push(n),
4795 Token::SingleQuotedString(s) => modifiers.push(s),
4796
4797 Token::Comma => {
4798 continue;
4799 }
4800 Token::RParen => {
4801 break;
4802 }
4803 _ => self.expected("type modifiers", next_token)?,
4804 }
4805 }
4806
4807 Ok(Some(modifiers))
4808 } else {
4809 Ok(None)
4810 }
4811 }
4812
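/// Parse a `DELETE FROM <table> [USING <table>] [WHERE <expr>]
/// [RETURNING <items>]` statement; the `DELETE` keyword has already been
/// consumed by the caller.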
4813 pub fn parse_delete(&mut self) -> Result<Statement, ParserError> {
4814 self.expect_keyword(Keyword::FROM)?;
4815 let table_name = self.parse_table_factor()?;
4816 let using = if self.parse_keyword(Keyword::USING) {
4817 Some(self.parse_table_factor()?)
4818 } else {
4819 None
4820 };
4821 let selection = if self.parse_keyword(Keyword::WHERE) {
4822 Some(self.parse_expr()?)
4823 } else {
4824 None
4825 };
4826
4827 let returning = if self.parse_keyword(Keyword::RETURNING) {
4828 Some(self.parse_comma_separated(Parser::parse_select_item)?)
4829 } else {
4830 None
4831 };
4832
4833 Ok(Statement::Delete {
4834 table_name,
4835 using,
4836 selection,
4837 returning,
4838 })
4839 }
4840
4841 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
4843 let modifier_keyword =
4844 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
4845
4846 let id = self.parse_literal_uint()?;
4847
4848 let modifier = match modifier_keyword {
4849 Some(Keyword::CONNECTION) => Some(KillType::Connection),
4850 Some(Keyword::QUERY) => Some(KillType::Query),
4851 Some(Keyword::MUTATION) => {
4852 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
4853 Some(KillType::Mutation)
4854 } else {
4855 self.expected(
4856 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
4857 self.peek_token(),
4858 )?
4859 }
4860 }
4861 _ => None,
4862 };
4863
4864 Ok(Statement::Kill { modifier, id })
4865 }
4866
4867 pub fn parse_explain(&mut self, describe_alias: bool) -> Result<Statement, ParserError> {
4868 let analyze = self.parse_keyword(Keyword::ANALYZE);
4869 let verbose = self.parse_keyword(Keyword::VERBOSE);
4870 let mut format = None;
4871 if self.parse_keyword(Keyword::FORMAT) {
4872 format = Some(self.parse_analyze_format()?);
4873 }
4874
4875 match self.maybe_parse(|parser| parser.parse_statement()) {
4876 Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
4877 ParserError::ParserError("Explain must be root of the plan".to_string()),
4878 ),
4879 Some(statement) => Ok(Statement::Explain {
4880 describe_alias,
4881 analyze,
4882 verbose,
4883 statement: Box::new(statement),
4884 format,
4885 }),
4886 _ => {
4887 let table_name = self.parse_object_name()?;
4888 Ok(Statement::ExplainTable {
4889 describe_alias,
4890 table_name,
4891 })
4892 }
4893 }
4894 }
4895
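/// Parse a query expression: an optional `WITH` clause followed by the
/// query body and any trailing `ORDER BY`, `LIMIT`/`OFFSET`, `FETCH` and
/// `FOR ...` locking clauses. The recursion counter is decreased on entry
/// so that deeply nested queries fail with
/// `ParserError::RecursionLimitExceeded` rather than overflowing the stack.
///
/// Minimal usage sketch (illustrative only; it goes through the crate's
/// public `Parser::parse_sql` entry point rather than calling this method
/// directly):
///
/// ```ignore
/// use sqlparser::dialect::GenericDialect;
/// use sqlparser::parser::Parser;
///
/// let statements = Parser::parse_sql(
///     &GenericDialect {},
///     "WITH t AS (SELECT 1 AS x) SELECT x FROM t ORDER BY x LIMIT 10",
/// )
/// .unwrap();
/// assert_eq!(statements.len(), 1);
/// ```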
4896 pub fn parse_query(&mut self) -> Result<Query, ParserError> {
4901 let _guard = self.recursion_counter.try_decrease()?;
4902 let with = if self.parse_keyword(Keyword::WITH) {
4903 Some(With {
4904 recursive: self.parse_keyword(Keyword::RECURSIVE),
4905 cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
4906 })
4907 } else {
4908 None
4909 };
4910
4911 if !self.parse_keyword(Keyword::INSERT) {
4912 let body = Box::new(self.parse_query_body(0)?);
4913
4914 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
4915 self.parse_comma_separated(Parser::parse_order_by_expr)?
4916 } else {
4917 vec![]
4918 };
4919
4920 let mut limit = None;
4921 let mut offset = None;
4922
4923 for _x in 0..2 {
4924 if limit.is_none() && self.parse_keyword(Keyword::LIMIT) {
4925 limit = self.parse_limit()?
4926 }
4927
4928 if offset.is_none() && self.parse_keyword(Keyword::OFFSET) {
4929 offset = Some(self.parse_offset()?)
4930 }
4931
4932 if dialect_of!(self is GenericDialect | MySqlDialect)
4933 && limit.is_some()
4934 && offset.is_none()
4935 && self.consume_token(&Token::Comma)
4936 {
4937 offset = Some(Offset {
4940 value: limit.unwrap(),
4941 rows: OffsetRows::None,
4942 });
4943 limit = Some(self.parse_expr()?);
4944 }
4945 }
4946
4947 let fetch = if self.parse_keyword(Keyword::FETCH) {
4948 Some(self.parse_fetch()?)
4949 } else {
4950 None
4951 };
4952
4953 let mut locks = Vec::new();
4954 while self.parse_keyword(Keyword::FOR) {
4955 locks.push(self.parse_lock()?);
4956 }
4957
4958 Ok(Query {
4959 with,
4960 body,
4961 order_by,
4962 limit,
4963 offset,
4964 fetch,
4965 locks,
4966 })
4967 } else {
4968 let insert = self.parse_insert()?;
4969
4970 Ok(Query {
4971 with,
4972 body: Box::new(SetExpr::Insert(insert)),
4973 limit: None,
4974 order_by: vec![],
4975 offset: None,
4976 fetch: None,
4977 locks: vec![],
4978 })
4979 }
4980 }
4981
4982 pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
4984 let name = self.parse_identifier()?;
4985
4986 let mut cte = if self.parse_keyword(Keyword::AS) {
4987 self.expect_token(&Token::LParen)?;
4988 let query = Box::new(self.parse_query()?);
4989 self.expect_token(&Token::RParen)?;
4990 let alias = TableAlias {
4991 name,
4992 columns: vec![],
4993 };
4994 Cte {
4995 alias,
4996 query,
4997 from: None,
4998 }
4999 } else {
5000 let columns = self.parse_parenthesized_column_list(Optional, false)?;
5001 self.expect_keyword(Keyword::AS)?;
5002 self.expect_token(&Token::LParen)?;
5003 let query = Box::new(self.parse_query()?);
5004 self.expect_token(&Token::RParen)?;
5005 let alias = TableAlias { name, columns };
5006 Cte {
5007 alias,
5008 query,
5009 from: None,
5010 }
5011 };
5012 if self.parse_keyword(Keyword::FROM) {
5013 cte.from = Some(self.parse_identifier()?);
5014 }
5015 Ok(cte)
5016 }
5017
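/// Parse the body of a query: a `SELECT`, a parenthesized subquery, a
/// `VALUES` list or a `TABLE` reference, possibly combined with set
/// operators. Set operators are handled by precedence climbing:
/// `INTERSECT` (precedence 20) binds tighter than `UNION`/`EXCEPT`
/// (precedence 10), so `a UNION b INTERSECT c` parses as
/// `a UNION (b INTERSECT c)`.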
5018 pub fn parse_query_body(&mut self, precedence: u8) -> Result<SetExpr, ParserError> {
5027 let mut expr = if self.parse_keyword(Keyword::SELECT) {
5030 SetExpr::Select(Box::new(self.parse_select()?))
5031 } else if self.consume_token(&Token::LParen) {
5032 let subquery = self.parse_query()?;
5034 self.expect_token(&Token::RParen)?;
5035 SetExpr::Query(Box::new(subquery))
5036 } else if self.parse_keyword(Keyword::VALUES) {
5037 let is_mysql = dialect_of!(self is MySqlDialect);
5038 SetExpr::Values(self.parse_values(is_mysql)?)
5039 } else if self.parse_keyword(Keyword::TABLE) {
5040 SetExpr::Table(Box::new(self.parse_as_table()?))
5041 } else {
5042 return self.expected(
5043 "SELECT, VALUES, or a subquery in the query body",
5044 self.peek_token(),
5045 );
5046 };
5047
5048 loop {
5049 let op = self.parse_set_operator(&self.peek_token().token);
5051 let next_precedence = match op {
5052 Some(SetOperator::Union) | Some(SetOperator::Except) => 10,
5054 Some(SetOperator::Intersect) => 20,
5056 None => break,
5058 };
5059 if precedence >= next_precedence {
5060 break;
5061 }
5062 self.next_token(); // consume the set operator keyword
5063 let set_quantifier = self.parse_set_quantifier(&op);
5064 expr = SetExpr::SetOperation {
5065 left: Box::new(expr),
5066 op: op.unwrap(),
5067 set_quantifier,
5068 right: Box::new(self.parse_query_body(next_precedence)?),
5069 };
5070 }
5071
5072 Ok(expr)
5073 }
5074
5075 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
5076 match token {
5077 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
5078 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
5079 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
5080 _ => None,
5081 }
5082 }
5083
5084 pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
5085 match op {
5086 Some(SetOperator::Union) => {
5087 if self.parse_keyword(Keyword::ALL) {
5088 SetQuantifier::All
5089 } else if self.parse_keyword(Keyword::DISTINCT) {
5090 SetQuantifier::Distinct
5091 } else {
5092 SetQuantifier::None
5093 }
5094 }
5095 Some(SetOperator::Except) | Some(SetOperator::Intersect) => {
5096 if self.parse_keyword(Keyword::ALL) {
5097 SetQuantifier::All
5098 } else if self.parse_keyword(Keyword::DISTINCT) {
5099 SetQuantifier::Distinct
5100 } else {
5101 SetQuantifier::None
5102 }
5103 }
5104 _ => SetQuantifier::None,
5105 }
5106 }
5107
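/// Parse a restricted `SELECT` statement (no CTEs, set operators or
/// `ORDER BY`), assuming the leading `SELECT` keyword has already been
/// consumed: projection, `INTO`, `FROM`, lateral views, `WHERE`,
/// `GROUP BY`, `CLUSTER BY`, `DISTRIBUTE BY`, `SORT BY`, `HAVING` and
/// `QUALIFY`.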
5108 pub fn parse_select(&mut self) -> Result<Select, ParserError> {
5111 let distinct = self.parse_all_or_distinct()?;
5112
5113 let top = if self.parse_keyword(Keyword::TOP) {
5114 Some(self.parse_top()?)
5115 } else {
5116 None
5117 };
5118
5119 let projection = self.parse_projection()?;
5120
5121 let into = if self.parse_keyword(Keyword::INTO) {
5122 let temporary = self
5123 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
5124 .is_some();
5125 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
5126 let table = self.parse_keyword(Keyword::TABLE);
5127 let name = self.parse_object_name()?;
5128 Some(SelectInto {
5129 temporary,
5130 unlogged,
5131 table,
5132 name,
5133 })
5134 } else {
5135 None
5136 };
5137
5138 let from = if self.parse_keyword(Keyword::FROM) {
5144 self.parse_comma_separated(Parser::parse_table_and_joins)?
5145 } else {
5146 vec![]
5147 };
5148
5149 let mut lateral_views = vec![];
5150 loop {
5151 if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
5152 let outer = self.parse_keyword(Keyword::OUTER);
5153 let lateral_view = self.parse_expr()?;
5154 let lateral_view_name = self.parse_object_name()?;
5155 let lateral_col_alias = self
5156 .parse_comma_separated(|parser| {
5157 parser.parse_optional_alias(&[
5158 Keyword::WHERE,
5159 Keyword::GROUP,
5160 Keyword::CLUSTER,
5161 Keyword::HAVING,
5162 Keyword::LATERAL,
5163 ])
5164 })?
5165 .into_iter()
5166 .flatten()
5167 .collect();
5168
5169 lateral_views.push(LateralView {
5170 lateral_view,
5171 lateral_view_name,
5172 lateral_col_alias,
5173 outer,
5174 });
5175 } else {
5176 break;
5177 }
5178 }
5179
5180 let selection = if self.parse_keyword(Keyword::WHERE) {
5181 Some(self.parse_expr()?)
5182 } else {
5183 None
5184 };
5185
5186 let group_by = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
5187 self.parse_comma_separated(Parser::parse_group_by_expr)?
5188 } else {
5189 vec![]
5190 };
5191
5192 let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
5193 self.parse_comma_separated(Parser::parse_expr)?
5194 } else {
5195 vec![]
5196 };
5197
5198 let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
5199 self.parse_comma_separated(Parser::parse_expr)?
5200 } else {
5201 vec![]
5202 };
5203
5204 let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
5205 self.parse_comma_separated(Parser::parse_expr)?
5206 } else {
5207 vec![]
5208 };
5209
5210 let having = if self.parse_keyword(Keyword::HAVING) {
5211 Some(self.parse_expr()?)
5212 } else {
5213 None
5214 };
5215
5216 let qualify = if self.parse_keyword(Keyword::QUALIFY) {
5217 Some(self.parse_expr()?)
5218 } else {
5219 None
5220 };
5221
5222 Ok(Select {
5223 distinct,
5224 top,
5225 projection,
5226 into,
5227 from,
5228 lateral_views,
5229 selection,
5230 group_by,
5231 cluster_by,
5232 distribute_by,
5233 sort_by,
5234 having,
5235 qualify,
5236 })
5237 }
5238
5239 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
5241 let token1 = self.next_token();
5242 let token2 = self.next_token();
5243 let token3 = self.next_token();
5244
5245 let table_name;
5246 let schema_name;
5247 if token2 == Token::Period {
5248 match token1.token {
5249 Token::Word(w) => {
5250 schema_name = w.value;
5251 }
5252 _ => {
5253 return self.expected("Schema name", token1);
5254 }
5255 }
5256 match token3.token {
5257 Token::Word(w) => {
5258 table_name = w.value;
5259 }
5260 _ => {
5261 return self.expected("Table name", token3);
5262 }
5263 }
5264 Ok(Table {
5265 table_name: Some(table_name),
5266 schema_name: Some(schema_name),
5267 })
5268 } else {
5269 match token1.token {
5270 Token::Word(w) => {
5271 table_name = w.value;
5272 }
5273 _ => {
5274 return self.expected("Table name", token1);
5275 }
5276 }
5277 Ok(Table {
5278 table_name: Some(table_name),
5279 schema_name: None,
5280 })
5281 }
5282 }
5283
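/// Parse a `SET` statement: `SET ROLE`, `SET NAMES`, `SET TIME ZONE`,
/// `SET [SESSION CHARACTERISTICS AS] TRANSACTION ...`, Hive's
/// `SET HIVEVAR:<name> = <value>`, or a plain
/// `SET [SESSION | LOCAL] <variable> { TO | = } <value>[, ...]` assignment.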
5284 pub fn parse_set(&mut self) -> Result<Statement, ParserError> {
5285 let modifier =
5286 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::HIVEVAR]);
5287 if let Some(Keyword::HIVEVAR) = modifier {
5288 self.expect_token(&Token::Colon)?;
5289 } else if self.parse_keyword(Keyword::ROLE) {
5290 let context_modifier = match modifier {
5291 Some(keyword) if keyword == Keyword::LOCAL => ContextModifier::Local,
5292 Some(keyword) if keyword == Keyword::SESSION => ContextModifier::Session,
5293 _ => ContextModifier::None,
5294 };
5295
5296 let role_name = if self.parse_keyword(Keyword::NONE) {
5297 None
5298 } else {
5299 Some(self.parse_identifier()?)
5300 };
5301 return Ok(Statement::SetRole {
5302 context_modifier,
5303 role_name,
5304 });
5305 }
5306
5307 let variable = if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE]) {
5308 ObjectName(vec!["TIMEZONE".into()])
5309 } else {
5310 self.parse_object_name()?
5311 };
5312
5313 if variable.to_string().eq_ignore_ascii_case("NAMES")
5314 && dialect_of!(self is MySqlDialect | GenericDialect)
5315 {
5316 if self.parse_keyword(Keyword::DEFAULT) {
5317 return Ok(Statement::SetNamesDefault {});
5318 }
5319
5320 let charset_name = self.parse_literal_string()?;
5321 let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
5322 Some(self.parse_literal_string()?)
5323 } else {
5324 None
5325 };
5326
5327 Ok(Statement::SetNames {
5328 charset_name,
5329 collation_name,
5330 })
5331 } else if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
5332 let mut values = vec![];
5333 loop {
5334 let value = if let Ok(expr) = self.parse_expr() {
5335 expr
5336 } else {
5337 self.expected("variable value", self.peek_token())?
5338 };
5339
5340 values.push(value);
5341 if self.consume_token(&Token::Comma) {
5342 continue;
5343 }
5344 return Ok(Statement::SetVariable {
5345 local: modifier == Some(Keyword::LOCAL),
5346 hivevar: Some(Keyword::HIVEVAR) == modifier,
5347 variable,
5348 value: values,
5349 });
5350 }
5351 } else if variable.to_string().eq_ignore_ascii_case("TIMEZONE") {
5352 match self.parse_expr() {
5354 Ok(expr) => Ok(Statement::SetTimeZone {
5355 local: modifier == Some(Keyword::LOCAL),
5356 value: expr,
5357 }),
5358 _ => self.expected("timezone value", self.peek_token())?,
5359 }
5360 } else if variable.to_string() == "CHARACTERISTICS" {
5361 self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
5362 Ok(Statement::SetTransaction {
5363 modes: self.parse_transaction_modes()?,
5364 snapshot: None,
5365 session: true,
5366 })
5367 } else if variable.to_string() == "TRANSACTION" && modifier.is_none() {
5368 if self.parse_keyword(Keyword::SNAPSHOT) {
5369 let snapshot_id = self.parse_value()?;
5370 return Ok(Statement::SetTransaction {
5371 modes: vec![],
5372 snapshot: Some(snapshot_id),
5373 session: false,
5374 });
5375 }
5376 Ok(Statement::SetTransaction {
5377 modes: self.parse_transaction_modes()?,
5378 snapshot: None,
5379 session: false,
5380 })
5381 } else {
5382 self.expected("equals sign or TO", self.peek_token())
5383 }
5384 }
5385
5386 pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
5387 let extended = self.parse_keyword(Keyword::EXTENDED);
5388 let full = self.parse_keyword(Keyword::FULL);
5389 if self
5390 .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
5391 .is_some()
5392 {
5393 Ok(self.parse_show_columns(extended, full)?)
5394 } else if self.parse_keyword(Keyword::TABLES) {
5395 Ok(self.parse_show_tables(extended, full)?)
5396 } else if self.parse_keyword(Keyword::FUNCTIONS) {
5397 Ok(self.parse_show_functions()?)
5398 } else if extended || full {
5399 Err(ParserError::ParserError(
5400 "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
5401 ))
5402 } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
5403 Ok(self.parse_show_create()?)
5404 } else if self.parse_keyword(Keyword::COLLATION) {
5405 Ok(self.parse_show_collation()?)
5406 } else if self.parse_keyword(Keyword::VARIABLES)
5407 && dialect_of!(self is MySqlDialect | GenericDialect)
5408 {
5409 Ok(Statement::ShowVariables {
5411 filter: self.parse_show_statement_filter()?,
5412 })
5413 } else {
5414 Ok(Statement::ShowVariable {
5415 variable: self.parse_identifiers()?,
5416 })
5417 }
5418 }
5419
5420 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
5421 let obj_type = match self.expect_one_of_keywords(&[
5422 Keyword::TABLE,
5423 Keyword::TRIGGER,
5424 Keyword::FUNCTION,
5425 Keyword::PROCEDURE,
5426 Keyword::EVENT,
5427 Keyword::VIEW,
5428 ])? {
5429 Keyword::TABLE => Ok(ShowCreateObject::Table),
5430 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
5431 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
5432 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
5433 Keyword::EVENT => Ok(ShowCreateObject::Event),
5434 Keyword::VIEW => Ok(ShowCreateObject::View),
5435 keyword => Err(ParserError::ParserError(format!(
5436 "Unable to map keyword to ShowCreateObject: {keyword:?}"
5437 ))),
5438 }?;
5439
5440 let obj_name = self.parse_object_name()?;
5441
5442 Ok(Statement::ShowCreate { obj_type, obj_name })
5443 }
5444
5445 pub fn parse_show_columns(
5446 &mut self,
5447 extended: bool,
5448 full: bool,
5449 ) -> Result<Statement, ParserError> {
5450 self.expect_one_of_keywords(&[Keyword::FROM, Keyword::IN])?;
5451 let object_name = self.parse_object_name()?;
5452 let table_name = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
5453 Some(_) => {
5454 let db_name = vec![self.parse_identifier()?];
5455 let ObjectName(table_name) = object_name;
5456 let object_name = db_name.into_iter().chain(table_name.into_iter()).collect();
5457 ObjectName(object_name)
5458 }
5459 None => object_name,
5460 };
5461 let filter = self.parse_show_statement_filter()?;
5462 Ok(Statement::ShowColumns {
5463 extended,
5464 full,
5465 table_name,
5466 filter,
5467 })
5468 }
5469
5470 pub fn parse_show_tables(
5471 &mut self,
5472 extended: bool,
5473 full: bool,
5474 ) -> Result<Statement, ParserError> {
5475 let db_name = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
5476 Some(_) => Some(self.parse_identifier()?),
5477 None => None,
5478 };
5479 let filter = self.parse_show_statement_filter()?;
5480 Ok(Statement::ShowTables {
5481 extended,
5482 full,
5483 db_name,
5484 filter,
5485 })
5486 }
5487
5488 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
5489 let filter = self.parse_show_statement_filter()?;
5490 Ok(Statement::ShowFunctions { filter })
5491 }
5492
5493 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
5494 let filter = self.parse_show_statement_filter()?;
5495 Ok(Statement::ShowCollation { filter })
5496 }
5497
5498 pub fn parse_show_statement_filter(
5499 &mut self,
5500 ) -> Result<Option<ShowStatementFilter>, ParserError> {
5501 if self.parse_keyword(Keyword::LIKE) {
5502 Ok(Some(ShowStatementFilter::Like(
5503 self.parse_literal_string()?,
5504 )))
5505 } else if self.parse_keyword(Keyword::ILIKE) {
5506 Ok(Some(ShowStatementFilter::ILike(
5507 self.parse_literal_string()?,
5508 )))
5509 } else if self.parse_keyword(Keyword::WHERE) {
5510 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
5511 } else {
5512 Ok(None)
5513 }
5514 }
5515
5516 pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
5517 let db_name = self.parse_identifier()?;
5518 Ok(Statement::Use { db_name })
5519 }
5520
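/// Parse a table factor followed by any number of joins: `CROSS JOIN`,
/// `CROSS APPLY`/`OUTER APPLY`, or `[NATURAL] [INNER | LEFT | RIGHT |
/// FULL] [OUTER | SEMI | ANTI] JOIN` with an optional `ON`/`USING`
/// constraint.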
5521 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
5522 let relation = self.parse_table_factor()?;
5523 let mut joins = vec![];
5527 loop {
5528 let join = if self.parse_keyword(Keyword::CROSS) {
5529 let join_operator = if self.parse_keyword(Keyword::JOIN) {
5530 JoinOperator::CrossJoin
5531 } else if self.parse_keyword(Keyword::APPLY) {
5532 JoinOperator::CrossApply
5534 } else {
5535 return self.expected("JOIN or APPLY after CROSS", self.peek_token());
5536 };
5537 Join {
5538 relation: self.parse_table_factor()?,
5539 join_operator,
5540 }
5541 } else if self.parse_keyword(Keyword::OUTER) {
5542 self.expect_keyword(Keyword::APPLY)?;
5544 Join {
5545 relation: self.parse_table_factor()?,
5546 join_operator: JoinOperator::OuterApply,
5547 }
5548 } else {
5549 let natural = self.parse_keyword(Keyword::NATURAL);
5550 let peek_keyword = if let Token::Word(w) = self.peek_token().token {
5551 w.keyword
5552 } else {
5553 Keyword::NoKeyword
5554 };
5555
5556 let join_operator_type = match peek_keyword {
5557 Keyword::INNER | Keyword::JOIN => {
5558 let _ = self.parse_keyword(Keyword::INNER); // the INNER keyword is optional
5559 self.expect_keyword(Keyword::JOIN)?;
5560 JoinOperator::Inner
5561 }
5562 kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
5563 let _ = self.next_token(); // consume the LEFT/RIGHT keyword
5564 let is_left = kw == Keyword::LEFT;
5565 let join_type = self.parse_one_of_keywords(&[
5566 Keyword::OUTER,
5567 Keyword::SEMI,
5568 Keyword::ANTI,
5569 Keyword::JOIN,
5570 ]);
5571 match join_type {
5572 Some(Keyword::OUTER) => {
5573 self.expect_keyword(Keyword::JOIN)?;
5574 if is_left {
5575 JoinOperator::LeftOuter
5576 } else {
5577 JoinOperator::RightOuter
5578 }
5579 }
5580 Some(Keyword::SEMI) => {
5581 self.expect_keyword(Keyword::JOIN)?;
5582 if is_left {
5583 JoinOperator::LeftSemi
5584 } else {
5585 JoinOperator::RightSemi
5586 }
5587 }
5588 Some(Keyword::ANTI) => {
5589 self.expect_keyword(Keyword::JOIN)?;
5590 if is_left {
5591 JoinOperator::LeftAnti
5592 } else {
5593 JoinOperator::RightAnti
5594 }
5595 }
5596 Some(Keyword::JOIN) => {
5597 if is_left {
5598 JoinOperator::LeftOuter
5599 } else {
5600 JoinOperator::RightOuter
5601 }
5602 }
5603 _ => {
5604 return Err(ParserError::ParserError(format!(
5605 "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
5606 )))
5607 }
5608 }
5609 }
5610 Keyword::FULL => {
5611 let _ = self.next_token(); // consume the FULL keyword
5612 let _ = self.parse_keyword(Keyword::OUTER); // the OUTER keyword is optional
5613 self.expect_keyword(Keyword::JOIN)?;
5614 JoinOperator::FullOuter
5615 }
5616 Keyword::OUTER => {
5617 return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
5618 }
5619 _ if natural => {
5620 return self.expected("a join type after NATURAL", self.peek_token());
5621 }
5622 _ => break,
5623 };
5624 let relation = self.parse_table_factor()?;
5625 let join_constraint = self.parse_join_constraint(natural)?;
5626 Join {
5627 relation,
5628 join_operator: join_operator_type(join_constraint),
5629 }
5630 };
5631 joins.push(join);
5632 }
5633 Ok(TableWithJoins { relation, joins })
5634 }
5635
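/// Parse a single table factor: a `LATERAL` derived table, a
/// `TABLE(<expr>)` table function, a parenthesized subquery or nested
/// join, an `UNNEST(<expr>)` call (BigQuery-style), or a plain table name
/// with optional arguments, alias and `WITH (<hints>)`.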
5636 pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
5638 if self.parse_keyword(Keyword::LATERAL) {
5639 if !self.consume_token(&Token::LParen) {
5641 self.expected("subquery after LATERAL", self.peek_token())?;
5642 }
5643 self.parse_derived_table_factor(Lateral)
5644 } else if self.parse_keyword(Keyword::TABLE) {
5645 self.expect_token(&Token::LParen)?;
5647 let expr = self.parse_expr()?;
5648 self.expect_token(&Token::RParen)?;
5649 let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
5650 Ok(TableFactor::TableFunction { expr, alias })
5651 } else if self.consume_token(&Token::LParen) {
5652 return_ok_if_some!(
5674 self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))
5675 );
5676 let mut table_and_joins = self.parse_table_and_joins()?;
5683
5684 #[allow(clippy::if_same_then_else)]
5685 if !table_and_joins.joins.is_empty() {
5686 self.expect_token(&Token::RParen)?;
5687 let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
5688 Ok(TableFactor::NestedJoin {
5689 table_with_joins: Box::new(table_and_joins),
5690 alias,
5691 })
5692 } else if let TableFactor::NestedJoin {
5693 table_with_joins: _,
5694 alias: _,
5695 } = &table_and_joins.relation
5696 {
5697 self.expect_token(&Token::RParen)?;
5700 let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
5701 Ok(TableFactor::NestedJoin {
5702 table_with_joins: Box::new(table_and_joins),
5703 alias,
5704 })
5705 } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
5706 self.expect_token(&Token::RParen)?;
5713
5714 if let Some(outer_alias) =
5715 self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?
5716 {
5717 match &mut table_and_joins.relation {
5720 TableFactor::Derived { alias, .. }
5721 | TableFactor::Table { alias, .. }
5722 | TableFactor::UNNEST { alias, .. }
5723 | TableFactor::TableFunction { alias, .. }
5724 | TableFactor::NestedJoin { alias, .. } => {
5725 if let Some(inner_alias) = alias {
5727 return Err(ParserError::ParserError(format!(
5728 "duplicate alias {inner_alias}"
5729 )));
5730 }
5731 alias.replace(outer_alias);
5735 }
5736 };
5737 }
5738 Ok(table_and_joins.relation)
5740 } else {
5741 self.expected("joined table", self.peek_token())
5744 }
5745 } else if dialect_of!(self is BigQueryDialect | GenericDialect)
5746 && self.parse_keyword(Keyword::UNNEST)
5747 {
5748 self.expect_token(&Token::LParen)?;
5749 let expr = self.parse_expr()?;
5750 self.expect_token(&Token::RParen)?;
5751
5752 let alias = match self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS) {
5753 Ok(Some(alias)) => Some(alias),
5754 Ok(None) => None,
5755 Err(e) => return Err(e),
5756 };
5757
5758 let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
5759 Ok(()) => true,
5760 Err(_) => false,
5761 };
5762
5763 let with_offset_alias = if with_offset {
5764 match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
5765 Ok(Some(alias)) => Some(alias),
5766 Ok(None) => None,
5767 Err(e) => return Err(e),
5768 }
5769 } else {
5770 None
5771 };
5772
5773 Ok(TableFactor::UNNEST {
5774 alias,
5775 array_expr: Box::new(expr),
5776 with_offset,
5777 with_offset_alias,
5778 })
5779 } else {
5780 let name = self.parse_object_name()?;
5781 let args = if self.consume_token(&Token::LParen) {
5783 Some(self.parse_optional_args()?)
5784 } else {
5785 None
5786 };
5787 let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
5788 let mut with_hints = vec![];
5790 if self.parse_keyword(Keyword::WITH) {
5791 if self.consume_token(&Token::LParen) {
5792 with_hints = self.parse_comma_separated(Parser::parse_expr)?;
5793 self.expect_token(&Token::RParen)?;
5794 } else {
5795 self.prev_token();
5797 }
5798 };
5799 Ok(TableFactor::Table {
5800 name,
5801 alias,
5802 args,
5803 with_hints,
5804 })
5805 }
5806 }
5807
5808 pub fn parse_derived_table_factor(
5809 &mut self,
5810 lateral: IsLateral,
5811 ) -> Result<TableFactor, ParserError> {
5812 let subquery = Box::new(self.parse_query()?);
5813 self.expect_token(&Token::RParen)?;
5814 let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
5815 Ok(TableFactor::Derived {
5816 lateral: match lateral {
5817 Lateral => true,
5818 NotLateral => false,
5819 },
5820 subquery,
5821 alias,
5822 })
5823 }
5824
5825 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
5826 if natural {
5827 Ok(JoinConstraint::Natural)
5828 } else if self.parse_keyword(Keyword::ON) {
5829 let constraint = self.parse_expr()?;
5830 Ok(JoinConstraint::On(constraint))
5831 } else if self.parse_keyword(Keyword::USING) {
5832 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
5833 Ok(JoinConstraint::Using(columns))
5834 } else {
5835 Ok(JoinConstraint::None)
5836 }
5838 }
5839
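/// Parse a `GRANT <privileges> ON <objects> TO <grantees>
/// [WITH GRANT OPTION] [GRANTED BY <name>]` statement.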
5840 pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
5842 let (privileges, objects) = self.parse_grant_revoke_privileges_objects()?;
5843
5844 self.expect_keyword(Keyword::TO)?;
5845 let grantees = self.parse_comma_separated(Parser::parse_identifier)?;
5846
5847 let with_grant_option =
5848 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
5849
5850 let granted_by = self
5851 .parse_keywords(&[Keyword::GRANTED, Keyword::BY])
5852 .then(|| self.parse_identifier().unwrap());
5853
5854 Ok(Statement::Grant {
5855 privileges,
5856 objects,
5857 grantees,
5858 with_grant_option,
5859 granted_by,
5860 })
5861 }
5862
5863 pub fn parse_grant_revoke_privileges_objects(
5864 &mut self,
5865 ) -> Result<(Privileges, GrantObjects), ParserError> {
5866 let privileges = if self.parse_keyword(Keyword::ALL) {
5867 Privileges::All {
5868 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
5869 }
5870 } else {
5871 let (actions, err): (Vec<_>, Vec<_>) = self
5872 .parse_comma_separated(Parser::parse_grant_permission)?
5873 .into_iter()
5874 .map(|(kw, columns)| match kw {
5875 Keyword::DELETE => Ok(Action::Delete),
5876 Keyword::INSERT => Ok(Action::Insert { columns }),
5877 Keyword::REFERENCES => Ok(Action::References { columns }),
5878 Keyword::SELECT => Ok(Action::Select { columns }),
5879 Keyword::TRIGGER => Ok(Action::Trigger),
5880 Keyword::TRUNCATE => Ok(Action::Truncate),
5881 Keyword::UPDATE => Ok(Action::Update { columns }),
5882 Keyword::USAGE => Ok(Action::Usage),
5883 Keyword::CONNECT => Ok(Action::Connect),
5884 Keyword::CREATE => Ok(Action::Create),
5885 Keyword::EXECUTE => Ok(Action::Execute),
5886 Keyword::TEMPORARY => Ok(Action::Temporary),
5887 _ => Err(kw),
5891 })
5892 .partition(Result::is_ok);
5893
5894 if !err.is_empty() {
5895 let errors: Vec<Keyword> = err.into_iter().filter_map(|x| x.err()).collect();
5896 return Err(ParserError::ParserError(format!(
5897 "INTERNAL ERROR: GRANT/REVOKE unexpected keyword(s) - {errors:?}"
5898 )));
5899 }
5900 let act = actions.into_iter().filter_map(|x| x.ok()).collect();
5901 Privileges::Actions(act)
5902 };
5903
5904 self.expect_keyword(Keyword::ON)?;
5905
5906 let objects = if self.parse_keywords(&[
5907 Keyword::ALL,
5908 Keyword::TABLES,
5909 Keyword::IN,
5910 Keyword::SCHEMA,
5911 ]) {
5912 GrantObjects::AllTablesInSchema {
5913 schemas: self.parse_comma_separated(Parser::parse_object_name)?,
5914 }
5915 } else if self.parse_keywords(&[
5916 Keyword::ALL,
5917 Keyword::SEQUENCES,
5918 Keyword::IN,
5919 Keyword::SCHEMA,
5920 ]) {
5921 GrantObjects::AllSequencesInSchema {
5922 schemas: self.parse_comma_separated(Parser::parse_object_name)?,
5923 }
5924 } else {
5925 let object_type =
5926 self.parse_one_of_keywords(&[Keyword::SEQUENCE, Keyword::SCHEMA, Keyword::TABLE]);
5927 let objects = self.parse_comma_separated(Parser::parse_object_name);
5928 match object_type {
5929 Some(Keyword::SCHEMA) => GrantObjects::Schemas(objects?),
5930 Some(Keyword::SEQUENCE) => GrantObjects::Sequences(objects?),
5931 Some(Keyword::TABLE) | None => GrantObjects::Tables(objects?),
5932 _ => unreachable!(),
5933 }
5934 };
5935
5936 Ok((privileges, objects))
5937 }
5938
5939 pub fn parse_grant_permission(&mut self) -> Result<(Keyword, Option<Vec<Ident>>), ParserError> {
5940 if let Some(kw) = self.parse_one_of_keywords(&[
5941 Keyword::CONNECT,
5942 Keyword::CREATE,
5943 Keyword::DELETE,
5944 Keyword::EXECUTE,
5945 Keyword::INSERT,
5946 Keyword::REFERENCES,
5947 Keyword::SELECT,
5948 Keyword::TEMPORARY,
5949 Keyword::TRIGGER,
5950 Keyword::TRUNCATE,
5951 Keyword::UPDATE,
5952 Keyword::USAGE,
5953 ]) {
5954 let columns = match kw {
5955 Keyword::INSERT | Keyword::REFERENCES | Keyword::SELECT | Keyword::UPDATE => {
5956 let columns = self.parse_parenthesized_column_list(Optional, false)?;
5957 if columns.is_empty() {
5958 None
5959 } else {
5960 Some(columns)
5961 }
5962 }
5963 _ => None,
5964 };
5965 Ok((kw, columns))
5966 } else {
5967 self.expected("a privilege keyword", self.peek_token())?
5968 }
5969 }
5970
5971 pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
5973 let (privileges, objects) = self.parse_grant_revoke_privileges_objects()?;
5974
5975 self.expect_keyword(Keyword::FROM)?;
5976 let grantees = self.parse_comma_separated(Parser::parse_identifier)?;
5977
5978 let granted_by = self
5979 .parse_keywords(&[Keyword::GRANTED, Keyword::BY])
5980 .then(|| self.parse_identifier().unwrap());
5981
5982 let cascade = self.parse_keyword(Keyword::CASCADE);
5983 let restrict = self.parse_keyword(Keyword::RESTRICT);
5984 if cascade && restrict {
5985 return parser_err!("Cannot specify both CASCADE and RESTRICT in REVOKE");
5986 }
5987
5988 Ok(Statement::Revoke {
5989 privileges,
5990 objects,
5991 grantees,
5992 granted_by,
5993 cascade,
5994 })
5995 }
5996
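/// Parse the remainder of an `INSERT` statement (the `INSERT` keyword has
/// already been consumed), including an optional SQLite `OR <conflict>`
/// clause: either the Hive-style
/// `INSERT [OVERWRITE] [LOCAL] DIRECTORY '<path>' [STORED AS <format>] <query>`
/// form, or `INSERT { INTO | OVERWRITE } [TABLE] <table> [(<columns>)]
/// [PARTITION (...)] <query>` with optional `ON CONFLICT ...` /
/// `ON DUPLICATE KEY UPDATE ...` handling and a `RETURNING` clause.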
5997 pub fn parse_insert(&mut self) -> Result<Statement, ParserError> {
5999 let or = if !dialect_of!(self is SQLiteDialect) {
6000 None
6001 } else if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
6002 Some(SqliteOnConflict::Replace)
6003 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
6004 Some(SqliteOnConflict::Rollback)
6005 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
6006 Some(SqliteOnConflict::Abort)
6007 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
6008 Some(SqliteOnConflict::Fail)
6009 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
6010 Some(SqliteOnConflict::Ignore)
6011 } else if self.parse_keyword(Keyword::REPLACE) {
6012 Some(SqliteOnConflict::Replace)
6013 } else {
6014 None
6015 };
6016
6017 let action = self.parse_one_of_keywords(&[Keyword::INTO, Keyword::OVERWRITE]);
6018 let into = action == Some(Keyword::INTO);
6019 let overwrite = action == Some(Keyword::OVERWRITE);
6020
6021 let local = self.parse_keyword(Keyword::LOCAL);
6022
6023 if self.parse_keyword(Keyword::DIRECTORY) {
6024 let path = self.parse_literal_string()?;
6025 let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
6026 Some(self.parse_file_format()?)
6027 } else {
6028 None
6029 };
6030 let source = Box::new(self.parse_query()?);
6031 Ok(Statement::Directory {
6032 local,
6033 path,
6034 overwrite,
6035 file_format,
6036 source,
6037 })
6038 } else {
6039 let table = self.parse_keyword(Keyword::TABLE);
6041 let table_name = self.parse_object_name()?;
6042 let is_mysql = dialect_of!(self is MySqlDialect);
6043 let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
6044
6045 let partitioned = if self.parse_keyword(Keyword::PARTITION) {
6046 self.expect_token(&Token::LParen)?;
6047 let r = Some(self.parse_comma_separated(Parser::parse_expr)?);
6048 self.expect_token(&Token::RParen)?;
6049 r
6050 } else {
6051 None
6052 };
6053
6054 let after_columns = self.parse_parenthesized_column_list(Optional, false)?;
6056
6057 let source = Box::new(self.parse_query()?);
6058 let on = if self.parse_keyword(Keyword::ON) {
6059 if self.parse_keyword(Keyword::CONFLICT) {
6060 let conflict_target =
6061 if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
6062 Some(ConflictTarget::OnConstraint(self.parse_object_name()?))
6063 } else if self.peek_token() == Token::LParen {
6064 Some(ConflictTarget::Columns(
6065 self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
6066 ))
6067 } else {
6068 None
6069 };
6070
6071 self.expect_keyword(Keyword::DO)?;
6072 let action = if self.parse_keyword(Keyword::NOTHING) {
6073 OnConflictAction::DoNothing
6074 } else {
6075 self.expect_keyword(Keyword::UPDATE)?;
6076 self.expect_keyword(Keyword::SET)?;
6077 let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
6078 let selection = if self.parse_keyword(Keyword::WHERE) {
6079 Some(self.parse_expr()?)
6080 } else {
6081 None
6082 };
6083 OnConflictAction::DoUpdate(DoUpdate {
6084 assignments,
6085 selection,
6086 })
6087 };
6088
6089 Some(OnInsert::OnConflict(OnConflict {
6090 conflict_target,
6091 action,
6092 }))
6093 } else {
6094 self.expect_keyword(Keyword::DUPLICATE)?;
6095 self.expect_keyword(Keyword::KEY)?;
6096 self.expect_keyword(Keyword::UPDATE)?;
6097 let l = self.parse_comma_separated(Parser::parse_assignment)?;
6098
6099 Some(OnInsert::DuplicateKeyUpdate(l))
6100 }
6101 } else {
6102 None
6103 };
6104
6105 let returning = if self.parse_keyword(Keyword::RETURNING) {
6106 Some(self.parse_comma_separated(Parser::parse_select_item)?)
6107 } else {
6108 None
6109 };
6110
6111 Ok(Statement::Insert {
6112 or,
6113 table_name,
6114 into,
6115 overwrite,
6116 partitioned,
6117 columns,
6118 after_columns,
6119 source,
6120 table,
6121 on,
6122 returning,
6123 })
6124 }
6125 }
6126
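/// Parse an `UPDATE` statement. Illustrative input, inferred from the code
/// below (the `FROM` clause is only accepted on dialects that support it):
/// `UPDATE t SET a = 1, b = b + 1 FROM u WHERE t.id = u.id RETURNING a`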
6127 pub fn parse_update(&mut self) -> Result<Statement, ParserError> {
6128 let table = self.parse_table_and_joins()?;
6129 self.expect_keyword(Keyword::SET)?;
6130 let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
6131 let from = if self.parse_keyword(Keyword::FROM)
6132 && dialect_of!(self is GenericDialect | PostgreSqlDialect | BigQueryDialect | SnowflakeDialect | RedshiftSqlDialect | MsSqlDialect)
6133 {
6134 Some(self.parse_table_and_joins()?)
6135 } else {
6136 None
6137 };
6138 let selection = if self.parse_keyword(Keyword::WHERE) {
6139 Some(self.parse_expr()?)
6140 } else {
6141 None
6142 };
6143 let returning = if self.parse_keyword(Keyword::RETURNING) {
6144 Some(self.parse_comma_separated(Parser::parse_select_item)?)
6145 } else {
6146 None
6147 };
6148 Ok(Statement::Update {
6149 table,
6150 assignments,
6151 from,
6152 selection,
6153 returning,
6154 })
6155 }
6156
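/// Parse a single `SET` assignment, e.g. `a = 1`; the left-hand side may be a
/// dotted identifier chain such as `t.a`.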
6157 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
6159 let id = self.parse_identifiers()?;
6160 self.expect_token(&Token::Eq)?;
6161 let value = self.parse_expr()?;
6162 Ok(Assignment { id, value })
6163 }
6164
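/// Parse one function call argument, which may use the named `name => value`
/// syntax. Illustrative forms: `1 + 2`, `x => 42`, `*`.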
6165 pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
6166 if self.peek_nth_token(1) == Token::RArrow {
6167 let name = self.parse_identifier()?;
6168
6169 self.expect_token(&Token::RArrow)?;
6170 let arg = self.parse_wildcard_expr()?.into();
6171
6172 Ok(FunctionArg::Named { name, arg })
6173 } else {
6174 Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
6175 }
6176 }
6177
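/// Parse the remaining arguments of a call after `(` has been consumed,
/// consuming the closing `)`; an immediately closing `)` yields an empty list.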
6178 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
6179 if self.consume_token(&Token::RParen) {
6180 Ok(vec![])
6181 } else {
6182 let args = self.parse_comma_separated(Parser::parse_function_args)?;
6183 self.expect_token(&Token::RParen)?;
6184 Ok(args)
6185 }
6186 }
6187
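/// Parse one projection item of a `SELECT` list: a wildcard, a qualified
/// wildcard, or an expression with an optional alias and, on dialects that
/// support it, an aggregate `FILTER` clause. Illustrative items:
/// `t.*`, `a + b AS total`, `SUM(x) FILTER (WHERE x > 0)`.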
6188 pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
6190 match self.parse_wildcard_expr()? {
6191 WildcardExpr::Expr(expr) => {
6192 let expr: Expr = if self.dialect.supports_filter_during_aggregation()
6193 && self.parse_keyword(Keyword::FILTER)
6194 {
6195 let i = self.index - 1;
6196 if self.consume_token(&Token::LParen) && self.parse_keyword(Keyword::WHERE) {
6197 let filter = self.parse_expr()?;
6198 self.expect_token(&Token::RParen)?;
6199 Expr::AggregateExpressionWithFilter {
6200 expr: Box::new(expr),
6201 filter: Box::new(filter),
6202 }
6203 } else {
6204 self.index = i;
6205 expr
6206 }
6207 } else {
6208 expr
6209 };
6210 self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS)
6211 .map(|alias| match alias {
6212 Some(alias) => SelectItem::ExprWithAlias { expr, alias },
6213 None => SelectItem::UnnamedExpr(expr),
6214 })
6215 }
6216 WildcardExpr::QualifiedWildcard(prefix) => Ok(SelectItem::QualifiedWildcard(
6217 prefix,
6218 self.parse_wildcard_additional_options()?,
6219 )),
6220 WildcardExpr::Wildcard => Ok(SelectItem::Wildcard(
6221 self.parse_wildcard_additional_options()?,
6222 )),
6223 }
6224 }
6225
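/// Parse the dialect-dependent modifiers that may follow a wildcard:
/// `EXCLUDE`, `EXCEPT`, `RENAME` and `REPLACE`. An illustrative input
/// (valid under `GenericDialect`) combining two of them:
/// `* EXCLUDE (a) REPLACE (b * 2 AS b)`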
6226 pub fn parse_wildcard_additional_options(
6230 &mut self,
6231 ) -> Result<WildcardAdditionalOptions, ParserError> {
6232 let opt_exclude = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
6233 self.parse_optional_select_item_exclude()?
6234 } else {
6235 None
6236 };
6237 let opt_except = if dialect_of!(self is GenericDialect | BigQueryDialect) {
6238 self.parse_optional_select_item_except()?
6239 } else {
6240 None
6241 };
6242 let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
6243 self.parse_optional_select_item_rename()?
6244 } else {
6245 None
6246 };
6247
6248 let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect) {
6249 self.parse_optional_select_item_replace()?
6250 } else {
6251 None
6252 };
6253
6254 Ok(WildcardAdditionalOptions {
6255 opt_exclude,
6256 opt_except,
6257 opt_rename,
6258 opt_replace,
6259 })
6260 }
6261
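/// Parse an optional Snowflake-style `EXCLUDE` wildcard modifier, e.g.
/// `EXCLUDE col_a` or `EXCLUDE (col_a, col_b)`, as suggested by the branches
/// below.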
6262 pub fn parse_optional_select_item_exclude(
6266 &mut self,
6267 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
6268 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
6269 if self.consume_token(&Token::LParen) {
6270 let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
6271 self.expect_token(&Token::RParen)?;
6272 Some(ExcludeSelectItem::Multiple(columns))
6273 } else {
6274 let column = self.parse_identifier()?;
6275 Some(ExcludeSelectItem::Single(column))
6276 }
6277 } else {
6278 None
6279 };
6280
6281 Ok(opt_exclude)
6282 }
6283
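/// Parse an optional BigQuery-style `EXCEPT` wildcard modifier, e.g.
/// `EXCEPT (col_a, col_b)`; at least one column is required.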
6284 pub fn parse_optional_select_item_except(
6288 &mut self,
6289 ) -> Result<Option<ExceptSelectItem>, ParserError> {
6290 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
6291 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
6292 match &idents[..] {
6293 [] => {
6294 return self.expected(
6295 "at least one column should be parsed by the expect clause",
6296 self.peek_token(),
6297 )?;
6298 }
6299 [first, idents @ ..] => Some(ExceptSelectItem {
6300 first_element: first.clone(),
6301 additional_elements: idents.to_vec(),
6302 }),
6303 }
6304 } else {
6305 None
6306 };
6307
6308 Ok(opt_except)
6309 }
6310
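/// Parse an optional Snowflake-style `RENAME` wildcard modifier, e.g.
/// `RENAME col_a AS new_a` or `RENAME (col_a AS new_a, col_b AS new_b)`.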
6311 pub fn parse_optional_select_item_rename(
6313 &mut self,
6314 ) -> Result<Option<RenameSelectItem>, ParserError> {
6315 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
6316 if self.consume_token(&Token::LParen) {
6317 let idents =
6318 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
6319 self.expect_token(&Token::RParen)?;
6320 Some(RenameSelectItem::Multiple(idents))
6321 } else {
6322 let ident = self.parse_identifier_with_alias()?;
6323 Some(RenameSelectItem::Single(ident))
6324 }
6325 } else {
6326 None
6327 };
6328
6329 Ok(opt_rename)
6330 }
6331
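/// Parse an optional BigQuery-style `REPLACE` wildcard modifier, e.g.
/// `REPLACE (quantity / 2 AS quantity)`; the parenthesized list is required.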
6332 pub fn parse_optional_select_item_replace(
6334 &mut self,
6335 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
6336 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
6337 if self.consume_token(&Token::LParen) {
6338 let items = self.parse_comma_separated(|parser| {
6339 Ok(Box::new(parser.parse_replace_elements()?))
6340 })?;
6341 self.expect_token(&Token::RParen)?;
6342 Some(ReplaceSelectItem { items })
6343 } else {
6344 let tok = self.next_token();
6345 return self.expected("( after REPLACE", tok);
6346 }
6347 } else {
6348 None
6349 };
6350
6351 Ok(opt_replace)
6352 }
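
/// Parse one element of a `REPLACE (...)` wildcard modifier, e.g.
/// `quantity / 2 AS quantity` (the `AS` keyword is optional here).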
6353 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
6354 let expr = self.parse_expr()?;
6355 let as_keyword = self.parse_keyword(Keyword::AS);
6356 let ident = self.parse_identifier()?;
6357 Ok(ReplaceSelectElement {
6358 expr,
6359 column_name: ident,
6360 as_keyword,
6361 })
6362 }
6363
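/// Parse one `ORDER BY` expression with its optional `ASC`/`DESC` and
/// `NULLS FIRST`/`NULLS LAST` modifiers, e.g. `created_at DESC NULLS LAST`.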
6364 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
6366 let expr = self.parse_expr()?;
6367
6368 let asc = if self.parse_keyword(Keyword::ASC) {
6369 Some(true)
6370 } else if self.parse_keyword(Keyword::DESC) {
6371 Some(false)
6372 } else {
6373 None
6374 };
6375
6376 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
6377 Some(true)
6378 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
6379 Some(false)
6380 } else {
6381 None
6382 };
6383
6384 Ok(OrderByExpr {
6385 expr,
6386 asc,
6387 nulls_first,
6388 })
6389 }
6390
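/// Parse a `TOP` clause (T-SQL style) after the `TOP` keyword has been
/// consumed, e.g. `TOP 10` or `TOP (5) PERCENT WITH TIES`.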
6391 pub fn parse_top(&mut self) -> Result<Top, ParserError> {
6394 let quantity = if self.consume_token(&Token::LParen) {
6395 let quantity = self.parse_expr()?;
6396 self.expect_token(&Token::RParen)?;
6397 Some(quantity)
6398 } else {
6399 Some(Expr::Value(self.parse_number_value()?))
6400 };
6401
6402 let percent = self.parse_keyword(Keyword::PERCENT);
6403
6404 let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
6405
6406 Ok(Top {
6407 with_ties,
6408 percent,
6409 quantity,
6410 })
6411 }
6412
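/// Parse the argument of a `LIMIT` clause; `LIMIT ALL` is represented as `None`.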
6413 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
6415 if self.parse_keyword(Keyword::ALL) {
6416 Ok(None)
6417 } else {
6418 Ok(Some(self.parse_expr()?))
6419 }
6420 }
6421
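/// Parse an `OFFSET` clause after the `OFFSET` keyword has been consumed,
/// e.g. `OFFSET 10 ROWS` or a bare `OFFSET 10`.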
6422 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
6424 let value = self.parse_expr()?;
6425 let rows = if self.parse_keyword(Keyword::ROW) {
6426 OffsetRows::Row
6427 } else if self.parse_keyword(Keyword::ROWS) {
6428 OffsetRows::Rows
6429 } else {
6430 OffsetRows::None
6431 };
6432 Ok(Offset { value, rows })
6433 }
6434
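/// Parse a `FETCH` clause after the `FETCH` keyword has been consumed, e.g.
/// `FETCH FIRST 10 ROWS ONLY` or `FETCH NEXT 5 PERCENT ROWS WITH TIES`.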
6435 pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
6437 self.expect_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT])?;
6438 let (quantity, percent) = if self
6439 .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
6440 .is_some()
6441 {
6442 (None, false)
6443 } else {
6444 let quantity = Expr::Value(self.parse_value()?);
6445 let percent = self.parse_keyword(Keyword::PERCENT);
6446 self.expect_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])?;
6447 (Some(quantity), percent)
6448 };
6449 let with_ties = if self.parse_keyword(Keyword::ONLY) {
6450 false
6451 } else if self.parse_keywords(&[Keyword::WITH, Keyword::TIES]) {
6452 true
6453 } else {
6454 return self.expected("one of ONLY or WITH TIES", self.peek_token());
6455 };
6456 Ok(Fetch {
6457 with_ties,
6458 percent,
6459 quantity,
6460 })
6461 }
6462
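/// Parse the remainder of a locking clause (the leading `FOR` is expected to
/// have been consumed by the caller), e.g. `FOR UPDATE OF t NOWAIT` or
/// `FOR SHARE SKIP LOCKED`.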
6463 pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
6465 let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
6466 Keyword::UPDATE => LockType::Update,
6467 Keyword::SHARE => LockType::Share,
6468 _ => unreachable!(),
6469 };
6470 let of = if self.parse_keyword(Keyword::OF) {
6471 Some(self.parse_object_name()?)
6472 } else {
6473 None
6474 };
6475 let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
6476 Some(NonBlock::Nowait)
6477 } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
6478 Some(NonBlock::SkipLocked)
6479 } else {
6480 None
6481 };
6482 Ok(LockClause {
6483 lock_type,
6484 of,
6485 nonblock,
6486 })
6487 }
6488
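/// Parse the row list of a `VALUES` clause (the `VALUES` keyword has already
/// been consumed), e.g. `(1, 'a'), (2, 'b')` or the explicit
/// `ROW(1, 'a'), ROW(2, 'b')` form; `allow_empty` permits `()` rows.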
6489 pub fn parse_values(&mut self, allow_empty: bool) -> Result<Values, ParserError> {
6490 let mut explicit_row = false;
6491
6492 let rows = self.parse_comma_separated(|parser| {
6493 if parser.parse_keyword(Keyword::ROW) {
6494 explicit_row = true;
6495 }
6496
6497 parser.expect_token(&Token::LParen)?;
6498 if allow_empty && parser.peek_token().token == Token::RParen {
6499 parser.next_token();
6500 Ok(vec![])
6501 } else {
6502 let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
6503 parser.expect_token(&Token::RParen)?;
6504 Ok(exprs)
6505 }
6506 })?;
6507 Ok(Values { explicit_row, rows })
6508 }
6509
6510 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
6511 self.expect_keyword(Keyword::TRANSACTION)?;
6512 Ok(Statement::StartTransaction {
6513 modes: self.parse_transaction_modes()?,
6514 })
6515 }
6516
6517 pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
6518 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
6519 Ok(Statement::StartTransaction {
6520 modes: self.parse_transaction_modes()?,
6521 })
6522 }
6523
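/// Parse a possibly empty, comma-separated list of transaction modes, e.g.
/// `ISOLATION LEVEL REPEATABLE READ, READ ONLY`.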
6524 pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
6525 let mut modes = vec![];
6526 let mut required = false;
6527 loop {
6528 let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
6529 let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
6530 TransactionIsolationLevel::ReadUncommitted
6531 } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
6532 TransactionIsolationLevel::ReadCommitted
6533 } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
6534 TransactionIsolationLevel::RepeatableRead
6535 } else if self.parse_keyword(Keyword::SERIALIZABLE) {
6536 TransactionIsolationLevel::Serializable
6537 } else {
6538 self.expected("isolation level", self.peek_token())?
6539 };
6540 TransactionMode::IsolationLevel(iso_level)
6541 } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
6542 TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
6543 } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
6544 TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
6545 } else if required {
6546 self.expected("transaction mode", self.peek_token())?
6547 } else {
6548 break;
6549 };
6550 modes.push(mode);
6551 required = self.consume_token(&Token::Comma);
6556 }
6557 Ok(modes)
6558 }
6559
6560 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
6561 Ok(Statement::Commit {
6562 chain: self.parse_commit_rollback_chain()?,
6563 })
6564 }
6565
6566 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
6567 Ok(Statement::Rollback {
6568 chain: self.parse_commit_rollback_chain()?,
6569 })
6570 }
6571
6572 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
6573 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
6574 if self.parse_keyword(Keyword::AND) {
6575 let chain = !self.parse_keyword(Keyword::NO);
6576 self.expect_keyword(Keyword::CHAIN)?;
6577 Ok(chain)
6578 } else {
6579 Ok(false)
6580 }
6581 }
6582
6583 pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
6584 let prepare = self.parse_keyword(Keyword::PREPARE);
6585 let name = self.parse_identifier()?;
6586 Ok(Statement::Deallocate { name, prepare })
6587 }
6588
6589 pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
6590 let name = self.parse_identifier()?;
6591
6592 let mut parameters = vec![];
6593 if self.consume_token(&Token::LParen) {
6594 parameters = self.parse_comma_separated(Parser::parse_expr)?;
6595 self.expect_token(&Token::RParen)?;
6596 }
6597
6598 Ok(Statement::Execute { name, parameters })
6599 }
6600
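/// Parse a `PREPARE` statement (the `PREPARE` keyword has already been
/// consumed). Illustrative input, using a dialect-dependent placeholder:
/// `PREPARE q (INT, TEXT) AS SELECT * FROM t WHERE a = $1`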
6601 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
6602 let name = self.parse_identifier()?;
6603
6604 let mut data_types = vec![];
6605 if self.consume_token(&Token::LParen) {
6606 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
6607 self.expect_token(&Token::RParen)?;
6608 }
6609
6610 self.expect_keyword(Keyword::AS)?;
6611 let statement = Box::new(self.parse_statement()?);
6612 Ok(Statement::Prepare {
6613 name,
6614 data_types,
6615 statement,
6616 })
6617 }
6618
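/// Parse the `WHEN [NOT] MATCHED [AND <predicate>] THEN ...` clauses of a
/// `MERGE` statement, e.g. `WHEN MATCHED THEN UPDATE SET a = 1` followed by
/// `WHEN NOT MATCHED THEN INSERT (a, b) VALUES (1, 2)`.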
6619 pub fn parse_merge_clauses(&mut self) -> Result<Vec<MergeClause>, ParserError> {
6620 let mut clauses: Vec<MergeClause> = vec![];
6621 loop {
6622 if self.peek_token() == Token::EOF || self.peek_token() == Token::SemiColon {
6623 break;
6624 }
6625 self.expect_keyword(Keyword::WHEN)?;
6626
6627 let is_not_matched = self.parse_keyword(Keyword::NOT);
6628 self.expect_keyword(Keyword::MATCHED)?;
6629
6630 let predicate = if self.parse_keyword(Keyword::AND) {
6631 Some(self.parse_expr()?)
6632 } else {
6633 None
6634 };
6635
6636 self.expect_keyword(Keyword::THEN)?;
6637
6638 clauses.push(
6639 match self.parse_one_of_keywords(&[
6640 Keyword::UPDATE,
6641 Keyword::INSERT,
6642 Keyword::DELETE,
6643 ]) {
6644 Some(Keyword::UPDATE) => {
6645 if is_not_matched {
6646 return Err(ParserError::ParserError(
6647 "UPDATE in NOT MATCHED merge clause".to_string(),
6648 ));
6649 }
6650 self.expect_keyword(Keyword::SET)?;
6651 let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
6652 MergeClause::MatchedUpdate {
6653 predicate,
6654 assignments,
6655 }
6656 }
6657 Some(Keyword::DELETE) => {
6658 if is_not_matched {
6659 return Err(ParserError::ParserError(
6660 "DELETE in NOT MATCHED merge clause".to_string(),
6661 ));
6662 }
6663 MergeClause::MatchedDelete(predicate)
6664 }
6665 Some(Keyword::INSERT) => {
6666 if !is_not_matched {
6667 return Err(ParserError::ParserError(
6668 "INSERT in MATCHED merge clause".to_string(),
6669 ));
6670 }
6671 let is_mysql = dialect_of!(self is MySqlDialect);
6672 let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
6673 self.expect_keyword(Keyword::VALUES)?;
6674 let values = self.parse_values(is_mysql)?;
6675 MergeClause::NotMatched {
6676 predicate,
6677 columns,
6678 values,
6679 }
6680 }
6681 Some(_) => {
6682 return Err(ParserError::ParserError(
6683 "expected UPDATE, DELETE or INSERT in merge clause".to_string(),
6684 ));
6685 }
6686 None => {
6687 return Err(ParserError::ParserError(
6688 "expected UPDATE, DELETE or INSERT in merge clause".to_string(),
6689 ));
6690 }
6691 },
6692 );
6693 }
6694 Ok(clauses)
6695 }
6696
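/// Parse a `MERGE` statement (the `MERGE` keyword has already been consumed).
/// Illustrative input, inferred from the clauses parsed below:
/// `MERGE INTO t USING s ON t.id = s.id WHEN MATCHED THEN UPDATE SET v = s.v`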
6697 pub fn parse_merge(&mut self) -> Result<Statement, ParserError> {
6698 let into = self.parse_keyword(Keyword::INTO);
6699
6700 let table = self.parse_table_factor()?;
6701
6702 self.expect_keyword(Keyword::USING)?;
6703 let source = self.parse_table_factor()?;
6704 self.expect_keyword(Keyword::ON)?;
6705 let on = self.parse_expr()?;
6706 let clauses = self.parse_merge_clauses()?;
6707
6708 Ok(Statement::Merge {
6709 into,
6710 table,
6711 source,
6712 on: Box::new(on),
6713 clauses,
6714 })
6715 }
6716
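/// Parse the remainder of `CREATE [TEMPORARY] SEQUENCE` after the `SEQUENCE`
/// keyword. An illustrative, PostgreSQL-flavoured input:
/// `CREATE SEQUENCE IF NOT EXISTS seq AS BIGINT INCREMENT BY 2 MINVALUE 1 CACHE 10 OWNED BY t.id`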
6717 pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
6723 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6725 let name = self.parse_object_name()?;
6727 let mut data_type: Option<DataType> = None;
6729 if self.parse_keywords(&[Keyword::AS]) {
6730 data_type = Some(self.parse_data_type()?)
6731 }
6732 let sequence_options = self.parse_create_sequence_options()?;
6733 let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
6735 if self.parse_keywords(&[Keyword::NONE]) {
6736 Some(ObjectName(vec![Ident::new("NONE")]))
6737 } else {
6738 Some(self.parse_object_name()?)
6739 }
6740 } else {
6741 None
6742 };
6743 Ok(Statement::CreateSequence {
6744 temporary,
6745 if_not_exists,
6746 name,
6747 data_type,
6748 sequence_options,
6749 owned_by,
6750 })
6751 }
6752
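/// Parse sequence options in the fixed order accepted below: `INCREMENT [BY] <n>`,
/// `MINVALUE <n>` or `NO MINVALUE`, `MAXVALUE <n>` or `NO MAXVALUE`,
/// `START [WITH] <n>`, `CACHE <n>`, and `CYCLE` or `NO CYCLE`.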
6753 fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
6754 let mut sequence_options = vec![];
6755 if self.parse_keywords(&[Keyword::INCREMENT]) {
6757 if self.parse_keywords(&[Keyword::BY]) {
6758 sequence_options.push(SequenceOptions::IncrementBy(
6759 Expr::Value(self.parse_number_value()?),
6760 true,
6761 ));
6762 } else {
6763 sequence_options.push(SequenceOptions::IncrementBy(
6764 Expr::Value(self.parse_number_value()?),
6765 false,
6766 ));
6767 }
6768 }
6769 if self.parse_keyword(Keyword::MINVALUE) {
6771 sequence_options.push(SequenceOptions::MinValue(MinMaxValue::Some(Expr::Value(
6772 self.parse_number_value()?,
6773 ))));
6774 } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
6775 sequence_options.push(SequenceOptions::MinValue(MinMaxValue::None));
6776 } else {
6777 sequence_options.push(SequenceOptions::MinValue(MinMaxValue::Empty));
6778 }
6779 if self.parse_keywords(&[Keyword::MAXVALUE]) {
6781 sequence_options.push(SequenceOptions::MaxValue(MinMaxValue::Some(Expr::Value(
6782 self.parse_number_value()?,
6783 ))));
6784 } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
6785 sequence_options.push(SequenceOptions::MaxValue(MinMaxValue::None));
6786 } else {
6787 sequence_options.push(SequenceOptions::MaxValue(MinMaxValue::Empty));
6788 }
6789 if self.parse_keywords(&[Keyword::START]) {
6791 if self.parse_keywords(&[Keyword::WITH]) {
6792 sequence_options.push(SequenceOptions::StartWith(
6793 Expr::Value(self.parse_number_value()?),
6794 true,
6795 ));
6796 } else {
6797 sequence_options.push(SequenceOptions::StartWith(
6798 Expr::Value(self.parse_number_value()?),
6799 false,
6800 ));
6801 }
6802 }
6803 if self.parse_keywords(&[Keyword::CACHE]) {
6805 sequence_options.push(SequenceOptions::Cache(Expr::Value(
6806 self.parse_number_value()?,
6807 )));
6808 }
6809 if self.parse_keywords(&[Keyword::NO]) {
6811 if self.parse_keywords(&[Keyword::CYCLE]) {
6812 sequence_options.push(SequenceOptions::Cycle(true));
6813 }
6814 } else if self.parse_keywords(&[Keyword::CYCLE]) {
6815 sequence_options.push(SequenceOptions::Cycle(false));
6816 }
6817 Ok(sequence_options)
6818 }
6819
6820 pub fn index(&self) -> usize {
6822 self.index
6823 }
6824}
6825
6826impl Word {
6827 pub fn to_ident(&self) -> Ident {
6828 Ident {
6829 value: self.value.clone(),
6830 quote_style: self.quote_style,
6831 }
6832 }
6833}
6834
6835#[cfg(test)]
6836mod tests {
6837 use crate::test_utils::{all_dialects, TestedDialects};
6838
6839 use super::*;
6840
6841 #[test]
6842 fn test_prev_index() {
6843 let sql = "SELECT version";
6844 all_dialects().run_parser_method(sql, |parser| {
6845 assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
6846 assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
6847 parser.prev_token();
6848 assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
6849 assert_eq!(parser.next_token(), Token::make_word("version", None));
6850 parser.prev_token();
6851 assert_eq!(parser.peek_token(), Token::make_word("version", None));
6852 assert_eq!(parser.next_token(), Token::make_word("version", None));
6853 assert_eq!(parser.peek_token(), Token::EOF);
6854 parser.prev_token();
6855 assert_eq!(parser.next_token(), Token::make_word("version", None));
6856 assert_eq!(parser.next_token(), Token::EOF);
6857 assert_eq!(parser.next_token(), Token::EOF);
6858 parser.prev_token();
6859 });
6860 }
6861
6862 #[test]
6863 fn test_parse_limit() {
6864 let sql = "SELECT * FROM user LIMIT 1";
6865 all_dialects().run_parser_method(sql, |parser| {
6866 let ast = parser.parse_query().unwrap();
6867 assert_eq!(ast.to_string(), sql.to_string());
6868 });
6869
6870 let sql = "SELECT * FROM user LIMIT $1 OFFSET $2";
6871 let dialects = TestedDialects {
6872 dialects: vec![
6873 Box::new(PostgreSqlDialect {}),
6874 Box::new(ClickHouseDialect {}),
6875 Box::new(GenericDialect {}),
6876 Box::new(MsSqlDialect {}),
6877 Box::new(SnowflakeDialect {}),
6878 ],
6879 };
6880
6881 dialects.run_parser_method(sql, |parser| {
6882 let ast = parser.parse_query().unwrap();
6883 assert_eq!(ast.to_string(), sql.to_string());
6884 });
6885
6886 let sql = "SELECT * FROM user LIMIT ? OFFSET ?";
6887 let dialects = TestedDialects {
6888 dialects: vec![Box::new(MySqlDialect {})],
6889 };
6890 dialects.run_parser_method(sql, |parser| {
6891 let ast = parser.parse_query().unwrap();
6892 assert_eq!(ast.to_string(), sql.to_string());
6893 });
6894 }
6895
6896 #[cfg(test)]
6897 mod test_parse_data_type {
6898 use crate::ast::{
6899 CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
6900 };
6901 use crate::dialect::{AnsiDialect, GenericDialect};
6902 use crate::test_utils::TestedDialects;
6903
6904 macro_rules! test_parse_data_type {
6905 ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
6906 $dialect.run_parser_method(&*$input, |parser| {
6907 let data_type = parser.parse_data_type().unwrap();
6908 assert_eq!($expected_type, data_type);
6909 assert_eq!($input.to_string(), data_type.to_string());
6910 });
6911 }};
6912 }
6913
6914 #[test]
6915 fn test_ansi_character_string_types() {
6916 let dialect = TestedDialects {
6918 dialects: vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})],
6919 };
6920
6921 test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));
6922
6923 test_parse_data_type!(
6924 dialect,
6925 "CHARACTER(20)",
6926 DataType::Character(Some(CharacterLength {
6927 length: 20,
6928 unit: None
6929 }))
6930 );
6931
6932 test_parse_data_type!(
6933 dialect,
6934 "CHARACTER(20 CHARACTERS)",
6935 DataType::Character(Some(CharacterLength {
6936 length: 20,
6937 unit: Some(CharLengthUnits::Characters)
6938 }))
6939 );
6940
6941 test_parse_data_type!(
6942 dialect,
6943 "CHARACTER(20 OCTETS)",
6944 DataType::Character(Some(CharacterLength {
6945 length: 20,
6946 unit: Some(CharLengthUnits::Octets)
6947 }))
6948 );
6949
6950 test_parse_data_type!(dialect, "CHAR", DataType::Char(None));
6951
6952 test_parse_data_type!(
6953 dialect,
6954 "CHAR(20)",
6955 DataType::Char(Some(CharacterLength {
6956 length: 20,
6957 unit: None
6958 }))
6959 );
6960
6961 test_parse_data_type!(
6962 dialect,
6963 "CHAR(20 CHARACTERS)",
6964 DataType::Char(Some(CharacterLength {
6965 length: 20,
6966 unit: Some(CharLengthUnits::Characters)
6967 }))
6968 );
6969
6970 test_parse_data_type!(
6971 dialect,
6972 "CHAR(20 OCTETS)",
6973 DataType::Char(Some(CharacterLength {
6974 length: 20,
6975 unit: Some(CharLengthUnits::Octets)
6976 }))
6977 );
6978
6979 test_parse_data_type!(
6980 dialect,
6981 "CHARACTER VARYING(20)",
6982 DataType::CharacterVarying(Some(CharacterLength {
6983 length: 20,
6984 unit: None
6985 }))
6986 );
6987
6988 test_parse_data_type!(
6989 dialect,
6990 "CHARACTER VARYING(20 CHARACTERS)",
6991 DataType::CharacterVarying(Some(CharacterLength {
6992 length: 20,
6993 unit: Some(CharLengthUnits::Characters)
6994 }))
6995 );
6996
6997 test_parse_data_type!(
6998 dialect,
6999 "CHARACTER VARYING(20 OCTETS)",
7000 DataType::CharacterVarying(Some(CharacterLength {
7001 length: 20,
7002 unit: Some(CharLengthUnits::Octets)
7003 }))
7004 );
7005
7006 test_parse_data_type!(
7007 dialect,
7008 "CHAR VARYING(20)",
7009 DataType::CharVarying(Some(CharacterLength {
7010 length: 20,
7011 unit: None
7012 }))
7013 );
7014
7015 test_parse_data_type!(
7016 dialect,
7017 "CHAR VARYING(20 CHARACTERS)",
7018 DataType::CharVarying(Some(CharacterLength {
7019 length: 20,
7020 unit: Some(CharLengthUnits::Characters)
7021 }))
7022 );
7023
7024 test_parse_data_type!(
7025 dialect,
7026 "CHAR VARYING(20 OCTETS)",
7027 DataType::CharVarying(Some(CharacterLength {
7028 length: 20,
7029 unit: Some(CharLengthUnits::Octets)
7030 }))
7031 );
7032
7033 test_parse_data_type!(
7034 dialect,
7035 "VARCHAR(20)",
7036 DataType::Varchar(Some(CharacterLength {
7037 length: 20,
7038 unit: None
7039 }))
7040 );
7041 }
7042
7043 #[test]
7044 fn test_ansi_character_large_object_types() {
7045 let dialect = TestedDialects {
7047 dialects: vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})],
7048 };
7049
7050 test_parse_data_type!(
7051 dialect,
7052 "CHARACTER LARGE OBJECT",
7053 DataType::CharacterLargeObject(None)
7054 );
7055 test_parse_data_type!(
7056 dialect,
7057 "CHARACTER LARGE OBJECT(20)",
7058 DataType::CharacterLargeObject(Some(20))
7059 );
7060
7061 test_parse_data_type!(
7062 dialect,
7063 "CHAR LARGE OBJECT",
7064 DataType::CharLargeObject(None)
7065 );
7066 test_parse_data_type!(
7067 dialect,
7068 "CHAR LARGE OBJECT(20)",
7069 DataType::CharLargeObject(Some(20))
7070 );
7071
7072 test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
7073 test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
7074 }
7075
7076 #[test]
7077 fn test_parse_custom_types() {
7078 let dialect = TestedDialects {
7079 dialects: vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})],
7080 };
7081 test_parse_data_type!(
7082 dialect,
7083 "GEOMETRY",
7084 DataType::Custom(ObjectName(vec!["GEOMETRY".into()]), vec![])
7085 );
7086
7087 test_parse_data_type!(
7088 dialect,
7089 "GEOMETRY(POINT)",
7090 DataType::Custom(
7091 ObjectName(vec!["GEOMETRY".into()]),
7092 vec!["POINT".to_string()]
7093 )
7094 );
7095
7096 test_parse_data_type!(
7097 dialect,
7098 "GEOMETRY(POINT, 4326)",
7099 DataType::Custom(
7100 ObjectName(vec!["GEOMETRY".into()]),
7101 vec!["POINT".to_string(), "4326".to_string()]
7102 )
7103 );
7104 }
7105
7106 #[test]
7107 fn test_ansi_exact_numeric_types() {
7108 let dialect = TestedDialects {
7110 dialects: vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})],
7111 };
7112
7113 test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
7114
7115 test_parse_data_type!(
7116 dialect,
7117 "NUMERIC(2)",
7118 DataType::Numeric(ExactNumberInfo::Precision(2))
7119 );
7120
7121 test_parse_data_type!(
7122 dialect,
7123 "NUMERIC(2,10)",
7124 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
7125 );
7126
7127 test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
7128
7129 test_parse_data_type!(
7130 dialect,
7131 "DECIMAL(2)",
7132 DataType::Decimal(ExactNumberInfo::Precision(2))
7133 );
7134
7135 test_parse_data_type!(
7136 dialect,
7137 "DECIMAL(2,10)",
7138 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
7139 );
7140
7141 test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
7142
7143 test_parse_data_type!(
7144 dialect,
7145 "DEC(2)",
7146 DataType::Dec(ExactNumberInfo::Precision(2))
7147 );
7148
7149 test_parse_data_type!(
7150 dialect,
7151 "DEC(2,10)",
7152 DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
7153 );
7154 }
7155
7156 #[test]
7157 fn test_ansi_date_type() {
7158 let dialect = TestedDialects {
7160 dialects: vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})],
7161 };
7162
7163 test_parse_data_type!(dialect, "DATE", DataType::Date);
7164
7165 test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
7166
7167 test_parse_data_type!(
7168 dialect,
7169 "TIME(6)",
7170 DataType::Time(Some(6), TimezoneInfo::None)
7171 );
7172
7173 test_parse_data_type!(
7174 dialect,
7175 "TIME WITH TIME ZONE",
7176 DataType::Time(None, TimezoneInfo::WithTimeZone)
7177 );
7178
7179 test_parse_data_type!(
7180 dialect,
7181 "TIME(6) WITH TIME ZONE",
7182 DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
7183 );
7184
7185 test_parse_data_type!(
7186 dialect,
7187 "TIME WITHOUT TIME ZONE",
7188 DataType::Time(None, TimezoneInfo::WithoutTimeZone)
7189 );
7190
7191 test_parse_data_type!(
7192 dialect,
7193 "TIME(6) WITHOUT TIME ZONE",
7194 DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
7195 );
7196
7197 test_parse_data_type!(
7198 dialect,
7199 "TIMESTAMP",
7200 DataType::Timestamp(None, TimezoneInfo::None)
7201 );
7202
7203 test_parse_data_type!(
7204 dialect,
7205 "TIMESTAMP(22)",
7206 DataType::Timestamp(Some(22), TimezoneInfo::None)
7207 );
7208
7209 test_parse_data_type!(
7210 dialect,
7211 "TIMESTAMP(22) WITH TIME ZONE",
7212 DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
7213 );
7214
7215 test_parse_data_type!(
7216 dialect,
7217 "TIMESTAMP(33) WITHOUT TIME ZONE",
7218 DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
7219 );
7220 }
7221 }
7222
7223 #[test]
7224 fn test_parse_schema_name() {
7225 macro_rules! test_parse_schema_name {
7227 ($input:expr, $expected_name:expr $(,)?) => {{
7228 all_dialects().run_parser_method(&*$input, |parser| {
7229 let schema_name = parser.parse_schema_name().unwrap();
7230 assert_eq!(schema_name, $expected_name);
7232 assert_eq!(schema_name.to_string(), $input.to_string());
7234 });
7235 }};
7236 }
7237
7238 let dummy_name = ObjectName(vec![Ident::new("dummy_name")]);
7239 let dummy_authorization = Ident::new("dummy_authorization");
7240
7241 test_parse_schema_name!(
7242 format!("{dummy_name}"),
7243 SchemaName::Simple(dummy_name.clone())
7244 );
7245
7246 test_parse_schema_name!(
7247 format!("AUTHORIZATION {dummy_authorization}"),
7248 SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
7249 );
7250 test_parse_schema_name!(
7251 format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
7252 SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
7253 );
7254 }
7255
7256 #[test]
7257 fn mysql_parse_index_table_constraint() {
7258 macro_rules! test_parse_table_constraint {
7259 ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
7260 $dialect.run_parser_method(&*$input, |parser| {
7261 let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
7262 assert_eq!(constraint, $expected);
7264 assert_eq!(constraint.to_string(), $input.to_string());
7266 });
7267 }};
7268 }
7269
7270 let dialect = TestedDialects {
7271 dialects: vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})],
7272 };
7273
7274 test_parse_table_constraint!(
7275 dialect,
7276 "INDEX (c1)",
7277 TableConstraint::Index {
7278 display_as_key: false,
7279 name: None,
7280 index_type: None,
7281 columns: vec![Ident::new("c1")],
7282 }
7283 );
7284
7285 test_parse_table_constraint!(
7286 dialect,
7287 "KEY (c1)",
7288 TableConstraint::Index {
7289 display_as_key: true,
7290 name: None,
7291 index_type: None,
7292 columns: vec![Ident::new("c1")],
7293 }
7294 );
7295
7296 test_parse_table_constraint!(
7297 dialect,
7298 "INDEX 'index' (c1, c2)",
7299 TableConstraint::Index {
7300 display_as_key: false,
7301 name: Some(Ident::with_quote('\'', "index")),
7302 index_type: None,
7303 columns: vec![Ident::new("c1"), Ident::new("c2")],
7304 }
7305 );
7306
7307 test_parse_table_constraint!(
7308 dialect,
7309 "INDEX USING BTREE (c1)",
7310 TableConstraint::Index {
7311 display_as_key: false,
7312 name: None,
7313 index_type: Some(IndexType::BTree),
7314 columns: vec![Ident::new("c1")],
7315 }
7316 );
7317
7318 test_parse_table_constraint!(
7319 dialect,
7320 "INDEX USING HASH (c1)",
7321 TableConstraint::Index {
7322 display_as_key: false,
7323 name: None,
7324 index_type: Some(IndexType::Hash),
7325 columns: vec![Ident::new("c1")],
7326 }
7327 );
7328
7329 test_parse_table_constraint!(
7330 dialect,
7331 "INDEX idx_name USING BTREE (c1)",
7332 TableConstraint::Index {
7333 display_as_key: false,
7334 name: Some(Ident::new("idx_name")),
7335 index_type: Some(IndexType::BTree),
7336 columns: vec![Ident::new("c1")],
7337 }
7338 );
7339
7340 test_parse_table_constraint!(
7341 dialect,
7342 "INDEX idx_name USING HASH (c1)",
7343 TableConstraint::Index {
7344 display_as_key: false,
7345 name: Some(Ident::new("idx_name")),
7346 index_type: Some(IndexType::Hash),
7347 columns: vec![Ident::new("c1")],
7348 }
7349 );
7350 }
7351
7352 #[test]
7353 fn test_update_has_keyword() {
7354 let sql = r#"UPDATE test SET name=$1,
7355 value=$2,
7356 where=$3,
7357 create=$4,
7358 is_default=$5,
7359 classification=$6,
7360 sort=$7
7361 WHERE id=$8"#;
7362 let pg_dialect = PostgreSqlDialect {};
7363 let ast = Parser::parse_sql(&pg_dialect, sql).unwrap();
7364 assert_eq!(
7365 ast[0].to_string(),
7366 r#"UPDATE test SET name = $1, value = $2, where = $3, create = $4, is_default = $5, classification = $6, sort = $7 WHERE id = $8"#
7367 );
7368 }
7369
7370 #[test]
7371 fn test_tokenizer_error_loc() {
7372 let sql = "foo '";
7373 let ast = Parser::parse_sql(&GenericDialect, sql);
7374 assert_eq!(
7375 ast,
7376 Err(ParserError::TokenizerError(
7377 "Unterminated string literal at Line: 1, Column 5".to_string()
7378 ))
7379 );
7380 }
7381
7382 #[test]
7383 fn test_parser_error_loc() {
7384 let sql = "SELECT this is a syntax error";
7387 let ast = Parser::parse_sql(&GenericDialect, sql);
7388 assert_eq!(
7389 ast,
7390 Err(ParserError::ParserError(
7391 "Expected [NOT] NULL or TRUE|FALSE or [NOT] DISTINCT FROM after IS, found: a"
7392 .to_string()
7393 ))
7394 );
7395 }
7396
7397 #[test]
7398 fn test_nested_explain_error() {
7399 let sql = "EXPLAIN EXPLAIN SELECT 1";
7400 let ast = Parser::parse_sql(&GenericDialect, sql);
7401 assert_eq!(
7402 ast,
7403 Err(ParserError::ParserError(
7404 "Explain must be root of the plan".to_string()
7405 ))
7406 );
7407 }
7408}