1#[cfg(not(feature = "std"))]
19use crate::alloc::string::ToString;
20use crate::ast::helpers::attached_token::AttachedToken;
21use crate::ast::helpers::key_value_options::{
22 KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
23};
24use crate::ast::helpers::stmt_create_database::CreateDatabaseBuilder;
25use crate::ast::helpers::stmt_create_table::CreateTableBuilder;
26use crate::ast::helpers::stmt_data_loading::{
27 FileStagingCommand, StageLoadSelectItem, StageLoadSelectItemKind, StageParamsObject,
28};
29use crate::ast::{
30 AlterTable, AlterTableOperation, AlterTableType, CatalogSyncNamespaceMode, ColumnOption,
31 ColumnPolicy, ColumnPolicyProperty, ContactEntry, CopyIntoSnowflakeKind, CreateTable,
32 CreateTableLikeKind, DollarQuotedString, Ident, IdentityParameters, IdentityProperty,
33 IdentityPropertyFormatKind, IdentityPropertyKind, IdentityPropertyOrder, InitializeKind,
34 Insert, MultiTableInsertIntoClause, MultiTableInsertType, MultiTableInsertValue,
35 MultiTableInsertValues, MultiTableInsertWhenClause, ObjectName, ObjectNamePart,
36 RefreshModeKind, RowAccessPolicy, ShowObjects, SqlOption, Statement, StorageLifecyclePolicy,
37 StorageSerializationPolicy, TableObject, TagsColumnOption, Value, WrappedCollection,
38};
39use crate::dialect::{Dialect, Precedence};
40use crate::keywords::Keyword;
41use crate::parser::{IsOptional, Parser, ParserError};
42use crate::tokenizer::TokenWithSpan;
43use crate::tokenizer::{Span, Token};
44#[cfg(not(feature = "std"))]
45use alloc::boxed::Box;
46#[cfg(not(feature = "std"))]
47use alloc::string::String;
48#[cfg(not(feature = "std"))]
49use alloc::vec::Vec;
50#[cfg(not(feature = "std"))]
51use alloc::{format, vec};
52
53use super::keywords::RESERVED_FOR_IDENTIFIER;
54
// Keywords that act as a select-item operator (e.g. `SELECT CONNECT_BY_ROOT col`);
// exposed to the core parser via `get_reserved_keywords_for_select_item_operator`.
const RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR: [Keyword; 1] = [Keyword::CONNECT_BY_ROOT];
56
// Keywords that may not begin a table factor. When one of these follows FROM
// (unquoted), it is treated as the start of the next clause rather than as a
// table name; see `is_table_factor` below.
const RESERVED_KEYWORDS_FOR_TABLE_FACTOR: &[Keyword] = &[
    Keyword::ALL,
    Keyword::ALTER,
    Keyword::AND,
    Keyword::ANY,
    Keyword::AS,
    Keyword::BETWEEN,
    Keyword::BY,
    Keyword::CHECK,
    Keyword::COLUMN,
    Keyword::CONNECT,
    Keyword::CREATE,
    Keyword::CROSS,
    Keyword::CURRENT,
    Keyword::DELETE,
    Keyword::DISTINCT,
    Keyword::DROP,
    Keyword::ELSE,
    Keyword::EXISTS,
    Keyword::FOLLOWING,
    Keyword::FOR,
    Keyword::FROM,
    Keyword::FULL,
    Keyword::GRANT,
    Keyword::GROUP,
    Keyword::HAVING,
    Keyword::ILIKE,
    Keyword::IN,
    Keyword::INCREMENT,
    Keyword::INNER,
    Keyword::INSERT,
    Keyword::INTERSECT,
    Keyword::INTO,
    Keyword::IS,
    Keyword::JOIN,
    Keyword::LEFT,
    Keyword::LIKE,
    Keyword::MINUS,
    Keyword::NATURAL,
    Keyword::NOT,
    Keyword::NULL,
    Keyword::OF,
    Keyword::ON,
    Keyword::OR,
    Keyword::ORDER,
    Keyword::QUALIFY,
    Keyword::REGEXP,
    Keyword::REVOKE,
    Keyword::RIGHT,
    Keyword::RLIKE,
    Keyword::ROW,
    Keyword::ROWS,
    Keyword::SAMPLE,
    Keyword::SELECT,
    Keyword::SET,
    Keyword::SOME,
    Keyword::START,
    Keyword::TABLE,
    Keyword::TABLESAMPLE,
    Keyword::THEN,
    Keyword::TO,
    Keyword::TRIGGER,
    Keyword::UNION,
    Keyword::UNIQUE,
    Keyword::UPDATE,
    Keyword::USING,
    Keyword::VALUES,
    Keyword::WHEN,
    Keyword::WHENEVER,
    Keyword::WHERE,
    Keyword::WINDOW,
    Keyword::WITH,
];
131
/// A [`Dialect`] for [Snowflake](https://www.snowflake.com/).
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SnowflakeDialect;
136
impl Dialect for SnowflakeDialect {
    // Identifiers start with an ASCII letter or underscore.
    fn is_identifier_start(&self, ch: char) -> bool {
        ch.is_ascii_lowercase() || ch.is_ascii_uppercase() || ch == '_'
    }

    fn supports_projection_trailing_commas(&self) -> bool {
        true
    }

    fn supports_from_trailing_commas(&self) -> bool {
        true
    }

    // e.g. `SELECT * FROM db_name..table_name` (schema omitted).
    fn supports_object_name_double_dot_notation(&self) -> bool {
        true
    }

    // Subsequent identifier characters also allow digits and `$`.
    fn is_identifier_part(&self, ch: char) -> bool {
        ch.is_ascii_lowercase()
            || ch.is_ascii_uppercase()
            || ch.is_ascii_digit()
            || ch == '$'
            || ch == '_'
    }

    fn supports_string_literal_backslash_escape(&self) -> bool {
        true
    }

    fn supports_within_after_array_aggregation(&self) -> bool {
        true
    }

    // The `(+)` outer-join operator.
    fn supports_outer_join_operator(&self) -> bool {
        true
    }

    fn supports_connect_by(&self) -> bool {
        true
    }

    fn supports_execute_immediate(&self) -> bool {
        true
    }

    fn supports_match_recognize(&self) -> bool {
        true
    }

    fn supports_dictionary_syntax(&self) -> bool {
        true
    }

    // e.g. `FIRST_VALUE(x IGNORE NULLS) OVER (...)`.
    fn supports_window_function_null_treatment_arg(&self) -> bool {
        true
    }

    fn supports_parenthesized_set_variables(&self) -> bool {
        true
    }

    fn supports_comment_on(&self) -> bool {
        true
    }

    // e.g. `EXTRACT(year, timestamp_col)` in addition to `EXTRACT(YEAR FROM ...)`.
    fn supports_extract_comma_syntax(&self) -> bool {
        true
    }

    fn supports_subquery_as_function_arg(&self) -> bool {
        true
    }

    fn supports_create_view_comment_syntax(&self) -> bool {
        true
    }

    // `ARRAY` may be used as a type without an element type, e.g. `CAST(x AS ARRAY)`.
    fn supports_array_typedef_without_element_type(&self) -> bool {
        true
    }

    fn supports_parens_around_table_factor(&self) -> bool {
        true
    }

    fn supports_values_as_table_factor(&self) -> bool {
        true
    }

    /// Intercepts statements with Snowflake-specific syntax before the generic
    /// parser runs. Returns `None` (after rewinding any consumed tokens) to
    /// fall back to the default parsing for that statement.
    fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
        if parser.parse_keyword(Keyword::BEGIN) {
            // `BEGIN [TRANSACTION|WORK|NAME ...]` or a bare `BEGIN;`/`BEGIN<EOF>`
            // is the standard transaction statement; anything else starts a
            // Snowflake scripting `BEGIN ... [EXCEPTION ...] END` block.
            if parser
                .peek_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::NAME])
                .is_some()
                || matches!(parser.peek_token_ref().token, Token::SemiColon | Token::EOF)
            {
                // Rewind BEGIN so the generic parser sees it.
                parser.prev_token();
                return None;
            }
            return Some(parser.parse_begin_exception_end());
        }

        if parser.parse_keywords(&[Keyword::ALTER, Keyword::DYNAMIC, Keyword::TABLE]) {
            return Some(parse_alter_dynamic_table(parser));
        }

        if parser.parse_keywords(&[Keyword::ALTER, Keyword::EXTERNAL, Keyword::TABLE]) {
            return Some(parse_alter_external_table(parser));
        }

        if parser.parse_keywords(&[Keyword::ALTER, Keyword::SESSION]) {
            // ALTER SESSION requires SET (assign parameters) or UNSET (clear them).
            let set = match parser.parse_one_of_keywords(&[Keyword::SET, Keyword::UNSET]) {
                Some(Keyword::SET) => true,
                Some(Keyword::UNSET) => false,
                _ => return Some(parser.expected_ref("SET or UNSET", parser.peek_token_ref())),
            };
            return Some(parse_alter_session(parser, set));
        }

        if parser.parse_keyword(Keyword::CREATE) {
            // Collect the optional modifiers that may precede the object keyword.
            let or_replace = parser.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
            let global = match parser.parse_one_of_keywords(&[Keyword::LOCAL, Keyword::GLOBAL]) {
                Some(Keyword::LOCAL) => Some(false),
                Some(Keyword::GLOBAL) => Some(true),
                _ => None,
            };

            let dynamic = parser.parse_keyword(Keyword::DYNAMIC);

            let mut temporary = false;
            let mut volatile = false;
            let mut transient = false;
            let mut iceberg = false;

            match parser.parse_one_of_keywords(&[
                Keyword::TEMP,
                Keyword::TEMPORARY,
                Keyword::VOLATILE,
                Keyword::TRANSIENT,
                Keyword::ICEBERG,
            ]) {
                Some(Keyword::TEMP | Keyword::TEMPORARY) => temporary = true,
                Some(Keyword::VOLATILE) => volatile = true,
                Some(Keyword::TRANSIENT) => transient = true,
                Some(Keyword::ICEBERG) => iceberg = true,
                _ => {}
            }

            if parser.parse_keyword(Keyword::STAGE) {
                return Some(parse_create_stage(or_replace, temporary, parser));
            } else if parser.parse_keyword(Keyword::TABLE) {
                return Some(
                    parse_create_table(
                        or_replace, global, temporary, volatile, transient, iceberg, dynamic,
                        parser,
                    )
                    .map(Into::into),
                );
            } else if parser.parse_keyword(Keyword::DATABASE) {
                return Some(parse_create_database(or_replace, transient, parser));
            } else {
                // Not a CREATE handled here: rewind CREATE plus the modifiers
                // we consumed, then fall through to the generic parser.
                // NOTE(review): only OR REPLACE and TEMP/TEMPORARY are rewound;
                // LOCAL/GLOBAL, DYNAMIC, VOLATILE, TRANSIENT and ICEBERG are
                // not — confirm those cannot precede an unhandled object kind.
                let mut back = 1;
                if or_replace {
                    back += 2
                }
                if temporary {
                    back += 1
                }
                for _i in 0..back {
                    parser.prev_token();
                }
            }
        }
        if parser.parse_keywords(&[Keyword::COPY, Keyword::INTO]) {
            return Some(parse_copy_into(parser));
        }

        // Stage file commands: LIST/LS and REMOVE/RM.
        if let Some(kw) = parser.parse_one_of_keywords(&[
            Keyword::LIST,
            Keyword::LS,
            Keyword::REMOVE,
            Keyword::RM,
        ]) {
            return Some(parse_file_staging_command(kw, parser));
        }

        if parser.parse_keyword(Keyword::SHOW) {
            let terse = parser.parse_keyword(Keyword::TERSE);
            if parser.parse_keyword(Keyword::OBJECTS) {
                return Some(parse_show_objects(terse, parser));
            }
            // Not SHOW [TERSE] OBJECTS: rewind everything consumed.
            if terse {
                parser.prev_token();
            }
            parser.prev_token();
        }

        if parser.parse_keyword(Keyword::INSERT) {
            // Keep the INSERT token so a multi-table insert can attach it.
            let insert_token = parser.get_current_token().clone();
            let overwrite = parser.parse_keyword(Keyword::OVERWRITE);

            // `INSERT [OVERWRITE] ALL|FIRST ...` is a Snowflake multi-table insert.
            if let Some(kw) = parser.parse_one_of_keywords(&[Keyword::ALL, Keyword::FIRST]) {
                let multi_table_insert_type = match kw {
                    Keyword::FIRST => MultiTableInsertType::First,
                    _ => MultiTableInsertType::All,
                };
                return Some(parse_multi_table_insert(
                    parser,
                    insert_token,
                    overwrite,
                    multi_table_insert_type,
                ));
            }

            // Plain INSERT [OVERWRITE]: rewind and defer to the generic parser.
            if overwrite {
                parser.prev_token();
            }
            parser.prev_token();
        }

        None
    }

    /// Parses Snowflake column options (IDENTITY/AUTOINCREMENT, masking and
    /// projection policies, tags). The outer `Result`/`Option` layers come
    /// from `maybe_parse`: `Ok(None)` means "no Snowflake option here" and the
    /// generic column-option parsing should proceed.
    fn parse_column_option(
        &self,
        parser: &mut Parser,
    ) -> Result<Option<Result<Option<ColumnOption>, ParserError>>, ParserError> {
        parser.maybe_parse(|parser| {
            // Optional WITH prefix, e.g. `WITH MASKING POLICY ...`.
            let with = parser.parse_keyword(Keyword::WITH);

            if parser.parse_keyword(Keyword::IDENTITY) {
                Ok(parse_identity_property(parser)
                    .map(|p| Some(ColumnOption::Identity(IdentityPropertyKind::Identity(p)))))
            } else if parser.parse_keyword(Keyword::AUTOINCREMENT) {
                Ok(parse_identity_property(parser).map(|p| {
                    Some(ColumnOption::Identity(IdentityPropertyKind::Autoincrement(
                        p,
                    )))
                }))
            } else if parser.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
                Ok(parse_column_policy_property(parser, with)
                    .map(|p| Some(ColumnOption::Policy(ColumnPolicy::MaskingPolicy(p)))))
            } else if parser.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
                Ok(parse_column_policy_property(parser, with)
                    .map(|p| Some(ColumnOption::Policy(ColumnPolicy::ProjectionPolicy(p)))))
            } else if parser.parse_keywords(&[Keyword::TAG]) {
                Ok(parse_column_tags(parser, with).map(|p| Some(ColumnOption::Tags(p))))
            } else {
                // Make maybe_parse backtrack: nothing Snowflake-specific matched.
                Err(ParserError::ParserError("not found match".to_string()))
            }
        })
    }

    // Snowflake's `:` (path access / cast) operator binds like `::`.
    fn get_next_precedence(&self, parser: &Parser) -> Option<Result<u8, ParserError>> {
        let token = parser.peek_token_ref();
        match &token.token {
            Token::Colon => Some(Ok(self.prec_value(Precedence::DoubleColon))),
            _ => None,
        }
    }

    fn describe_requires_table_keyword(&self) -> bool {
        true
    }

    fn allow_extract_custom(&self) -> bool {
        true
    }

    fn allow_extract_single_quotes(&self) -> bool {
        true
    }

    // e.g. `SHOW TABLES LIKE '%' IN SCHEMA s`.
    fn supports_show_like_before_in(&self) -> bool {
        true
    }

    fn supports_left_associative_joins_without_parens(&self) -> bool {
        false
    }

    // INTERVAL is usable as an identifier in Snowflake, unlike most dialects.
    fn is_reserved_for_identifier(&self, kw: Keyword) -> bool {
        if matches!(kw, Keyword::INTERVAL) {
            false
        } else {
            RESERVED_FOR_IDENTIFIER.contains(&kw)
        }
    }

    fn supports_partiql(&self) -> bool {
        true
    }

    /// Decides whether a keyword after a select item is an implicit column
    /// alias (`SELECT 1 foo`) or the start of the next clause.
    fn is_column_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool {
        match kw {
            // EXCEPT/RETURNING followed by more tokens are clause keywords
            // (e.g. `SELECT * EXCEPT (col)`), not aliases.
            Keyword::EXCEPT
            | Keyword::RETURNING if !matches!(parser.peek_token_ref().token, Token::Comma | Token::EOF) =>
            {
                false
            }

            // LIMIT/OFFSET followed by a limit value start a LIMIT clause.
            Keyword::LIMIT | Keyword::OFFSET if peek_for_limit_options(parser) => false,

            // FETCH [FIRST|NEXT] <n> starts a FETCH clause.
            Keyword::FETCH if parser.peek_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]).is_some()
                || peek_for_limit_options(parser) =>
            {
                false
            }

            // These always begin a new clause and can never be a column alias.
            Keyword::FROM
            | Keyword::GROUP
            | Keyword::HAVING
            | Keyword::INTERSECT
            | Keyword::INTO
            | Keyword::MINUS
            | Keyword::ORDER
            | Keyword::SELECT
            | Keyword::UNION
            | Keyword::WHERE
            | Keyword::WITH => false,

            // Any other keyword may serve as an implicit column alias.
            _ => true,
        }
    }

    /// Decides whether a keyword after a table factor is an implicit table
    /// alias (`FROM t foo`) or the start of a join/clause.
    fn is_table_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool {
        match kw {
            // Clause/join keywords followed by more tokens are not aliases.
            Keyword::RETURNING
            | Keyword::INNER
            | Keyword::USING
            | Keyword::PIVOT
            | Keyword::UNPIVOT
            | Keyword::EXCEPT
            | Keyword::MATCH_RECOGNIZE
                if !matches!(parser.peek_token_ref().token, Token::SemiColon | Token::EOF) =>
            {
                false
            }

            // LIMIT/OFFSET followed by a limit value start a LIMIT clause.
            Keyword::LIMIT | Keyword::OFFSET if peek_for_limit_options(parser) => false,

            // FETCH [FIRST|NEXT] <n> starts a FETCH clause.
            Keyword::FETCH
                if parser
                    .peek_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT])
                    .is_some()
                    || peek_for_limit_options(parser) =>
            {
                false
            }

            // Join-type keywords followed by JOIN/OUTER introduce a join.
            Keyword::RIGHT | Keyword::LEFT | Keyword::SEMI | Keyword::ANTI
                if parser
                    .peek_one_of_keywords(&[Keyword::JOIN, Keyword::OUTER])
                    .is_some() =>
            {
                false
            }

            // `GLOBAL FULL ...` join form.
            Keyword::GLOBAL if parser.peek_keyword(Keyword::FULL) => false,

            // These always begin a new clause/join and cannot be a table alias.
            Keyword::WITH
            | Keyword::ORDER
            | Keyword::SELECT
            | Keyword::WHERE
            | Keyword::GROUP
            | Keyword::HAVING
            | Keyword::LATERAL
            | Keyword::UNION
            | Keyword::INTERSECT
            | Keyword::MINUS
            | Keyword::ON
            | Keyword::JOIN
            | Keyword::INNER
            | Keyword::CROSS
            | Keyword::FULL
            | Keyword::LEFT
            | Keyword::RIGHT
            | Keyword::NATURAL
            | Keyword::USING
            | Keyword::ASOF
            | Keyword::MATCH_CONDITION
            | Keyword::SET
            | Keyword::QUALIFY
            | Keyword::FOR
            | Keyword::START
            | Keyword::CONNECT
            | Keyword::SAMPLE
            | Keyword::TABLESAMPLE
            | Keyword::FROM => false,

            // Any other keyword may serve as an implicit table alias.
            _ => true,
        }
    }

    /// Decides whether a keyword may start a table factor after FROM.
    fn is_table_factor(&self, kw: &Keyword, parser: &mut Parser) -> bool {
        match kw {
            Keyword::LIMIT if peek_for_limit_options(parser) => false,
            // `TABLE(...)` — presumably a table-function call; allowed.
            Keyword::TABLE if matches!(parser.peek_token_ref().token, Token::LParen) => true,
            _ => !RESERVED_KEYWORDS_FOR_TABLE_FACTOR.contains(kw),
        }
    }

    // `AT (TIMESTAMP => ...)` / `BEFORE (...)` time-travel clauses.
    fn supports_table_versioning(&self) -> bool {
        true
    }

    fn supports_group_by_expr(&self) -> bool {
        true
    }

    fn get_reserved_keywords_for_select_item_operator(&self) -> &[Keyword] {
        &RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR
    }

    fn supports_space_separated_column_options(&self) -> bool {
        true
    }

    fn supports_comma_separated_drop_column_list(&self) -> bool {
        true
    }

    // An unquoted `identifier` that is not already a function call is treated
    // as the IDENTIFIER(...) pseudo-function generating an object name.
    fn is_identifier_generating_function_name(
        &self,
        ident: &Ident,
        name_parts: &[ObjectNamePart],
    ) -> bool {
        ident.quote_style.is_none()
            && ident.value.to_lowercase() == "identifier"
            && !name_parts
                .iter()
                .any(|p| matches!(p, ObjectNamePart::Function(_)))
    }

    // e.g. `SELECT col.*` on a structured-type column.
    fn supports_select_expr_star(&self) -> bool {
        true
    }

    fn supports_select_wildcard_exclude(&self) -> bool {
        true
    }

    fn supports_semantic_view_table_factor(&self) -> bool {
        true
    }

    fn supports_select_wildcard_replace(&self) -> bool {
        true
    }

    fn supports_select_wildcard_ilike(&self) -> bool {
        true
    }

    fn supports_select_wildcard_rename(&self) -> bool {
        true
    }

    fn supports_lambda_functions(&self) -> bool {
        true
    }

    fn supports_comma_separated_trim(&self) -> bool {
        true
    }
}
686
687fn peek_for_limit_options(parser: &Parser) -> bool {
690 match &parser.peek_token_ref().token {
691 Token::Number(_, _) | Token::Placeholder(_) => true,
692 Token::SingleQuotedString(val) if val.is_empty() => true,
693 Token::DollarQuotedString(DollarQuotedString { value, .. }) if value.is_empty() => true,
694 Token::Word(w) if w.keyword == Keyword::NULL => true,
695 _ => false,
696 }
697}
698
699fn parse_file_staging_command(kw: Keyword, parser: &mut Parser) -> Result<Statement, ParserError> {
700 let stage = parse_snowflake_stage_name(parser)?;
701 let pattern = if parser.parse_keyword(Keyword::PATTERN) {
702 parser.expect_token(&Token::Eq)?;
703 Some(parser.parse_literal_string()?)
704 } else {
705 None
706 };
707
708 match kw {
709 Keyword::LIST | Keyword::LS => Ok(Statement::List(FileStagingCommand { stage, pattern })),
710 Keyword::REMOVE | Keyword::RM => {
711 Ok(Statement::Remove(FileStagingCommand { stage, pattern }))
712 }
713 _ => Err(ParserError::ParserError(
714 "unexpected stage command, expecting LIST, LS, REMOVE or RM".to_string(),
715 )),
716 }
717}
718
719fn parse_alter_dynamic_table(parser: &mut Parser) -> Result<Statement, ParserError> {
722 let table_name = parser.parse_object_name(true)?;
724
725 let operation = if parser.parse_keyword(Keyword::REFRESH) {
727 AlterTableOperation::Refresh { subpath: None }
728 } else if parser.parse_keyword(Keyword::SUSPEND) {
729 AlterTableOperation::Suspend
730 } else if parser.parse_keyword(Keyword::RESUME) {
731 AlterTableOperation::Resume
732 } else {
733 return parser.expected_ref(
734 "REFRESH, SUSPEND, or RESUME after ALTER DYNAMIC TABLE",
735 parser.peek_token_ref(),
736 );
737 };
738
739 let end_token = if parser.peek_token_ref().token == Token::SemiColon {
740 parser.peek_token_ref().clone()
741 } else {
742 parser.get_current_token().clone()
743 };
744
745 Ok(Statement::AlterTable(AlterTable {
746 name: table_name,
747 if_exists: false,
748 only: false,
749 operations: vec![operation],
750 location: None,
751 on_cluster: None,
752 table_type: Some(AlterTableType::Dynamic),
753 end_token: AttachedToken(end_token),
754 }))
755}
756
757fn parse_alter_external_table(parser: &mut Parser) -> Result<Statement, ParserError> {
760 let if_exists = parser.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
761 let table_name = parser.parse_object_name(true)?;
762
763 let operation = if parser.parse_keyword(Keyword::REFRESH) {
765 let subpath = match parser.peek_token().token {
767 Token::SingleQuotedString(s) => {
768 parser.next_token();
769 Some(s)
770 }
771 _ => None,
772 };
773 AlterTableOperation::Refresh { subpath }
774 } else {
775 return parser.expected_ref(
776 "REFRESH after ALTER EXTERNAL TABLE",
777 parser.peek_token_ref(),
778 );
779 };
780
781 let end_token = if parser.peek_token_ref().token == Token::SemiColon {
782 parser.peek_token_ref().clone()
783 } else {
784 parser.get_current_token().clone()
785 };
786
787 Ok(Statement::AlterTable(AlterTable {
788 name: table_name,
789 if_exists,
790 only: false,
791 operations: vec![operation],
792 location: None,
793 on_cluster: None,
794 table_type: Some(AlterTableType::External),
795 end_token: AttachedToken(end_token),
796 }))
797}
798
799fn parse_alter_session(parser: &mut Parser, set: bool) -> Result<Statement, ParserError> {
802 let session_options = parse_session_options(parser, set)?;
803 Ok(Statement::AlterSession {
804 set,
805 session_params: KeyValueOptions {
806 options: session_options,
807 delimiter: KeyValueOptionsDelimiter::Space,
808 },
809 })
810}
811
#[allow(clippy::too_many_arguments)]
/// Parses the body of a Snowflake `CREATE TABLE` statement. The caller has
/// already consumed `CREATE`, any modifiers (passed in as the boolean flags),
/// and the `TABLE` keyword itself. Snowflake accepts its table properties in
/// any order, so the body loops over tokens until end of statement.
pub fn parse_create_table(
    or_replace: bool,
    global: Option<bool>,
    temporary: bool,
    volatile: bool,
    transient: bool,
    iceberg: bool,
    dynamic: bool,
    parser: &mut Parser,
) -> Result<CreateTable, ParserError> {
    let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
    let table_name = parser.parse_object_name(false)?;

    let mut builder = CreateTableBuilder::new(table_name)
        .or_replace(or_replace)
        .if_not_exists(if_not_exists)
        .temporary(temporary)
        .transient(transient)
        .volatile(volatile)
        .iceberg(iceberg)
        .global(global)
        .dynamic(dynamic)
        .hive_formats(None);

    // Options collected as plain SqlOptions (currently only COMMENT).
    let mut plain_options = vec![];

    // Properties may appear in any order; consume until `;` or EOF.
    loop {
        let next_token = parser.next_token();
        match &next_token.token {
            Token::Word(word) => match word.keyword {
                Keyword::COPY => {
                    // COPY GRANTS
                    parser.expect_keyword_is(Keyword::GRANTS)?;
                    builder = builder.copy_grants(true);
                }
                Keyword::COMMENT => {
                    // Put COMMENT back so the shared inline-comment parser sees it.
                    parser.prev_token();
                    if let Some(comment_def) = parser.parse_optional_inline_comment()? {
                        plain_options.push(SqlOption::Comment(comment_def))
                    }
                }
                Keyword::AS => {
                    // CREATE TABLE ... AS <query>
                    let query = parser.parse_query()?;
                    builder = builder.query(Some(query));
                }
                Keyword::CLONE => {
                    // NOTE(review): a failed object-name parse yields
                    // `clone == None` rather than an error — confirm this
                    // best-effort behavior is intended.
                    let clone = parser.parse_object_name(false).ok();
                    builder = builder.clone_clause(clone);
                }
                Keyword::LIKE => {
                    let name = parser.parse_object_name(false)?;
                    builder = builder.like(Some(CreateTableLikeKind::Plain(
                        crate::ast::CreateTableLike {
                            name,
                            defaults: None,
                        },
                    )));
                }
                Keyword::CLUSTER => {
                    // CLUSTER BY (<expr>, ...)
                    parser.expect_keyword_is(Keyword::BY)?;
                    parser.expect_token(&Token::LParen)?;
                    let cluster_by = Some(WrappedCollection::Parentheses(
                        parser.parse_comma_separated(|p| p.parse_expr())?,
                    ));
                    parser.expect_token(&Token::RParen)?;

                    builder = builder.cluster_by(cluster_by)
                }
                Keyword::ENABLE_SCHEMA_EVOLUTION => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.enable_schema_evolution(Some(parser.parse_boolean_string()?));
                }
                Keyword::CHANGE_TRACKING => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.change_tracking(Some(parser.parse_boolean_string()?));
                }
                Keyword::DATA_RETENTION_TIME_IN_DAYS => {
                    parser.expect_token(&Token::Eq)?;
                    let data_retention_time_in_days = parser.parse_literal_uint()?;
                    builder =
                        builder.data_retention_time_in_days(Some(data_retention_time_in_days));
                }
                Keyword::MAX_DATA_EXTENSION_TIME_IN_DAYS => {
                    parser.expect_token(&Token::Eq)?;
                    let max_data_extension_time_in_days = parser.parse_literal_uint()?;
                    builder = builder
                        .max_data_extension_time_in_days(Some(max_data_extension_time_in_days));
                }
                Keyword::DEFAULT_DDL_COLLATION => {
                    parser.expect_token(&Token::Eq)?;
                    let default_ddl_collation = parser.parse_literal_string()?;
                    builder = builder.default_ddl_collation(Some(default_ddl_collation));
                }
                Keyword::WITH => {
                    // WITH is an optional prefix for these properties: verify a
                    // valid follower, then rewind so the next iteration
                    // handles it via its own arm.
                    parser.expect_one_of_keywords(&[
                        Keyword::AGGREGATION,
                        Keyword::STORAGE,
                        Keyword::TAG,
                        Keyword::ROW,
                    ])?;
                    parser.prev_token();
                }
                Keyword::AGGREGATION => {
                    // [WITH] AGGREGATION POLICY <name>
                    parser.expect_keyword_is(Keyword::POLICY)?;
                    let aggregation_policy = parser.parse_object_name(false)?;
                    builder = builder.with_aggregation_policy(Some(aggregation_policy));
                }
                Keyword::ROW => {
                    // [WITH] ROW ACCESS POLICY <name> ON (<col>, ...)
                    parser.expect_keywords(&[Keyword::ACCESS, Keyword::POLICY])?;
                    let policy = parser.parse_object_name(false)?;
                    parser.expect_keyword_is(Keyword::ON)?;
                    parser.expect_token(&Token::LParen)?;
                    let columns = parser.parse_comma_separated(|p| p.parse_identifier())?;
                    parser.expect_token(&Token::RParen)?;

                    builder =
                        builder.with_row_access_policy(Some(RowAccessPolicy::new(policy, columns)))
                }
                Keyword::STORAGE => {
                    // [WITH] STORAGE LIFECYCLE POLICY <name> ON (<col>, ...)
                    parser.expect_keywords(&[Keyword::LIFECYCLE, Keyword::POLICY])?;
                    let policy = parser.parse_object_name(false)?;
                    parser.expect_keyword_is(Keyword::ON)?;
                    parser.expect_token(&Token::LParen)?;
                    let columns = parser.parse_comma_separated(|p| p.parse_identifier())?;
                    parser.expect_token(&Token::RParen)?;

                    builder = builder.with_storage_lifecycle_policy(Some(StorageLifecyclePolicy {
                        policy,
                        on: columns,
                    }))
                }
                Keyword::TAG => {
                    // [WITH] TAG (<name> = '<value>', ...)
                    parser.expect_token(&Token::LParen)?;
                    let tags = parser.parse_comma_separated(Parser::parse_tag)?;
                    parser.expect_token(&Token::RParen)?;
                    builder = builder.with_tags(Some(tags));
                }
                Keyword::ON if parser.parse_keyword(Keyword::COMMIT) => {
                    let on_commit = Some(parser.parse_create_table_on_commit()?);
                    builder = builder.on_commit(on_commit);
                }
                Keyword::EXTERNAL_VOLUME => {
                    parser.expect_token(&Token::Eq)?;
                    builder.external_volume = Some(parser.parse_literal_string()?);
                }
                Keyword::CATALOG => {
                    parser.expect_token(&Token::Eq)?;
                    builder.catalog = Some(parser.parse_literal_string()?);
                }
                Keyword::BASE_LOCATION => {
                    parser.expect_token(&Token::Eq)?;
                    builder.base_location = Some(parser.parse_literal_string()?);
                }
                Keyword::CATALOG_SYNC => {
                    parser.expect_token(&Token::Eq)?;
                    builder.catalog_sync = Some(parser.parse_literal_string()?);
                }
                Keyword::STORAGE_SERIALIZATION_POLICY => {
                    parser.expect_token(&Token::Eq)?;

                    builder.storage_serialization_policy =
                        Some(parse_storage_serialization_policy(parser)?);
                }
                Keyword::IF if parser.parse_keywords(&[Keyword::NOT, Keyword::EXISTS]) => {
                    builder = builder.if_not_exists(true);
                }
                Keyword::TARGET_LAG => {
                    // Dynamic-table property: TARGET_LAG = '<duration>' | 'DOWNSTREAM'
                    parser.expect_token(&Token::Eq)?;
                    let target_lag = parser.parse_literal_string()?;
                    builder = builder.target_lag(Some(target_lag));
                }
                Keyword::WAREHOUSE => {
                    parser.expect_token(&Token::Eq)?;
                    let warehouse = parser.parse_identifier()?;
                    builder = builder.warehouse(Some(warehouse));
                }
                Keyword::AT | Keyword::BEFORE => {
                    // Time-travel clause; rewind so the shared helper sees AT/BEFORE.
                    parser.prev_token();
                    let version = parser.maybe_parse_table_version()?;
                    builder = builder.version(version);
                }
                Keyword::REFRESH_MODE => {
                    parser.expect_token(&Token::Eq)?;
                    let refresh_mode = match parser.parse_one_of_keywords(&[
                        Keyword::AUTO,
                        Keyword::FULL,
                        Keyword::INCREMENTAL,
                    ]) {
                        Some(Keyword::AUTO) => Some(RefreshModeKind::Auto),
                        Some(Keyword::FULL) => Some(RefreshModeKind::Full),
                        Some(Keyword::INCREMENTAL) => Some(RefreshModeKind::Incremental),
                        _ => return parser.expected("AUTO, FULL or INCREMENTAL", next_token),
                    };
                    builder = builder.refresh_mode(refresh_mode);
                }
                Keyword::INITIALIZE => {
                    parser.expect_token(&Token::Eq)?;
                    let initialize = match parser
                        .parse_one_of_keywords(&[Keyword::ON_CREATE, Keyword::ON_SCHEDULE])
                    {
                        Some(Keyword::ON_CREATE) => Some(InitializeKind::OnCreate),
                        Some(Keyword::ON_SCHEDULE) => Some(InitializeKind::OnSchedule),
                        _ => return parser.expected("ON_CREATE or ON_SCHEDULE", next_token),
                    };
                    builder = builder.initialize(initialize);
                }
                Keyword::REQUIRE if parser.parse_keyword(Keyword::USER) => {
                    builder = builder.require_user(true);
                }
                _ => {
                    return parser.expected("end of statement", next_token);
                }
            },
            Token::LParen => {
                // Column/constraint list: rewind the paren for parse_columns.
                parser.prev_token();
                let (columns, constraints) = parser.parse_columns()?;
                builder = builder.columns(columns).constraints(constraints);
            }
            Token::EOF => {
                break;
            }
            Token::SemiColon => {
                // Leave the semicolon for the statement-level parser.
                parser.prev_token();
                break;
            }
            _ => {
                return parser.expected("end of statement", next_token);
            }
        }
    }
    let table_options = if !plain_options.is_empty() {
        crate::ast::CreateTableOptions::Plain(plain_options)
    } else {
        crate::ast::CreateTableOptions::None
    };

    builder = builder.table_options(table_options);

    // Iceberg tables must specify where their data lives.
    if iceberg && builder.base_location.is_none() {
        return Err(ParserError::ParserError(
            "BASE_LOCATION is required for ICEBERG tables".to_string(),
        ));
    }

    Ok(builder.build())
}
1070
/// Parses the body of a Snowflake `CREATE DATABASE` statement. The caller has
/// already consumed `CREATE [OR REPLACE] [TRANSIENT] DATABASE`. Database
/// properties may appear in any order, so the body loops until end of
/// statement.
pub fn parse_create_database(
    or_replace: bool,
    transient: bool,
    parser: &mut Parser,
) -> Result<Statement, ParserError> {
    let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
    let name = parser.parse_object_name(false)?;

    let mut builder = CreateDatabaseBuilder::new(name)
        .or_replace(or_replace)
        .transient(transient)
        .if_not_exists(if_not_exists);

    // Properties may appear in any order; consume until `;` or EOF.
    loop {
        let next_token = parser.next_token();
        match &next_token.token {
            Token::Word(word) => match word.keyword {
                Keyword::CLONE => {
                    builder = builder.clone_clause(Some(parser.parse_object_name(false)?));
                }
                Keyword::DATA_RETENTION_TIME_IN_DAYS => {
                    parser.expect_token(&Token::Eq)?;
                    builder =
                        builder.data_retention_time_in_days(Some(parser.parse_literal_uint()?));
                }
                Keyword::MAX_DATA_EXTENSION_TIME_IN_DAYS => {
                    parser.expect_token(&Token::Eq)?;
                    builder =
                        builder.max_data_extension_time_in_days(Some(parser.parse_literal_uint()?));
                }
                Keyword::EXTERNAL_VOLUME => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.external_volume(Some(parser.parse_literal_string()?));
                }
                Keyword::CATALOG => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.catalog(Some(parser.parse_literal_string()?));
                }
                Keyword::REPLACE_INVALID_CHARACTERS => {
                    parser.expect_token(&Token::Eq)?;
                    builder =
                        builder.replace_invalid_characters(Some(parser.parse_boolean_string()?));
                }
                Keyword::DEFAULT_DDL_COLLATION => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.default_ddl_collation(Some(parser.parse_literal_string()?));
                }
                Keyword::STORAGE_SERIALIZATION_POLICY => {
                    parser.expect_token(&Token::Eq)?;
                    let policy = parse_storage_serialization_policy(parser)?;
                    builder = builder.storage_serialization_policy(Some(policy));
                }
                Keyword::COMMENT => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.comment(Some(parser.parse_literal_string()?));
                }
                Keyword::CATALOG_SYNC => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.catalog_sync(Some(parser.parse_literal_string()?));
                }
                Keyword::CATALOG_SYNC_NAMESPACE_FLATTEN_DELIMITER => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.catalog_sync_namespace_flatten_delimiter(Some(
                        parser.parse_literal_string()?,
                    ));
                }
                Keyword::CATALOG_SYNC_NAMESPACE_MODE => {
                    parser.expect_token(&Token::Eq)?;
                    let mode =
                        match parser.parse_one_of_keywords(&[Keyword::NEST, Keyword::FLATTEN]) {
                            Some(Keyword::NEST) => CatalogSyncNamespaceMode::Nest,
                            Some(Keyword::FLATTEN) => CatalogSyncNamespaceMode::Flatten,
                            _ => {
                                return parser.expected("NEST or FLATTEN", next_token);
                            }
                        };
                    builder = builder.catalog_sync_namespace_mode(Some(mode));
                }
                Keyword::WITH => {
                    // WITH TAG (...) or WITH CONTACT (purpose = contact, ...).
                    if parser.parse_keyword(Keyword::TAG) {
                        parser.expect_token(&Token::LParen)?;
                        let tags = parser.parse_comma_separated(Parser::parse_tag)?;
                        parser.expect_token(&Token::RParen)?;
                        builder = builder.with_tags(Some(tags));
                    } else if parser.parse_keyword(Keyword::CONTACT) {
                        parser.expect_token(&Token::LParen)?;
                        let contacts = parser.parse_comma_separated(|p| {
                            let purpose = p.parse_identifier()?.value;
                            p.expect_token(&Token::Eq)?;
                            let contact = p.parse_identifier()?.value;
                            Ok(ContactEntry { purpose, contact })
                        })?;
                        parser.expect_token(&Token::RParen)?;
                        builder = builder.with_contacts(Some(contacts));
                    } else {
                        return parser.expected("TAG or CONTACT", next_token);
                    }
                }
                _ => return parser.expected("end of statement", next_token),
            },
            Token::SemiColon | Token::EOF => break,
            _ => return parser.expected("end of statement", next_token),
        }
    }
    Ok(builder.build())
}
1179
1180pub fn parse_storage_serialization_policy(
1181 parser: &mut Parser,
1182) -> Result<StorageSerializationPolicy, ParserError> {
1183 let next_token = parser.next_token();
1184 match &next_token.token {
1185 Token::Word(w) => match w.keyword {
1186 Keyword::COMPATIBLE => Ok(StorageSerializationPolicy::Compatible),
1187 Keyword::OPTIMIZED => Ok(StorageSerializationPolicy::Optimized),
1188 _ => parser.expected("storage_serialization_policy", next_token),
1189 },
1190 _ => parser.expected("storage_serialization_policy", next_token),
1191 }
1192}
1193
1194pub fn parse_create_stage(
1195 or_replace: bool,
1196 temporary: bool,
1197 parser: &mut Parser,
1198) -> Result<Statement, ParserError> {
1199 let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
1201 let name = parser.parse_object_name(false)?;
1202 let mut directory_table_params = Vec::new();
1203 let mut file_format = Vec::new();
1204 let mut copy_options = Vec::new();
1205 let mut comment = None;
1206
1207 let stage_params = parse_stage_params(parser)?;
1209
1210 if parser.parse_keyword(Keyword::DIRECTORY) {
1212 parser.expect_token(&Token::Eq)?;
1213 directory_table_params = parser.parse_key_value_options(true, &[])?.options;
1214 }
1215
1216 if parser.parse_keyword(Keyword::FILE_FORMAT) {
1218 parser.expect_token(&Token::Eq)?;
1219 file_format = parser.parse_key_value_options(true, &[])?.options;
1220 }
1221
1222 if parser.parse_keyword(Keyword::COPY_OPTIONS) {
1224 parser.expect_token(&Token::Eq)?;
1225 copy_options = parser.parse_key_value_options(true, &[])?.options;
1226 }
1227
1228 if parser.parse_keyword(Keyword::COMMENT) {
1230 parser.expect_token(&Token::Eq)?;
1231 comment = Some(parser.parse_comment_value()?);
1232 }
1233
1234 Ok(Statement::CreateStage {
1235 or_replace,
1236 temporary,
1237 if_not_exists,
1238 name,
1239 stage_params,
1240 directory_table_params: KeyValueOptions {
1241 options: directory_table_params,
1242 delimiter: KeyValueOptionsDelimiter::Space,
1243 },
1244 file_format: KeyValueOptions {
1245 options: file_format,
1246 delimiter: KeyValueOptionsDelimiter::Space,
1247 },
1248 copy_options: KeyValueOptions {
1249 options: copy_options,
1250 delimiter: KeyValueOptionsDelimiter::Space,
1251 },
1252 comment,
1253 })
1254}
1255
1256pub fn parse_stage_name_identifier(parser: &mut Parser) -> Result<Ident, ParserError> {
1257 let mut ident = String::new();
1258 while let Some(next_token) = parser.next_token_no_skip() {
1259 match &next_token.token {
1260 Token::Whitespace(_) | Token::SemiColon => break,
1261 Token::Period => {
1262 parser.prev_token();
1263 break;
1264 }
1265 Token::LParen | Token::RParen => {
1266 parser.prev_token();
1267 break;
1268 }
1269 Token::AtSign => ident.push('@'),
1270 Token::Tilde => ident.push('~'),
1271 Token::Mod => ident.push('%'),
1272 Token::Div => ident.push('/'),
1273 Token::Plus => ident.push('+'),
1274 Token::Minus => ident.push('-'),
1275 Token::Eq => ident.push('='),
1276 Token::Colon => ident.push(':'),
1277 Token::Number(n, _) => ident.push_str(n),
1278 Token::Word(w) => ident.push_str(&w.to_string()),
1279 _ => return parser.expected_ref("stage name identifier", parser.peek_token_ref()),
1280 }
1281 }
1282 Ok(Ident::new(ident))
1283}
1284
1285pub fn parse_snowflake_stage_name(parser: &mut Parser) -> Result<ObjectName, ParserError> {
1288 match parser.next_token().token {
1289 Token::AtSign => {
1290 parser.prev_token();
1291 let mut idents = vec![];
1292 loop {
1293 idents.push(parse_stage_name_identifier(parser)?);
1294 if !parser.consume_token(&Token::Period) {
1295 break;
1296 }
1297 }
1298 Ok(ObjectName::from(idents))
1299 }
1300 _ => {
1301 parser.prev_token();
1302 Ok(parser.parse_object_name(false)?)
1303 }
1304 }
1305}
1306
1307pub fn parse_copy_into(parser: &mut Parser) -> Result<Statement, ParserError> {
1310 let kind = match &parser.peek_token_ref().token {
1311 Token::AtSign => CopyIntoSnowflakeKind::Location,
1313 Token::SingleQuotedString(s) if s.contains("://") => CopyIntoSnowflakeKind::Location,
1315 _ => CopyIntoSnowflakeKind::Table,
1316 };
1317
1318 let mut files: Vec<String> = vec![];
1319 let mut from_transformations: Option<Vec<StageLoadSelectItemKind>> = None;
1320 let mut from_stage_alias = None;
1321 let mut from_stage = None;
1322 let mut stage_params = StageParamsObject {
1323 url: None,
1324 encryption: KeyValueOptions {
1325 options: vec![],
1326 delimiter: KeyValueOptionsDelimiter::Space,
1327 },
1328 endpoint: None,
1329 storage_integration: None,
1330 credentials: KeyValueOptions {
1331 options: vec![],
1332 delimiter: KeyValueOptionsDelimiter::Space,
1333 },
1334 };
1335 let mut from_query = None;
1336 let mut partition = None;
1337 let mut file_format = Vec::new();
1338 let mut pattern = None;
1339 let mut validation_mode = None;
1340 let mut copy_options = Vec::new();
1341
1342 let into: ObjectName = parse_snowflake_stage_name(parser)?;
1343 if kind == CopyIntoSnowflakeKind::Location {
1344 stage_params = parse_stage_params(parser)?;
1345 }
1346
1347 let into_columns = match &parser.peek_token().token {
1348 Token::LParen => Some(parser.parse_parenthesized_column_list(IsOptional::Optional, true)?),
1349 _ => None,
1350 };
1351
1352 parser.expect_keyword_is(Keyword::FROM)?;
1353 match parser.next_token().token {
1354 Token::LParen if kind == CopyIntoSnowflakeKind::Table => {
1355 parser.expect_keyword_is(Keyword::SELECT)?;
1357 from_transformations = parse_select_items_for_data_load(parser)?;
1358
1359 parser.expect_keyword_is(Keyword::FROM)?;
1360 from_stage = Some(parse_snowflake_stage_name(parser)?);
1361 stage_params = parse_stage_params(parser)?;
1362
1363 from_stage_alias = parser
1365 .maybe_parse_table_alias()?
1366 .map(|table_alias| table_alias.name);
1367 parser.expect_token(&Token::RParen)?;
1368 }
1369 Token::LParen if kind == CopyIntoSnowflakeKind::Location => {
1370 from_query = Some(parser.parse_query()?);
1372 parser.expect_token(&Token::RParen)?;
1373 }
1374 _ => {
1375 parser.prev_token();
1376 from_stage = Some(parse_snowflake_stage_name(parser)?);
1377 stage_params = parse_stage_params(parser)?;
1378
1379 from_stage_alias = if parser.parse_keyword(Keyword::AS) {
1381 Some(match parser.next_token().token {
1382 Token::Word(w) => Ok(Ident::new(w.value)),
1383 _ => parser.expected_ref("stage alias", parser.peek_token_ref()),
1384 }?)
1385 } else {
1386 None
1387 };
1388 }
1389 }
1390
1391 loop {
1392 if parser.parse_keyword(Keyword::FILE_FORMAT) {
1394 parser.expect_token(&Token::Eq)?;
1395 file_format = parser.parse_key_value_options(true, &[])?.options;
1396 } else if parser.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
1398 partition = Some(Box::new(parser.parse_expr()?))
1399 } else if parser.parse_keyword(Keyword::FILES) {
1401 parser.expect_token(&Token::Eq)?;
1402 parser.expect_token(&Token::LParen)?;
1403 let mut continue_loop = true;
1404 while continue_loop {
1405 continue_loop = false;
1406 let next_token = parser.next_token();
1407 match next_token.token {
1408 Token::SingleQuotedString(s) => files.push(s),
1409 _ => parser.expected("file token", next_token)?,
1410 };
1411 if parser.next_token().token.eq(&Token::Comma) {
1412 continue_loop = true;
1413 } else {
1414 parser.prev_token(); }
1416 }
1417 parser.expect_token(&Token::RParen)?;
1418 } else if parser.parse_keyword(Keyword::PATTERN) {
1420 parser.expect_token(&Token::Eq)?;
1421 let next_token = parser.next_token();
1422 pattern = Some(match next_token.token {
1423 Token::SingleQuotedString(s) => s,
1424 _ => parser.expected("pattern", next_token)?,
1425 });
1426 } else if parser.parse_keyword(Keyword::VALIDATION_MODE) {
1428 parser.expect_token(&Token::Eq)?;
1429 validation_mode = Some(parser.next_token().token.to_string());
1430 } else if parser.parse_keyword(Keyword::COPY_OPTIONS) {
1432 parser.expect_token(&Token::Eq)?;
1433 copy_options = parser.parse_key_value_options(true, &[])?.options;
1434 } else {
1435 match parser.next_token().token {
1436 Token::SemiColon | Token::EOF => break,
1437 Token::Comma => continue,
1438 Token::Word(key) => copy_options.push(parser.parse_key_value_option(&key)?),
1441 _ => {
1442 return parser
1443 .expected_ref("another copy option, ; or EOF'", parser.peek_token_ref())
1444 }
1445 }
1446 }
1447 }
1448
1449 Ok(Statement::CopyIntoSnowflake {
1450 kind,
1451 into,
1452 into_columns,
1453 from_obj: from_stage,
1454 from_obj_alias: from_stage_alias,
1455 stage_params,
1456 from_transformations,
1457 from_query,
1458 files: if files.is_empty() { None } else { Some(files) },
1459 pattern,
1460 file_format: KeyValueOptions {
1461 options: file_format,
1462 delimiter: KeyValueOptionsDelimiter::Space,
1463 },
1464 copy_options: KeyValueOptions {
1465 options: copy_options,
1466 delimiter: KeyValueOptionsDelimiter::Space,
1467 },
1468 validation_mode,
1469 partition,
1470 })
1471}
1472
1473fn parse_select_items_for_data_load(
1474 parser: &mut Parser,
1475) -> Result<Option<Vec<StageLoadSelectItemKind>>, ParserError> {
1476 let mut select_items: Vec<StageLoadSelectItemKind> = vec![];
1477 loop {
1478 match parser.maybe_parse(parse_select_item_for_data_load)? {
1479 Some(item) => select_items.push(StageLoadSelectItemKind::StageLoadSelectItem(item)),
1481 None => select_items.push(StageLoadSelectItemKind::SelectItem(
1483 parser.parse_select_item()?,
1484 )),
1485 }
1486 if matches!(parser.peek_token_ref().token, Token::Comma) {
1487 parser.advance_token();
1488 } else {
1489 break;
1490 }
1491 }
1492 Ok(Some(select_items))
1493}
1494
1495fn parse_select_item_for_data_load(
1496 parser: &mut Parser,
1497) -> Result<StageLoadSelectItem, ParserError> {
1498 let mut alias: Option<Ident> = None;
1499 let mut file_col_num: i32 = 0;
1500 let mut element: Option<Ident> = None;
1501 let mut item_as: Option<Ident> = None;
1502
1503 let next_token = parser.next_token();
1504 match next_token.token {
1505 Token::Placeholder(w) => {
1506 file_col_num = w.to_string().split_off(1).parse::<i32>().map_err(|e| {
1507 ParserError::ParserError(format!("Could not parse '{w}' as i32: {e}"))
1508 })?;
1509 Ok(())
1510 }
1511 Token::Word(w) => {
1512 alias = Some(Ident::new(w.value));
1513 Ok(())
1514 }
1515 _ => parser.expected("alias or file_col_num", next_token),
1516 }?;
1517
1518 if alias.is_some() {
1519 parser.expect_token(&Token::Period)?;
1520 let col_num_token = parser.next_token();
1522 match col_num_token.token {
1523 Token::Placeholder(w) => {
1524 file_col_num = w.to_string().split_off(1).parse::<i32>().map_err(|e| {
1525 ParserError::ParserError(format!("Could not parse '{w}' as i32: {e}"))
1526 })?;
1527 Ok(())
1528 }
1529 _ => parser.expected("file_col_num", col_num_token),
1530 }?;
1531 }
1532
1533 match parser.next_token().token {
1535 Token::Colon => {
1536 element = Some(Ident::new(match parser.next_token().token {
1538 Token::Word(w) => Ok(w.value),
1539 _ => parser.expected_ref("file_col_num", parser.peek_token_ref()),
1540 }?));
1541 }
1542 _ => {
1543 parser.prev_token();
1545 }
1546 }
1547
1548 if parser.parse_keyword(Keyword::AS) {
1550 item_as = Some(match parser.next_token().token {
1551 Token::Word(w) => Ok(Ident::new(w.value)),
1552 _ => parser.expected_ref("column item alias", parser.peek_token_ref()),
1553 }?);
1554 }
1555
1556 Ok(StageLoadSelectItem {
1557 alias,
1558 file_col_num,
1559 element,
1560 item_as,
1561 })
1562}
1563
1564fn parse_stage_params(parser: &mut Parser) -> Result<StageParamsObject, ParserError> {
1565 let (mut url, mut storage_integration, mut endpoint) = (None, None, None);
1566 let mut encryption: KeyValueOptions = KeyValueOptions {
1567 options: vec![],
1568 delimiter: KeyValueOptionsDelimiter::Space,
1569 };
1570 let mut credentials: KeyValueOptions = KeyValueOptions {
1571 options: vec![],
1572 delimiter: KeyValueOptionsDelimiter::Space,
1573 };
1574
1575 if parser.parse_keyword(Keyword::URL) {
1577 parser.expect_token(&Token::Eq)?;
1578 url = Some(match parser.next_token().token {
1579 Token::SingleQuotedString(word) => Ok(word),
1580 _ => parser.expected_ref("a URL statement", parser.peek_token_ref()),
1581 }?)
1582 }
1583
1584 if parser.parse_keyword(Keyword::STORAGE_INTEGRATION) {
1586 parser.expect_token(&Token::Eq)?;
1587 storage_integration = Some(parser.next_token().token.to_string());
1588 }
1589
1590 if parser.parse_keyword(Keyword::ENDPOINT) {
1592 parser.expect_token(&Token::Eq)?;
1593 endpoint = Some(match parser.next_token().token {
1594 Token::SingleQuotedString(word) => Ok(word),
1595 _ => parser.expected_ref("an endpoint statement", parser.peek_token_ref()),
1596 }?)
1597 }
1598
1599 if parser.parse_keyword(Keyword::CREDENTIALS) {
1601 parser.expect_token(&Token::Eq)?;
1602 credentials = KeyValueOptions {
1603 options: parser.parse_key_value_options(true, &[])?.options,
1604 delimiter: KeyValueOptionsDelimiter::Space,
1605 };
1606 }
1607
1608 if parser.parse_keyword(Keyword::ENCRYPTION) {
1610 parser.expect_token(&Token::Eq)?;
1611 encryption = KeyValueOptions {
1612 options: parser.parse_key_value_options(true, &[])?.options,
1613 delimiter: KeyValueOptionsDelimiter::Space,
1614 };
1615 }
1616
1617 Ok(StageParamsObject {
1618 url,
1619 encryption,
1620 endpoint,
1621 storage_integration,
1622 credentials,
1623 })
1624}
1625
1626fn parse_session_options(
1631 parser: &mut Parser,
1632 set: bool,
1633) -> Result<Vec<KeyValueOption>, ParserError> {
1634 let mut options: Vec<KeyValueOption> = Vec::new();
1635 let empty = String::new;
1636 loop {
1637 let peeked_token = parser.peek_token();
1638 match peeked_token.token {
1639 Token::SemiColon | Token::EOF => break,
1640 Token::Comma => {
1641 parser.advance_token();
1642 continue;
1643 }
1644 Token::Word(key) => {
1645 parser.advance_token();
1646 if set {
1647 let option = parser.parse_key_value_option(&key)?;
1648 options.push(option);
1649 } else {
1650 options.push(KeyValueOption {
1651 option_name: key.value,
1652 option_value: KeyValueOptionKind::Single(
1653 Value::Placeholder(empty()).with_span(Span {
1654 start: peeked_token.span.end,
1655 end: peeked_token.span.end,
1656 }),
1657 ),
1658 });
1659 }
1660 }
1661 _ => {
1662 return parser.expected("another option or end of statement", peeked_token);
1663 }
1664 }
1665 }
1666 if options.is_empty() {
1667 Err(ParserError::ParserError(
1668 "expected at least one option".to_string(),
1669 ))
1670 } else {
1671 Ok(options)
1672 }
1673}
1674
1675fn parse_identity_property(parser: &mut Parser) -> Result<IdentityProperty, ParserError> {
1682 let parameters = if parser.consume_token(&Token::LParen) {
1683 let seed = parser.parse_number()?;
1684 parser.expect_token(&Token::Comma)?;
1685 let increment = parser.parse_number()?;
1686 parser.expect_token(&Token::RParen)?;
1687
1688 Some(IdentityPropertyFormatKind::FunctionCall(
1689 IdentityParameters { seed, increment },
1690 ))
1691 } else if parser.parse_keyword(Keyword::START) {
1692 let seed = parser.parse_number()?;
1693 parser.expect_keyword_is(Keyword::INCREMENT)?;
1694 let increment = parser.parse_number()?;
1695
1696 Some(IdentityPropertyFormatKind::StartAndIncrement(
1697 IdentityParameters { seed, increment },
1698 ))
1699 } else {
1700 None
1701 };
1702 let order = match parser.parse_one_of_keywords(&[Keyword::ORDER, Keyword::NOORDER]) {
1703 Some(Keyword::ORDER) => Some(IdentityPropertyOrder::Order),
1704 Some(Keyword::NOORDER) => Some(IdentityPropertyOrder::NoOrder),
1705 _ => None,
1706 };
1707 Ok(IdentityProperty { parameters, order })
1708}
1709
1710fn parse_column_policy_property(
1717 parser: &mut Parser,
1718 with: bool,
1719) -> Result<ColumnPolicyProperty, ParserError> {
1720 let policy_name = parser.parse_object_name(false)?;
1721 let using_columns = if parser.parse_keyword(Keyword::USING) {
1722 parser.expect_token(&Token::LParen)?;
1723 let columns = parser.parse_comma_separated(|p| p.parse_identifier())?;
1724 parser.expect_token(&Token::RParen)?;
1725 Some(columns)
1726 } else {
1727 None
1728 };
1729
1730 Ok(ColumnPolicyProperty {
1731 with,
1732 policy_name,
1733 using_columns,
1734 })
1735}
1736
1737fn parse_column_tags(parser: &mut Parser, with: bool) -> Result<TagsColumnOption, ParserError> {
1744 parser.expect_token(&Token::LParen)?;
1745 let tags = parser.parse_comma_separated(Parser::parse_tag)?;
1746 parser.expect_token(&Token::RParen)?;
1747
1748 Ok(TagsColumnOption { with, tags })
1749}
1750
1751fn parse_show_objects(terse: bool, parser: &mut Parser) -> Result<Statement, ParserError> {
1754 let show_options = parser.parse_show_stmt_options()?;
1755 Ok(Statement::ShowObjects(ShowObjects {
1756 terse,
1757 show_options,
1758 }))
1759}
1760
1761fn parse_multi_table_insert(
1780 parser: &mut Parser,
1781 insert_token: TokenWithSpan,
1782 overwrite: bool,
1783 multi_table_insert_type: MultiTableInsertType,
1784) -> Result<Statement, ParserError> {
1785 let is_conditional = parser.peek_keyword(Keyword::WHEN);
1787
1788 let (multi_table_into_clauses, multi_table_when_clauses, multi_table_else_clause) =
1789 if is_conditional {
1790 let (when_clauses, else_clause) = parse_multi_table_insert_when_clauses(parser)?;
1792 (vec![], when_clauses, else_clause)
1793 } else {
1794 let into_clauses = parse_multi_table_insert_into_clauses(parser)?;
1796 (into_clauses, vec![], None)
1797 };
1798
1799 let source = parser.parse_query()?;
1801
1802 Ok(Statement::Insert(Insert {
1803 insert_token: insert_token.into(),
1804 optimizer_hints: vec![],
1805 or: None,
1806 ignore: false,
1807 into: false,
1808 table: TableObject::TableName(ObjectName(vec![])), table_alias: None,
1810 columns: vec![],
1811 overwrite,
1812 source: Some(source),
1813 assignments: vec![],
1814 partitioned: None,
1815 after_columns: vec![],
1816 has_table_keyword: false,
1817 on: None,
1818 returning: None,
1819 output: None,
1820 replace_into: false,
1821 priority: None,
1822 insert_alias: None,
1823 settings: None,
1824 format_clause: None,
1825 multi_table_insert_type: Some(multi_table_insert_type),
1826 multi_table_into_clauses,
1827 multi_table_when_clauses,
1828 multi_table_else_clause,
1829 }))
1830}
1831
1832fn parse_multi_table_insert_into_clauses(
1834 parser: &mut Parser,
1835) -> Result<Vec<MultiTableInsertIntoClause>, ParserError> {
1836 let mut into_clauses = vec![];
1837 while parser.parse_keyword(Keyword::INTO) {
1838 into_clauses.push(parse_multi_table_insert_into_clause(parser)?);
1839 }
1840 if into_clauses.is_empty() {
1841 return parser.expected_ref("INTO clause in multi-table INSERT", parser.peek_token_ref());
1842 }
1843 Ok(into_clauses)
1844}
1845
1846fn parse_multi_table_insert_into_clause(
1850 parser: &mut Parser,
1851) -> Result<MultiTableInsertIntoClause, ParserError> {
1852 let table_name = parser.parse_object_name(false)?;
1853
1854 let columns = parser
1856 .maybe_parse(|p| p.parse_parenthesized_column_list(IsOptional::Mandatory, false))?
1857 .unwrap_or_default();
1858
1859 let values = if parser.parse_keyword(Keyword::VALUES) {
1861 parser.expect_token(&Token::LParen)?;
1862 let values = parser.parse_comma_separated(parse_multi_table_insert_value)?;
1863 parser.expect_token(&Token::RParen)?;
1864 Some(MultiTableInsertValues { values })
1865 } else {
1866 None
1867 };
1868
1869 Ok(MultiTableInsertIntoClause {
1870 table_name,
1871 columns,
1872 values,
1873 })
1874}
1875
1876fn parse_multi_table_insert_value(
1878 parser: &mut Parser,
1879) -> Result<MultiTableInsertValue, ParserError> {
1880 if parser.parse_keyword(Keyword::DEFAULT) {
1881 Ok(MultiTableInsertValue::Default)
1882 } else {
1883 Ok(MultiTableInsertValue::Expr(parser.parse_expr()?))
1884 }
1885}
1886
1887fn parse_multi_table_insert_when_clauses(
1889 parser: &mut Parser,
1890) -> Result<
1891 (
1892 Vec<MultiTableInsertWhenClause>,
1893 Option<Vec<MultiTableInsertIntoClause>>,
1894 ),
1895 ParserError,
1896> {
1897 let mut when_clauses = vec![];
1898 let mut else_clause = None;
1899
1900 while parser.parse_keyword(Keyword::WHEN) {
1902 let condition = parser.parse_expr()?;
1903 parser.expect_keyword(Keyword::THEN)?;
1904
1905 let into_clauses = parse_multi_table_insert_into_clauses(parser)?;
1907
1908 when_clauses.push(MultiTableInsertWhenClause {
1909 condition,
1910 into_clauses,
1911 });
1912 }
1913
1914 if parser.parse_keyword(Keyword::ELSE) {
1916 else_clause = Some(parse_multi_table_insert_into_clauses(parser)?);
1917 }
1918
1919 if when_clauses.is_empty() {
1920 return parser.expected_ref(
1921 "at least one WHEN clause in conditional multi-table INSERT",
1922 parser.peek_token_ref(),
1923 );
1924 }
1925
1926 Ok((when_clauses, else_clause))
1927}