1#[cfg(not(feature = "std"))]
19use crate::alloc::string::ToString;
20use crate::ast::helpers::attached_token::AttachedToken;
21use crate::ast::helpers::key_value_options::{
22 KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
23};
24use crate::ast::helpers::stmt_create_database::CreateDatabaseBuilder;
25use crate::ast::helpers::stmt_create_table::CreateTableBuilder;
26use crate::ast::helpers::stmt_data_loading::{
27 FileStagingCommand, StageLoadSelectItem, StageLoadSelectItemKind, StageParamsObject,
28};
29use crate::ast::{
30 AlterTable, AlterTableOperation, AlterTableType, CatalogSyncNamespaceMode, ColumnOption,
31 ColumnPolicy, ColumnPolicyProperty, ContactEntry, CopyIntoSnowflakeKind, CreateTableLikeKind,
32 DollarQuotedString, Ident, IdentityParameters, IdentityProperty, IdentityPropertyFormatKind,
33 IdentityPropertyKind, IdentityPropertyOrder, InitializeKind, ObjectName, ObjectNamePart,
34 RefreshModeKind, RowAccessPolicy, ShowObjects, SqlOption, Statement,
35 StorageSerializationPolicy, TagsColumnOption, Value, WrappedCollection,
36};
37use crate::dialect::{Dialect, Precedence};
38use crate::keywords::Keyword;
39use crate::parser::{IsOptional, Parser, ParserError};
40use crate::tokenizer::Token;
41#[cfg(not(feature = "std"))]
42use alloc::boxed::Box;
43#[cfg(not(feature = "std"))]
44use alloc::string::String;
45#[cfg(not(feature = "std"))]
46use alloc::vec::Vec;
47#[cfg(not(feature = "std"))]
48use alloc::{format, vec};
49
50use super::keywords::RESERVED_FOR_IDENTIFIER;
51
/// Keywords treated as reserved select-item operators; returned by
/// [`SnowflakeDialect::get_reserved_keywords_for_select_item_operator`].
const RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR: [Keyword; 1] = [Keyword::CONNECT_BY_ROOT];
53
/// Keywords that may not begin a table factor. Consulted by
/// [`SnowflakeDialect::is_table_factor`]: any other keyword is allowed to be
/// (re)interpreted as a table name in Snowflake.
const RESERVED_KEYWORDS_FOR_TABLE_FACTOR: &[Keyword] = &[
    Keyword::ALL,
    Keyword::ALTER,
    Keyword::AND,
    Keyword::ANY,
    Keyword::AS,
    Keyword::BETWEEN,
    Keyword::BY,
    Keyword::CHECK,
    Keyword::COLUMN,
    Keyword::CONNECT,
    Keyword::CREATE,
    Keyword::CROSS,
    Keyword::CURRENT,
    Keyword::DELETE,
    Keyword::DISTINCT,
    Keyword::DROP,
    Keyword::ELSE,
    Keyword::EXISTS,
    Keyword::FOLLOWING,
    Keyword::FOR,
    Keyword::FROM,
    Keyword::FULL,
    Keyword::GRANT,
    Keyword::GROUP,
    Keyword::HAVING,
    Keyword::ILIKE,
    Keyword::IN,
    Keyword::INCREMENT,
    Keyword::INNER,
    Keyword::INSERT,
    Keyword::INTERSECT,
    Keyword::INTO,
    Keyword::IS,
    Keyword::JOIN,
    Keyword::LEFT,
    Keyword::LIKE,
    Keyword::MINUS,
    Keyword::NATURAL,
    Keyword::NOT,
    Keyword::NULL,
    Keyword::OF,
    Keyword::ON,
    Keyword::OR,
    Keyword::ORDER,
    Keyword::QUALIFY,
    Keyword::REGEXP,
    Keyword::REVOKE,
    Keyword::RIGHT,
    Keyword::RLIKE,
    Keyword::ROW,
    Keyword::ROWS,
    Keyword::SAMPLE,
    Keyword::SELECT,
    Keyword::SET,
    Keyword::SOME,
    Keyword::START,
    Keyword::TABLE,
    Keyword::TABLESAMPLE,
    Keyword::THEN,
    Keyword::TO,
    Keyword::TRIGGER,
    Keyword::UNION,
    Keyword::UNIQUE,
    Keyword::UPDATE,
    Keyword::USING,
    Keyword::VALUES,
    Keyword::WHEN,
    Keyword::WHENEVER,
    Keyword::WHERE,
    Keyword::WINDOW,
    Keyword::WITH,
];
128
/// A [`Dialect`] implementation for the Snowflake SQL dialect.
#[derive(Debug, Default)]
pub struct SnowflakeDialect;
132
impl Dialect for SnowflakeDialect {
    /// Identifiers must begin with an ASCII letter or `_`.
    fn is_identifier_start(&self, ch: char) -> bool {
        ch.is_ascii_lowercase() || ch.is_ascii_uppercase() || ch == '_'
    }

    /// Tolerate a trailing comma in the projection list.
    fn supports_projection_trailing_commas(&self) -> bool {
        true
    }

    /// Tolerate a trailing comma in the `FROM` clause.
    fn supports_from_trailing_commas(&self) -> bool {
        true
    }

    /// Allow `db..table` (schema omitted between the two dots).
    fn supports_object_name_double_dot_notation(&self) -> bool {
        true
    }

    /// Identifier tail characters: ASCII letters, digits, `$`, and `_`.
    fn is_identifier_part(&self, ch: char) -> bool {
        ch.is_ascii_lowercase()
            || ch.is_ascii_uppercase()
            || ch.is_ascii_digit()
            || ch == '$'
            || ch == '_'
    }

    /// Honor backslash escapes inside string literals.
    fn supports_string_literal_backslash_escape(&self) -> bool {
        true
    }

    /// Allow `WITHIN GROUP (...)` after array aggregations.
    fn supports_within_after_array_aggregation(&self) -> bool {
        true
    }

    /// Support the `(+)` outer-join operator.
    fn supports_outer_join_operator(&self) -> bool {
        true
    }

    /// Support `CONNECT BY` hierarchical queries.
    fn supports_connect_by(&self) -> bool {
        true
    }

    /// Support `EXECUTE IMMEDIATE`.
    fn supports_execute_immediate(&self) -> bool {
        true
    }

    /// Support `MATCH_RECOGNIZE`.
    fn supports_match_recognize(&self) -> bool {
        true
    }

    /// Support dictionary (map) literal syntax.
    fn supports_dictionary_syntax(&self) -> bool {
        true
    }

    /// Allow a null-treatment argument (e.g. `IGNORE NULLS`) inside window
    /// function call arguments.
    fn supports_window_function_null_treatment_arg(&self) -> bool {
        true
    }

    /// Allow parenthesized variable lists in `SET` statements.
    fn supports_parenthesized_set_variables(&self) -> bool {
        true
    }

    /// Support `COMMENT ON ...` statements.
    fn supports_comment_on(&self) -> bool {
        true
    }

    /// Entry point for Snowflake-specific statements. Returns `None` to fall
    /// back to the generic parser; `Some(Err(..))` aborts with a parse error.
    fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
        // BEGIN ... [EXCEPTION ...] END block.
        if parser.parse_keyword(Keyword::BEGIN) {
            return Some(parser.parse_begin_exception_end());
        }

        if parser.parse_keywords(&[Keyword::ALTER, Keyword::DYNAMIC, Keyword::TABLE]) {
            return Some(parse_alter_dynamic_table(parser));
        }

        if parser.parse_keywords(&[Keyword::ALTER, Keyword::EXTERNAL, Keyword::TABLE]) {
            return Some(parse_alter_external_table(parser));
        }

        // ALTER SESSION SET ... | ALTER SESSION UNSET ...
        if parser.parse_keywords(&[Keyword::ALTER, Keyword::SESSION]) {
            let set = match parser.parse_one_of_keywords(&[Keyword::SET, Keyword::UNSET]) {
                Some(Keyword::SET) => true,
                Some(Keyword::UNSET) => false,
                _ => return Some(parser.expected("SET or UNSET", parser.peek_token())),
            };
            return Some(parse_alter_session(parser, set));
        }

        // CREATE [OR REPLACE] [LOCAL|GLOBAL] [DYNAMIC]
        //   [TEMP|TEMPORARY|VOLATILE|TRANSIENT|ICEBERG] STAGE|TABLE|DATABASE ...
        if parser.parse_keyword(Keyword::CREATE) {
            let or_replace = parser.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
            let global = match parser.parse_one_of_keywords(&[Keyword::LOCAL, Keyword::GLOBAL]) {
                Some(Keyword::LOCAL) => Some(false),
                Some(Keyword::GLOBAL) => Some(true),
                _ => None,
            };

            let dynamic = parser.parse_keyword(Keyword::DYNAMIC);

            let mut temporary = false;
            let mut volatile = false;
            let mut transient = false;
            let mut iceberg = false;

            match parser.parse_one_of_keywords(&[
                Keyword::TEMP,
                Keyword::TEMPORARY,
                Keyword::VOLATILE,
                Keyword::TRANSIENT,
                Keyword::ICEBERG,
            ]) {
                Some(Keyword::TEMP | Keyword::TEMPORARY) => temporary = true,
                Some(Keyword::VOLATILE) => volatile = true,
                Some(Keyword::TRANSIENT) => transient = true,
                Some(Keyword::ICEBERG) => iceberg = true,
                _ => {}
            }

            if parser.parse_keyword(Keyword::STAGE) {
                return Some(parse_create_stage(or_replace, temporary, parser));
            } else if parser.parse_keyword(Keyword::TABLE) {
                return Some(parse_create_table(
                    or_replace, global, temporary, volatile, transient, iceberg, dynamic, parser,
                ));
            } else if parser.parse_keyword(Keyword::DATABASE) {
                return Some(parse_create_database(or_replace, transient, parser));
            } else {
                // Not an object handled here: rewind the consumed keywords and
                // let the generic parser retry the statement.
                // NOTE(review): only CREATE, OR REPLACE and TEMP/TEMPORARY are
                // rewound; LOCAL/GLOBAL, DYNAMIC, VOLATILE, TRANSIENT and
                // ICEBERG consumed above are not accounted for — confirm.
                let mut back = 1;
                if or_replace {
                    back += 2
                }
                if temporary {
                    back += 1
                }
                for _i in 0..back {
                    parser.prev_token();
                }
            }
        }
        if parser.parse_keywords(&[Keyword::COPY, Keyword::INTO]) {
            return Some(parse_copy_into(parser));
        }

        // File-staging commands: LIST/LS/REMOVE/RM @stage ...
        if let Some(kw) = parser.parse_one_of_keywords(&[
            Keyword::LIST,
            Keyword::LS,
            Keyword::REMOVE,
            Keyword::RM,
        ]) {
            return Some(parse_file_staging_command(kw, parser));
        }

        // SHOW [TERSE] OBJECTS ...
        if parser.parse_keyword(Keyword::SHOW) {
            let terse = parser.parse_keyword(Keyword::TERSE);
            if parser.parse_keyword(Keyword::OBJECTS) {
                return Some(parse_show_objects(terse, parser));
            }
            // Not SHOW OBJECTS: rewind TERSE (if consumed) and SHOW so the
            // generic parser can handle other SHOW variants.
            if terse {
                parser.prev_token();
            }
            parser.prev_token();
        }

        None
    }

    /// Parses Snowflake-specific column options (IDENTITY/AUTOINCREMENT,
    /// masking/projection policies, tags). The outer `Result`/`Option` come
    /// from [`Parser::maybe_parse`]: `Ok(None)` means no option matched and
    /// all tokens were restored; the inner value carries the parsed option.
    fn parse_column_option(
        &self,
        parser: &mut Parser,
    ) -> Result<Option<Result<Option<ColumnOption>, ParserError>>, ParserError> {
        parser.maybe_parse(|parser| {
            // Optional WITH prefix, recorded so the AST can round-trip it.
            let with = parser.parse_keyword(Keyword::WITH);

            if parser.parse_keyword(Keyword::IDENTITY) {
                Ok(parse_identity_property(parser)
                    .map(|p| Some(ColumnOption::Identity(IdentityPropertyKind::Identity(p)))))
            } else if parser.parse_keyword(Keyword::AUTOINCREMENT) {
                Ok(parse_identity_property(parser).map(|p| {
                    Some(ColumnOption::Identity(IdentityPropertyKind::Autoincrement(
                        p,
                    )))
                }))
            } else if parser.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
                Ok(parse_column_policy_property(parser, with)
                    .map(|p| Some(ColumnOption::Policy(ColumnPolicy::MaskingPolicy(p)))))
            } else if parser.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
                Ok(parse_column_policy_property(parser, with)
                    .map(|p| Some(ColumnOption::Policy(ColumnPolicy::ProjectionPolicy(p)))))
            } else if parser.parse_keywords(&[Keyword::TAG]) {
                Ok(parse_column_tags(parser, with).map(|p| Some(ColumnOption::Tags(p))))
            } else {
                // Signal maybe_parse to backtrack; the message is never shown.
                Err(ParserError::ParserError("not found match".to_string()))
            }
        })
    }

    /// `:` (Snowflake path access) binds with the same precedence as `::`.
    fn get_next_precedence(&self, parser: &Parser) -> Option<Result<u8, ParserError>> {
        let token = parser.peek_token();
        match token.token {
            Token::Colon => Some(Ok(self.prec_value(Precedence::DoubleColon))),
            _ => None,
        }
    }

    /// `DESCRIBE` requires the `TABLE` keyword before a table name.
    fn describe_requires_table_keyword(&self) -> bool {
        true
    }

    /// Allow non-standard fields in `EXTRACT(<field> FROM ...)`.
    fn allow_extract_custom(&self) -> bool {
        true
    }

    /// Allow single-quoted fields in `EXTRACT('field' FROM ...)`.
    fn allow_extract_single_quotes(&self) -> bool {
        true
    }

    /// `SHOW ... LIKE '...' IN ...` puts LIKE before IN.
    fn supports_show_like_before_in(&self) -> bool {
        true
    }

    fn supports_left_associative_joins_without_parens(&self) -> bool {
        false
    }

    /// INTERVAL is usable as an identifier in Snowflake; everything else
    /// follows the shared reserved list.
    fn is_reserved_for_identifier(&self, kw: Keyword) -> bool {
        if matches!(kw, Keyword::INTERVAL) {
            false
        } else {
            RESERVED_FOR_IDENTIFIER.contains(&kw)
        }
    }

    /// Support PartiQL-style path expressions.
    fn supports_partiql(&self) -> bool {
        true
    }

    /// Decides whether a keyword after a select item is an implicit column
    /// alias. Most keywords are allowed as aliases; the exceptions below are
    /// only rejected when lookahead shows they start a real clause.
    fn is_column_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool {
        match kw {
            // EXCEPT/RETURNING start a clause unless the statement ends or a
            // comma follows (in which case they are a plain alias).
            Keyword::EXCEPT
            | Keyword::RETURNING if !matches!(parser.peek_token_ref().token, Token::Comma | Token::EOF) =>
            {
                false
            }

            // LIMIT/OFFSET followed by a limit-style value are clauses.
            Keyword::LIMIT | Keyword::OFFSET if peek_for_limit_options(parser) => false,

            // FETCH FIRST/NEXT or FETCH <n> is a clause.
            Keyword::FETCH if parser.peek_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]).is_some()
                || peek_for_limit_options(parser) =>
            {
                false
            }

            // Keywords that always terminate the projection list.
            Keyword::FROM
            | Keyword::GROUP
            | Keyword::HAVING
            | Keyword::INTERSECT
            | Keyword::INTO
            | Keyword::MINUS
            | Keyword::ORDER
            | Keyword::SELECT
            | Keyword::UNION
            | Keyword::WHERE
            | Keyword::WITH => false,

            // Any other keyword may serve as a column alias.
            _ => true,
        }
    }

    /// Decides whether a keyword after a table factor is an implicit table
    /// alias, mirroring [`Self::is_column_alias`] with join-related keywords.
    fn is_table_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool {
        match kw {
            // These start a clause unless the statement ends right after them.
            Keyword::RETURNING
            | Keyword::INNER
            | Keyword::USING
            | Keyword::PIVOT
            | Keyword::UNPIVOT
            | Keyword::EXCEPT
            | Keyword::MATCH_RECOGNIZE
                if !matches!(parser.peek_token_ref().token, Token::SemiColon | Token::EOF) =>
            {
                false
            }

            Keyword::LIMIT | Keyword::OFFSET if peek_for_limit_options(parser) => false,

            Keyword::FETCH
                if parser
                    .peek_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT])
                    .is_some()
                    || peek_for_limit_options(parser) =>
            {
                false
            }

            // RIGHT/LEFT/SEMI/ANTI followed by JOIN/OUTER begin a join.
            Keyword::RIGHT | Keyword::LEFT | Keyword::SEMI | Keyword::ANTI
                if parser
                    .peek_one_of_keywords(&[Keyword::JOIN, Keyword::OUTER])
                    .is_some() =>
            {
                false
            }

            // GLOBAL FULL [OUTER] JOIN.
            Keyword::GLOBAL if parser.peek_keyword(Keyword::FULL) => false,

            // Keywords that always terminate the table factor.
            Keyword::WITH
            | Keyword::ORDER
            | Keyword::SELECT
            | Keyword::WHERE
            | Keyword::GROUP
            | Keyword::HAVING
            | Keyword::LATERAL
            | Keyword::UNION
            | Keyword::INTERSECT
            | Keyword::MINUS
            | Keyword::ON
            | Keyword::JOIN
            | Keyword::INNER
            | Keyword::CROSS
            | Keyword::FULL
            | Keyword::LEFT
            | Keyword::RIGHT
            | Keyword::NATURAL
            | Keyword::USING
            | Keyword::ASOF
            | Keyword::MATCH_CONDITION
            | Keyword::SET
            | Keyword::QUALIFY
            | Keyword::FOR
            | Keyword::START
            | Keyword::CONNECT
            | Keyword::SAMPLE
            | Keyword::TABLESAMPLE
            | Keyword::FROM => false,

            // Any other keyword may serve as a table alias.
            _ => true,
        }
    }

    /// Decides whether a keyword can begin a table factor; defers to
    /// `RESERVED_KEYWORDS_FOR_TABLE_FACTOR` except for two special cases.
    fn is_table_factor(&self, kw: &Keyword, parser: &mut Parser) -> bool {
        match kw {
            Keyword::LIMIT if peek_for_limit_options(parser) => false,
            // `TABLE(...)` is a table function call, always a table factor.
            Keyword::TABLE if matches!(parser.peek_token_ref().token, Token::LParen) => true,
            _ => !RESERVED_KEYWORDS_FOR_TABLE_FACTOR.contains(kw),
        }
    }

    /// Support time-travel clauses (`AT`/`BEFORE`) on tables.
    fn supports_timestamp_versioning(&self) -> bool {
        true
    }

    /// Allow arbitrary expressions in `GROUP BY`.
    fn supports_group_by_expr(&self) -> bool {
        true
    }

    fn get_reserved_keywords_for_select_item_operator(&self) -> &[Keyword] {
        &RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR
    }

    /// Column options may be space-separated (no commas required).
    fn supports_space_separated_column_options(&self) -> bool {
        true
    }

    /// `ALTER TABLE ... DROP COLUMN a, b, c` is accepted.
    fn supports_comma_separated_drop_column_list(&self) -> bool {
        true
    }

    /// `IDENTIFIER(...)` acts as an identifier-generating function unless it
    /// is quoted or a function already appears earlier in the object name.
    fn is_identifier_generating_function_name(
        &self,
        ident: &Ident,
        name_parts: &[ObjectNamePart],
    ) -> bool {
        ident.quote_style.is_none()
            && ident.value.to_lowercase() == "identifier"
            && !name_parts
                .iter()
                .any(|p| matches!(p, ObjectNamePart::Function(_)))
    }

    /// Allow `SELECT expr.*`.
    fn supports_select_expr_star(&self) -> bool {
        true
    }

    /// Allow `SELECT * EXCLUDE (...)`.
    fn supports_select_wildcard_exclude(&self) -> bool {
        true
    }

    /// Allow `SEMANTIC_VIEW(...)` as a table factor.
    fn supports_semantic_view_table_factor(&self) -> bool {
        true
    }
}
586
587fn peek_for_limit_options(parser: &Parser) -> bool {
590 match &parser.peek_token_ref().token {
591 Token::Number(_, _) | Token::Placeholder(_) => true,
592 Token::SingleQuotedString(val) if val.is_empty() => true,
593 Token::DollarQuotedString(DollarQuotedString { value, .. }) if value.is_empty() => true,
594 Token::Word(w) if w.keyword == Keyword::NULL => true,
595 _ => false,
596 }
597}
598
599fn parse_file_staging_command(kw: Keyword, parser: &mut Parser) -> Result<Statement, ParserError> {
600 let stage = parse_snowflake_stage_name(parser)?;
601 let pattern = if parser.parse_keyword(Keyword::PATTERN) {
602 parser.expect_token(&Token::Eq)?;
603 Some(parser.parse_literal_string()?)
604 } else {
605 None
606 };
607
608 match kw {
609 Keyword::LIST | Keyword::LS => Ok(Statement::List(FileStagingCommand { stage, pattern })),
610 Keyword::REMOVE | Keyword::RM => {
611 Ok(Statement::Remove(FileStagingCommand { stage, pattern }))
612 }
613 _ => Err(ParserError::ParserError(
614 "unexpected stage command, expecting LIST, LS, REMOVE or RM".to_string(),
615 )),
616 }
617}
618
/// Parses `ALTER DYNAMIC TABLE <name> REFRESH | SUSPEND | RESUME`; the
/// `ALTER DYNAMIC TABLE` keywords were already consumed by the caller.
fn parse_alter_dynamic_table(parser: &mut Parser) -> Result<Statement, ParserError> {
    let table_name = parser.parse_object_name(true)?;

    let operation = if parser.parse_keyword(Keyword::REFRESH) {
        AlterTableOperation::Refresh { subpath: None }
    } else if parser.parse_keyword(Keyword::SUSPEND) {
        AlterTableOperation::Suspend
    } else if parser.parse_keyword(Keyword::RESUME) {
        AlterTableOperation::Resume
    } else {
        return parser.expected(
            "REFRESH, SUSPEND, or RESUME after ALTER DYNAMIC TABLE",
            parser.peek_token(),
        );
    };

    // Record where the statement ends: the upcoming `;` if present,
    // otherwise the last token consumed.
    let end_token = if parser.peek_token_ref().token == Token::SemiColon {
        parser.peek_token_ref().clone()
    } else {
        parser.get_current_token().clone()
    };

    Ok(Statement::AlterTable(AlterTable {
        name: table_name,
        if_exists: false,
        only: false,
        operations: vec![operation],
        location: None,
        on_cluster: None,
        table_type: Some(AlterTableType::Dynamic),
        end_token: AttachedToken(end_token),
    }))
}
656
/// Parses `ALTER EXTERNAL TABLE [IF EXISTS] <name> REFRESH ['subpath']`; the
/// `ALTER EXTERNAL TABLE` keywords were already consumed by the caller.
fn parse_alter_external_table(parser: &mut Parser) -> Result<Statement, ParserError> {
    let if_exists = parser.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
    let table_name = parser.parse_object_name(true)?;

    let operation = if parser.parse_keyword(Keyword::REFRESH) {
        // REFRESH may be followed by an optional quoted subpath.
        let subpath = match parser.peek_token().token {
            Token::SingleQuotedString(s) => {
                parser.next_token();
                Some(s)
            }
            _ => None,
        };
        AlterTableOperation::Refresh { subpath }
    } else {
        return parser.expected("REFRESH after ALTER EXTERNAL TABLE", parser.peek_token());
    };

    // Record where the statement ends: the upcoming `;` if present,
    // otherwise the last token consumed.
    let end_token = if parser.peek_token_ref().token == Token::SemiColon {
        parser.peek_token_ref().clone()
    } else {
        parser.get_current_token().clone()
    };

    Ok(Statement::AlterTable(AlterTable {
        name: table_name,
        if_exists,
        only: false,
        operations: vec![operation],
        location: None,
        on_cluster: None,
        table_type: Some(AlterTableType::External),
        end_token: AttachedToken(end_token),
    }))
}
695
696fn parse_alter_session(parser: &mut Parser, set: bool) -> Result<Statement, ParserError> {
699 let session_options = parse_session_options(parser, set)?;
700 Ok(Statement::AlterSession {
701 set,
702 session_params: KeyValueOptions {
703 options: session_options,
704 delimiter: KeyValueOptionsDelimiter::Space,
705 },
706 })
707}
708
/// Parses the body of a Snowflake `CREATE TABLE` statement (everything after
/// the `TABLE` keyword). The boolean flags reflect modifiers already consumed
/// by the caller (`OR REPLACE`, `LOCAL`/`GLOBAL`, `TEMP`, `VOLATILE`,
/// `TRANSIENT`, `ICEBERG`, `DYNAMIC`). Table properties may appear in any
/// order, so they are consumed in a loop until `;` or EOF.
#[allow(clippy::too_many_arguments)]
pub fn parse_create_table(
    or_replace: bool,
    global: Option<bool>,
    temporary: bool,
    volatile: bool,
    transient: bool,
    iceberg: bool,
    dynamic: bool,
    parser: &mut Parser,
) -> Result<Statement, ParserError> {
    let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
    let table_name = parser.parse_object_name(false)?;

    let mut builder = CreateTableBuilder::new(table_name)
        .or_replace(or_replace)
        .if_not_exists(if_not_exists)
        .temporary(temporary)
        .transient(transient)
        .volatile(volatile)
        .iceberg(iceberg)
        .global(global)
        .dynamic(dynamic)
        .hive_formats(None);

    // Options not tracked by dedicated builder fields (currently COMMENT).
    let mut plain_options = vec![];

    loop {
        let next_token = parser.next_token();
        match &next_token.token {
            Token::Word(word) => match word.keyword {
                Keyword::COPY => {
                    parser.expect_keyword_is(Keyword::GRANTS)?;
                    builder = builder.copy_grants(true);
                }
                Keyword::COMMENT => {
                    // Rewind so the shared helper sees the COMMENT keyword.
                    parser.prev_token();
                    if let Some(comment_def) = parser.parse_optional_inline_comment()? {
                        plain_options.push(SqlOption::Comment(comment_def))
                    }
                }
                Keyword::AS => {
                    let query = parser.parse_query()?;
                    builder = builder.query(Some(query));
                }
                Keyword::CLONE => {
                    // NOTE(review): a malformed CLONE source name is silently
                    // dropped (`.ok()`) rather than reported — confirm intent.
                    let clone = parser.parse_object_name(false).ok();
                    builder = builder.clone_clause(clone);
                }
                Keyword::LIKE => {
                    let name = parser.parse_object_name(false)?;
                    builder = builder.like(Some(CreateTableLikeKind::Plain(
                        crate::ast::CreateTableLike {
                            name,
                            defaults: None,
                        },
                    )));
                }
                Keyword::CLUSTER => {
                    parser.expect_keyword_is(Keyword::BY)?;
                    parser.expect_token(&Token::LParen)?;
                    let cluster_by = Some(WrappedCollection::Parentheses(
                        parser.parse_comma_separated(|p| p.parse_expr())?,
                    ));
                    parser.expect_token(&Token::RParen)?;

                    builder = builder.cluster_by(cluster_by)
                }
                Keyword::ENABLE_SCHEMA_EVOLUTION => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.enable_schema_evolution(Some(parser.parse_boolean_string()?));
                }
                Keyword::CHANGE_TRACKING => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.change_tracking(Some(parser.parse_boolean_string()?));
                }
                Keyword::DATA_RETENTION_TIME_IN_DAYS => {
                    parser.expect_token(&Token::Eq)?;
                    let data_retention_time_in_days = parser.parse_literal_uint()?;
                    builder =
                        builder.data_retention_time_in_days(Some(data_retention_time_in_days));
                }
                Keyword::MAX_DATA_EXTENSION_TIME_IN_DAYS => {
                    parser.expect_token(&Token::Eq)?;
                    let max_data_extension_time_in_days = parser.parse_literal_uint()?;
                    builder = builder
                        .max_data_extension_time_in_days(Some(max_data_extension_time_in_days));
                }
                Keyword::DEFAULT_DDL_COLLATION => {
                    parser.expect_token(&Token::Eq)?;
                    let default_ddl_collation = parser.parse_literal_string()?;
                    builder = builder.default_ddl_collation(Some(default_ddl_collation));
                }
                Keyword::WITH => {
                    // WITH is only a prefix for AGGREGATION/TAG/ROW options;
                    // validate the lookahead, then rewind so the next loop
                    // iteration dispatches on that keyword.
                    parser.expect_one_of_keywords(&[
                        Keyword::AGGREGATION,
                        Keyword::TAG,
                        Keyword::ROW,
                    ])?;
                    parser.prev_token();
                }
                Keyword::AGGREGATION => {
                    parser.expect_keyword_is(Keyword::POLICY)?;
                    let aggregation_policy = parser.parse_object_name(false)?;
                    builder = builder.with_aggregation_policy(Some(aggregation_policy));
                }
                Keyword::ROW => {
                    // ROW ACCESS POLICY <name> ON (<columns>)
                    parser.expect_keywords(&[Keyword::ACCESS, Keyword::POLICY])?;
                    let policy = parser.parse_object_name(false)?;
                    parser.expect_keyword_is(Keyword::ON)?;
                    parser.expect_token(&Token::LParen)?;
                    let columns = parser.parse_comma_separated(|p| p.parse_identifier())?;
                    parser.expect_token(&Token::RParen)?;

                    builder =
                        builder.with_row_access_policy(Some(RowAccessPolicy::new(policy, columns)))
                }
                Keyword::TAG => {
                    parser.expect_token(&Token::LParen)?;
                    let tags = parser.parse_comma_separated(Parser::parse_tag)?;
                    parser.expect_token(&Token::RParen)?;
                    builder = builder.with_tags(Some(tags));
                }
                Keyword::ON if parser.parse_keyword(Keyword::COMMIT) => {
                    let on_commit = Some(parser.parse_create_table_on_commit()?);
                    builder = builder.on_commit(on_commit);
                }
                // Iceberg-table properties.
                Keyword::EXTERNAL_VOLUME => {
                    parser.expect_token(&Token::Eq)?;
                    builder.external_volume = Some(parser.parse_literal_string()?);
                }
                Keyword::CATALOG => {
                    parser.expect_token(&Token::Eq)?;
                    builder.catalog = Some(parser.parse_literal_string()?);
                }
                Keyword::BASE_LOCATION => {
                    parser.expect_token(&Token::Eq)?;
                    builder.base_location = Some(parser.parse_literal_string()?);
                }
                Keyword::CATALOG_SYNC => {
                    parser.expect_token(&Token::Eq)?;
                    builder.catalog_sync = Some(parser.parse_literal_string()?);
                }
                Keyword::STORAGE_SERIALIZATION_POLICY => {
                    parser.expect_token(&Token::Eq)?;

                    builder.storage_serialization_policy =
                        Some(parse_storage_serialization_policy(parser)?);
                }
                Keyword::IF if parser.parse_keywords(&[Keyword::NOT, Keyword::EXISTS]) => {
                    builder = builder.if_not_exists(true);
                }
                // Dynamic-table properties.
                Keyword::TARGET_LAG => {
                    parser.expect_token(&Token::Eq)?;
                    let target_lag = parser.parse_literal_string()?;
                    builder = builder.target_lag(Some(target_lag));
                }
                Keyword::WAREHOUSE => {
                    parser.expect_token(&Token::Eq)?;
                    let warehouse = parser.parse_identifier()?;
                    builder = builder.warehouse(Some(warehouse));
                }
                Keyword::AT | Keyword::BEFORE => {
                    // Time-travel clause: rewind so the helper re-reads AT/BEFORE.
                    parser.prev_token();
                    let version = parser.maybe_parse_table_version()?;
                    builder = builder.version(version);
                }
                Keyword::REFRESH_MODE => {
                    parser.expect_token(&Token::Eq)?;
                    let refresh_mode = match parser.parse_one_of_keywords(&[
                        Keyword::AUTO,
                        Keyword::FULL,
                        Keyword::INCREMENTAL,
                    ]) {
                        Some(Keyword::AUTO) => Some(RefreshModeKind::Auto),
                        Some(Keyword::FULL) => Some(RefreshModeKind::Full),
                        Some(Keyword::INCREMENTAL) => Some(RefreshModeKind::Incremental),
                        // NOTE(review): the error points at the REFRESH_MODE
                        // word, not the offending value token — confirm.
                        _ => return parser.expected("AUTO, FULL or INCREMENTAL", next_token),
                    };
                    builder = builder.refresh_mode(refresh_mode);
                }
                Keyword::INITIALIZE => {
                    parser.expect_token(&Token::Eq)?;
                    let initialize = match parser
                        .parse_one_of_keywords(&[Keyword::ON_CREATE, Keyword::ON_SCHEDULE])
                    {
                        Some(Keyword::ON_CREATE) => Some(InitializeKind::OnCreate),
                        Some(Keyword::ON_SCHEDULE) => Some(InitializeKind::OnSchedule),
                        _ => return parser.expected("ON_CREATE or ON_SCHEDULE", next_token),
                    };
                    builder = builder.initialize(initialize);
                }
                Keyword::REQUIRE if parser.parse_keyword(Keyword::USER) => {
                    builder = builder.require_user(true);
                }
                _ => {
                    return parser.expected("end of statement", next_token);
                }
            },
            Token::LParen => {
                // Column/constraint list; rewind so parse_columns sees `(`.
                parser.prev_token();
                let (columns, constraints) = parser.parse_columns()?;
                builder = builder.columns(columns).constraints(constraints);
            }
            Token::EOF => {
                break;
            }
            Token::SemiColon => {
                // Leave the `;` for the statement-level parser.
                parser.prev_token();
                break;
            }
            _ => {
                return parser.expected("end of statement", next_token);
            }
        }
    }
    let table_options = if !plain_options.is_empty() {
        crate::ast::CreateTableOptions::Plain(plain_options)
    } else {
        crate::ast::CreateTableOptions::None
    };

    builder = builder.table_options(table_options);

    // Snowflake requires BASE_LOCATION for Iceberg tables.
    if iceberg && builder.base_location.is_none() {
        return Err(ParserError::ParserError(
            "BASE_LOCATION is required for ICEBERG tables".to_string(),
        ));
    }

    Ok(builder.build())
}
953
/// Parses the body of a Snowflake `CREATE DATABASE` statement (everything
/// after the `DATABASE` keyword). `or_replace`/`transient` reflect modifiers
/// already consumed by the caller. Database properties may appear in any
/// order and are consumed in a loop until `;` or EOF.
pub fn parse_create_database(
    or_replace: bool,
    transient: bool,
    parser: &mut Parser,
) -> Result<Statement, ParserError> {
    let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
    let name = parser.parse_object_name(false)?;

    let mut builder = CreateDatabaseBuilder::new(name)
        .or_replace(or_replace)
        .transient(transient)
        .if_not_exists(if_not_exists);

    loop {
        let next_token = parser.next_token();
        match &next_token.token {
            Token::Word(word) => match word.keyword {
                Keyword::CLONE => {
                    builder = builder.clone_clause(Some(parser.parse_object_name(false)?));
                }
                Keyword::DATA_RETENTION_TIME_IN_DAYS => {
                    parser.expect_token(&Token::Eq)?;
                    builder =
                        builder.data_retention_time_in_days(Some(parser.parse_literal_uint()?));
                }
                Keyword::MAX_DATA_EXTENSION_TIME_IN_DAYS => {
                    parser.expect_token(&Token::Eq)?;
                    builder =
                        builder.max_data_extension_time_in_days(Some(parser.parse_literal_uint()?));
                }
                Keyword::EXTERNAL_VOLUME => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.external_volume(Some(parser.parse_literal_string()?));
                }
                Keyword::CATALOG => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.catalog(Some(parser.parse_literal_string()?));
                }
                Keyword::REPLACE_INVALID_CHARACTERS => {
                    parser.expect_token(&Token::Eq)?;
                    builder =
                        builder.replace_invalid_characters(Some(parser.parse_boolean_string()?));
                }
                Keyword::DEFAULT_DDL_COLLATION => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.default_ddl_collation(Some(parser.parse_literal_string()?));
                }
                Keyword::STORAGE_SERIALIZATION_POLICY => {
                    parser.expect_token(&Token::Eq)?;
                    let policy = parse_storage_serialization_policy(parser)?;
                    builder = builder.storage_serialization_policy(Some(policy));
                }
                Keyword::COMMENT => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.comment(Some(parser.parse_literal_string()?));
                }
                Keyword::CATALOG_SYNC => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.catalog_sync(Some(parser.parse_literal_string()?));
                }
                Keyword::CATALOG_SYNC_NAMESPACE_FLATTEN_DELIMITER => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.catalog_sync_namespace_flatten_delimiter(Some(
                        parser.parse_literal_string()?,
                    ));
                }
                Keyword::CATALOG_SYNC_NAMESPACE_MODE => {
                    parser.expect_token(&Token::Eq)?;
                    let mode =
                        match parser.parse_one_of_keywords(&[Keyword::NEST, Keyword::FLATTEN]) {
                            Some(Keyword::NEST) => CatalogSyncNamespaceMode::Nest,
                            Some(Keyword::FLATTEN) => CatalogSyncNamespaceMode::Flatten,
                            _ => {
                                return parser.expected("NEST or FLATTEN", next_token);
                            }
                        };
                    builder = builder.catalog_sync_namespace_mode(Some(mode));
                }
                Keyword::WITH => {
                    // WITH TAG (...) or WITH CONTACT (purpose = contact, ...).
                    if parser.parse_keyword(Keyword::TAG) {
                        parser.expect_token(&Token::LParen)?;
                        let tags = parser.parse_comma_separated(Parser::parse_tag)?;
                        parser.expect_token(&Token::RParen)?;
                        builder = builder.with_tags(Some(tags));
                    } else if parser.parse_keyword(Keyword::CONTACT) {
                        parser.expect_token(&Token::LParen)?;
                        let contacts = parser.parse_comma_separated(|p| {
                            let purpose = p.parse_identifier()?.value;
                            p.expect_token(&Token::Eq)?;
                            let contact = p.parse_identifier()?.value;
                            Ok(ContactEntry { purpose, contact })
                        })?;
                        parser.expect_token(&Token::RParen)?;
                        builder = builder.with_contacts(Some(contacts));
                    } else {
                        return parser.expected("TAG or CONTACT", next_token);
                    }
                }
                _ => return parser.expected("end of statement", next_token),
            },
            Token::SemiColon | Token::EOF => break,
            _ => return parser.expected("end of statement", next_token),
        }
    }
    Ok(builder.build())
}
1062
1063pub fn parse_storage_serialization_policy(
1064 parser: &mut Parser,
1065) -> Result<StorageSerializationPolicy, ParserError> {
1066 let next_token = parser.next_token();
1067 match &next_token.token {
1068 Token::Word(w) => match w.keyword {
1069 Keyword::COMPATIBLE => Ok(StorageSerializationPolicy::Compatible),
1070 Keyword::OPTIMIZED => Ok(StorageSerializationPolicy::Optimized),
1071 _ => parser.expected("storage_serialization_policy", next_token),
1072 },
1073 _ => parser.expected("storage_serialization_policy", next_token),
1074 }
1075}
1076
/// Parses the body of a Snowflake `CREATE STAGE` statement (everything after
/// the `STAGE` keyword). `or_replace`/`temporary` reflect modifiers already
/// consumed by the caller. Unlike `CREATE TABLE`, the optional clauses here
/// are accepted in this fixed order: stage params, DIRECTORY, FILE_FORMAT,
/// COPY_OPTIONS, COMMENT.
pub fn parse_create_stage(
    or_replace: bool,
    temporary: bool,
    parser: &mut Parser,
) -> Result<Statement, ParserError> {
    let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
    let name = parser.parse_object_name(false)?;
    let mut directory_table_params = Vec::new();
    let mut file_format = Vec::new();
    let mut copy_options = Vec::new();
    let mut comment = None;

    // URL / credentials / encryption etc.
    let stage_params = parse_stage_params(parser)?;

    // DIRECTORY = ( key = value ... )
    if parser.parse_keyword(Keyword::DIRECTORY) {
        parser.expect_token(&Token::Eq)?;
        directory_table_params = parser.parse_key_value_options(true, &[])?.options;
    }

    // FILE_FORMAT = ( key = value ... )
    if parser.parse_keyword(Keyword::FILE_FORMAT) {
        parser.expect_token(&Token::Eq)?;
        file_format = parser.parse_key_value_options(true, &[])?.options;
    }

    // COPY_OPTIONS = ( key = value ... )
    if parser.parse_keyword(Keyword::COPY_OPTIONS) {
        parser.expect_token(&Token::Eq)?;
        copy_options = parser.parse_key_value_options(true, &[])?.options;
    }

    if parser.parse_keyword(Keyword::COMMENT) {
        parser.expect_token(&Token::Eq)?;
        comment = Some(parser.parse_comment_value()?);
    }

    Ok(Statement::CreateStage {
        or_replace,
        temporary,
        if_not_exists,
        name,
        stage_params,
        directory_table_params: KeyValueOptions {
            options: directory_table_params,
            delimiter: KeyValueOptionsDelimiter::Space,
        },
        file_format: KeyValueOptions {
            options: file_format,
            delimiter: KeyValueOptionsDelimiter::Space,
        },
        copy_options: KeyValueOptions {
            options: copy_options,
            delimiter: KeyValueOptionsDelimiter::Space,
        },
        comment,
    })
}
1138
/// Builds one segment of a stage name by concatenating raw tokens (`@`, `~`,
/// `%`, `/`, `+`, `-`, numbers, words) until whitespace or `;` ends it, or a
/// `.` / `)` is reached (which is pushed back for the caller).
pub fn parse_stage_name_identifier(parser: &mut Parser) -> Result<Ident, ParserError> {
    let mut ident = String::new();
    // next_token_no_skip keeps whitespace visible so the stage name ends
    // exactly at the first space.
    while let Some(next_token) = parser.next_token_no_skip() {
        match &next_token.token {
            Token::Whitespace(_) | Token::SemiColon => break,
            Token::Period => {
                parser.prev_token();
                break;
            }
            Token::RParen => {
                parser.prev_token();
                break;
            }
            Token::AtSign => ident.push('@'),
            Token::Tilde => ident.push('~'),
            Token::Mod => ident.push('%'),
            Token::Div => ident.push('/'),
            Token::Plus => ident.push('+'),
            Token::Minus => ident.push('-'),
            Token::Number(n, _) => ident.push_str(n),
            Token::Word(w) => ident.push_str(&w.to_string()),
            // NOTE(review): the offending token was already consumed, so this
            // error reports the token *after* it — confirm intent.
            _ => return parser.expected("stage name identifier", parser.peek_token()),
        }
    }
    Ok(Ident::new(ident))
}
1165
1166pub fn parse_snowflake_stage_name(parser: &mut Parser) -> Result<ObjectName, ParserError> {
1167 match parser.next_token().token {
1168 Token::AtSign => {
1169 parser.prev_token();
1170 let mut idents = vec![];
1171 loop {
1172 idents.push(parse_stage_name_identifier(parser)?);
1173 if !parser.consume_token(&Token::Period) {
1174 break;
1175 }
1176 }
1177 Ok(ObjectName::from(idents))
1178 }
1179 _ => {
1180 parser.prev_token();
1181 Ok(parser.parse_object_name(false)?)
1182 }
1183 }
1184}
1185
1186pub fn parse_copy_into(parser: &mut Parser) -> Result<Statement, ParserError> {
1189 let kind = match parser.peek_token().token {
1190 Token::AtSign => CopyIntoSnowflakeKind::Location,
1192 Token::SingleQuotedString(s) if s.contains("://") => CopyIntoSnowflakeKind::Location,
1194 _ => CopyIntoSnowflakeKind::Table,
1195 };
1196
1197 let mut files: Vec<String> = vec![];
1198 let mut from_transformations: Option<Vec<StageLoadSelectItemKind>> = None;
1199 let mut from_stage_alias = None;
1200 let mut from_stage = None;
1201 let mut stage_params = StageParamsObject {
1202 url: None,
1203 encryption: KeyValueOptions {
1204 options: vec![],
1205 delimiter: KeyValueOptionsDelimiter::Space,
1206 },
1207 endpoint: None,
1208 storage_integration: None,
1209 credentials: KeyValueOptions {
1210 options: vec![],
1211 delimiter: KeyValueOptionsDelimiter::Space,
1212 },
1213 };
1214 let mut from_query = None;
1215 let mut partition = None;
1216 let mut file_format = Vec::new();
1217 let mut pattern = None;
1218 let mut validation_mode = None;
1219 let mut copy_options = Vec::new();
1220
1221 let into: ObjectName = parse_snowflake_stage_name(parser)?;
1222 if kind == CopyIntoSnowflakeKind::Location {
1223 stage_params = parse_stage_params(parser)?;
1224 }
1225
1226 let into_columns = match &parser.peek_token().token {
1227 Token::LParen => Some(parser.parse_parenthesized_column_list(IsOptional::Optional, true)?),
1228 _ => None,
1229 };
1230
1231 parser.expect_keyword_is(Keyword::FROM)?;
1232 match parser.next_token().token {
1233 Token::LParen if kind == CopyIntoSnowflakeKind::Table => {
1234 parser.expect_keyword_is(Keyword::SELECT)?;
1236 from_transformations = parse_select_items_for_data_load(parser)?;
1237
1238 parser.expect_keyword_is(Keyword::FROM)?;
1239 from_stage = Some(parse_snowflake_stage_name(parser)?);
1240 stage_params = parse_stage_params(parser)?;
1241
1242 from_stage_alias = parser
1244 .maybe_parse_table_alias()?
1245 .map(|table_alias| table_alias.name);
1246 parser.expect_token(&Token::RParen)?;
1247 }
1248 Token::LParen if kind == CopyIntoSnowflakeKind::Location => {
1249 from_query = Some(parser.parse_query()?);
1251 parser.expect_token(&Token::RParen)?;
1252 }
1253 _ => {
1254 parser.prev_token();
1255 from_stage = Some(parse_snowflake_stage_name(parser)?);
1256 stage_params = parse_stage_params(parser)?;
1257
1258 from_stage_alias = if parser.parse_keyword(Keyword::AS) {
1260 Some(match parser.next_token().token {
1261 Token::Word(w) => Ok(Ident::new(w.value)),
1262 _ => parser.expected("stage alias", parser.peek_token()),
1263 }?)
1264 } else {
1265 None
1266 };
1267 }
1268 }
1269
1270 loop {
1271 if parser.parse_keyword(Keyword::FILE_FORMAT) {
1273 parser.expect_token(&Token::Eq)?;
1274 file_format = parser.parse_key_value_options(true, &[])?.options;
1275 } else if parser.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
1277 partition = Some(Box::new(parser.parse_expr()?))
1278 } else if parser.parse_keyword(Keyword::FILES) {
1280 parser.expect_token(&Token::Eq)?;
1281 parser.expect_token(&Token::LParen)?;
1282 let mut continue_loop = true;
1283 while continue_loop {
1284 continue_loop = false;
1285 let next_token = parser.next_token();
1286 match next_token.token {
1287 Token::SingleQuotedString(s) => files.push(s),
1288 _ => parser.expected("file token", next_token)?,
1289 };
1290 if parser.next_token().token.eq(&Token::Comma) {
1291 continue_loop = true;
1292 } else {
1293 parser.prev_token(); }
1295 }
1296 parser.expect_token(&Token::RParen)?;
1297 } else if parser.parse_keyword(Keyword::PATTERN) {
1299 parser.expect_token(&Token::Eq)?;
1300 let next_token = parser.next_token();
1301 pattern = Some(match next_token.token {
1302 Token::SingleQuotedString(s) => s,
1303 _ => parser.expected("pattern", next_token)?,
1304 });
1305 } else if parser.parse_keyword(Keyword::VALIDATION_MODE) {
1307 parser.expect_token(&Token::Eq)?;
1308 validation_mode = Some(parser.next_token().token.to_string());
1309 } else if parser.parse_keyword(Keyword::COPY_OPTIONS) {
1311 parser.expect_token(&Token::Eq)?;
1312 copy_options = parser.parse_key_value_options(true, &[])?.options;
1313 } else {
1314 match parser.next_token().token {
1315 Token::SemiColon | Token::EOF => break,
1316 Token::Comma => continue,
1317 Token::Word(key) => copy_options.push(parser.parse_key_value_option(&key)?),
1320 _ => return parser.expected("another copy option, ; or EOF'", parser.peek_token()),
1321 }
1322 }
1323 }
1324
1325 Ok(Statement::CopyIntoSnowflake {
1326 kind,
1327 into,
1328 into_columns,
1329 from_obj: from_stage,
1330 from_obj_alias: from_stage_alias,
1331 stage_params,
1332 from_transformations,
1333 from_query,
1334 files: if files.is_empty() { None } else { Some(files) },
1335 pattern,
1336 file_format: KeyValueOptions {
1337 options: file_format,
1338 delimiter: KeyValueOptionsDelimiter::Space,
1339 },
1340 copy_options: KeyValueOptions {
1341 options: copy_options,
1342 delimiter: KeyValueOptionsDelimiter::Space,
1343 },
1344 validation_mode,
1345 partition,
1346 })
1347}
1348
1349fn parse_select_items_for_data_load(
1350 parser: &mut Parser,
1351) -> Result<Option<Vec<StageLoadSelectItemKind>>, ParserError> {
1352 let mut select_items: Vec<StageLoadSelectItemKind> = vec![];
1353 loop {
1354 match parser.maybe_parse(parse_select_item_for_data_load)? {
1355 Some(item) => select_items.push(StageLoadSelectItemKind::StageLoadSelectItem(item)),
1357 None => select_items.push(StageLoadSelectItemKind::SelectItem(
1359 parser.parse_select_item()?,
1360 )),
1361 }
1362 if matches!(parser.peek_token_ref().token, Token::Comma) {
1363 parser.advance_token();
1364 } else {
1365 break;
1366 }
1367 }
1368 Ok(Some(select_items))
1369}
1370
1371fn parse_select_item_for_data_load(
1372 parser: &mut Parser,
1373) -> Result<StageLoadSelectItem, ParserError> {
1374 let mut alias: Option<Ident> = None;
1375 let mut file_col_num: i32 = 0;
1376 let mut element: Option<Ident> = None;
1377 let mut item_as: Option<Ident> = None;
1378
1379 let next_token = parser.next_token();
1380 match next_token.token {
1381 Token::Placeholder(w) => {
1382 file_col_num = w.to_string().split_off(1).parse::<i32>().map_err(|e| {
1383 ParserError::ParserError(format!("Could not parse '{w}' as i32: {e}"))
1384 })?;
1385 Ok(())
1386 }
1387 Token::Word(w) => {
1388 alias = Some(Ident::new(w.value));
1389 Ok(())
1390 }
1391 _ => parser.expected("alias or file_col_num", next_token),
1392 }?;
1393
1394 if alias.is_some() {
1395 parser.expect_token(&Token::Period)?;
1396 let col_num_token = parser.next_token();
1398 match col_num_token.token {
1399 Token::Placeholder(w) => {
1400 file_col_num = w.to_string().split_off(1).parse::<i32>().map_err(|e| {
1401 ParserError::ParserError(format!("Could not parse '{w}' as i32: {e}"))
1402 })?;
1403 Ok(())
1404 }
1405 _ => parser.expected("file_col_num", col_num_token),
1406 }?;
1407 }
1408
1409 match parser.next_token().token {
1411 Token::Colon => {
1412 element = Some(Ident::new(match parser.next_token().token {
1414 Token::Word(w) => Ok(w.value),
1415 _ => parser.expected("file_col_num", parser.peek_token()),
1416 }?));
1417 }
1418 _ => {
1419 parser.prev_token();
1421 }
1422 }
1423
1424 if parser.parse_keyword(Keyword::AS) {
1426 item_as = Some(match parser.next_token().token {
1427 Token::Word(w) => Ok(Ident::new(w.value)),
1428 _ => parser.expected("column item alias", parser.peek_token()),
1429 }?);
1430 }
1431
1432 Ok(StageLoadSelectItem {
1433 alias,
1434 file_col_num,
1435 element,
1436 item_as,
1437 })
1438}
1439
1440fn parse_stage_params(parser: &mut Parser) -> Result<StageParamsObject, ParserError> {
1441 let (mut url, mut storage_integration, mut endpoint) = (None, None, None);
1442 let mut encryption: KeyValueOptions = KeyValueOptions {
1443 options: vec![],
1444 delimiter: KeyValueOptionsDelimiter::Space,
1445 };
1446 let mut credentials: KeyValueOptions = KeyValueOptions {
1447 options: vec![],
1448 delimiter: KeyValueOptionsDelimiter::Space,
1449 };
1450
1451 if parser.parse_keyword(Keyword::URL) {
1453 parser.expect_token(&Token::Eq)?;
1454 url = Some(match parser.next_token().token {
1455 Token::SingleQuotedString(word) => Ok(word),
1456 _ => parser.expected("a URL statement", parser.peek_token()),
1457 }?)
1458 }
1459
1460 if parser.parse_keyword(Keyword::STORAGE_INTEGRATION) {
1462 parser.expect_token(&Token::Eq)?;
1463 storage_integration = Some(parser.next_token().token.to_string());
1464 }
1465
1466 if parser.parse_keyword(Keyword::ENDPOINT) {
1468 parser.expect_token(&Token::Eq)?;
1469 endpoint = Some(match parser.next_token().token {
1470 Token::SingleQuotedString(word) => Ok(word),
1471 _ => parser.expected("an endpoint statement", parser.peek_token()),
1472 }?)
1473 }
1474
1475 if parser.parse_keyword(Keyword::CREDENTIALS) {
1477 parser.expect_token(&Token::Eq)?;
1478 credentials = KeyValueOptions {
1479 options: parser.parse_key_value_options(true, &[])?.options,
1480 delimiter: KeyValueOptionsDelimiter::Space,
1481 };
1482 }
1483
1484 if parser.parse_keyword(Keyword::ENCRYPTION) {
1486 parser.expect_token(&Token::Eq)?;
1487 encryption = KeyValueOptions {
1488 options: parser.parse_key_value_options(true, &[])?.options,
1489 delimiter: KeyValueOptionsDelimiter::Space,
1490 };
1491 }
1492
1493 Ok(StageParamsObject {
1494 url,
1495 encryption,
1496 endpoint,
1497 storage_integration,
1498 credentials,
1499 })
1500}
1501
1502fn parse_session_options(
1507 parser: &mut Parser,
1508 set: bool,
1509) -> Result<Vec<KeyValueOption>, ParserError> {
1510 let mut options: Vec<KeyValueOption> = Vec::new();
1511 let empty = String::new;
1512 loop {
1513 let next_token = parser.peek_token();
1514 match next_token.token {
1515 Token::SemiColon | Token::EOF => break,
1516 Token::Comma => {
1517 parser.advance_token();
1518 continue;
1519 }
1520 Token::Word(key) => {
1521 parser.advance_token();
1522 if set {
1523 let option = parser.parse_key_value_option(&key)?;
1524 options.push(option);
1525 } else {
1526 options.push(KeyValueOption {
1527 option_name: key.value,
1528 option_value: KeyValueOptionKind::Single(Value::Placeholder(empty())),
1529 });
1530 }
1531 }
1532 _ => {
1533 return parser.expected("another option or end of statement", next_token);
1534 }
1535 }
1536 }
1537 if options.is_empty() {
1538 Err(ParserError::ParserError(
1539 "expected at least one option".to_string(),
1540 ))
1541 } else {
1542 Ok(options)
1543 }
1544}
1545
1546fn parse_identity_property(parser: &mut Parser) -> Result<IdentityProperty, ParserError> {
1553 let parameters = if parser.consume_token(&Token::LParen) {
1554 let seed = parser.parse_number()?;
1555 parser.expect_token(&Token::Comma)?;
1556 let increment = parser.parse_number()?;
1557 parser.expect_token(&Token::RParen)?;
1558
1559 Some(IdentityPropertyFormatKind::FunctionCall(
1560 IdentityParameters { seed, increment },
1561 ))
1562 } else if parser.parse_keyword(Keyword::START) {
1563 let seed = parser.parse_number()?;
1564 parser.expect_keyword_is(Keyword::INCREMENT)?;
1565 let increment = parser.parse_number()?;
1566
1567 Some(IdentityPropertyFormatKind::StartAndIncrement(
1568 IdentityParameters { seed, increment },
1569 ))
1570 } else {
1571 None
1572 };
1573 let order = match parser.parse_one_of_keywords(&[Keyword::ORDER, Keyword::NOORDER]) {
1574 Some(Keyword::ORDER) => Some(IdentityPropertyOrder::Order),
1575 Some(Keyword::NOORDER) => Some(IdentityPropertyOrder::NoOrder),
1576 _ => None,
1577 };
1578 Ok(IdentityProperty { parameters, order })
1579}
1580
1581fn parse_column_policy_property(
1588 parser: &mut Parser,
1589 with: bool,
1590) -> Result<ColumnPolicyProperty, ParserError> {
1591 let policy_name = parser.parse_object_name(false)?;
1592 let using_columns = if parser.parse_keyword(Keyword::USING) {
1593 parser.expect_token(&Token::LParen)?;
1594 let columns = parser.parse_comma_separated(|p| p.parse_identifier())?;
1595 parser.expect_token(&Token::RParen)?;
1596 Some(columns)
1597 } else {
1598 None
1599 };
1600
1601 Ok(ColumnPolicyProperty {
1602 with,
1603 policy_name,
1604 using_columns,
1605 })
1606}
1607
1608fn parse_column_tags(parser: &mut Parser, with: bool) -> Result<TagsColumnOption, ParserError> {
1615 parser.expect_token(&Token::LParen)?;
1616 let tags = parser.parse_comma_separated(Parser::parse_tag)?;
1617 parser.expect_token(&Token::RParen)?;
1618
1619 Ok(TagsColumnOption { with, tags })
1620}
1621
1622fn parse_show_objects(terse: bool, parser: &mut Parser) -> Result<Statement, ParserError> {
1625 let show_options = parser.parse_show_stmt_options()?;
1626 Ok(Statement::ShowObjects(ShowObjects {
1627 terse,
1628 show_options,
1629 }))
1630}