1#[cfg(not(feature = "std"))]
19use crate::alloc::string::ToString;
20use crate::ast::helpers::key_value_options::{
21 KeyValueOption, KeyValueOptionType, KeyValueOptions, KeyValueOptionsDelimiter,
22};
23use crate::ast::helpers::stmt_create_database::CreateDatabaseBuilder;
24use crate::ast::helpers::stmt_create_table::CreateTableBuilder;
25use crate::ast::helpers::stmt_data_loading::{
26 FileStagingCommand, StageLoadSelectItem, StageLoadSelectItemKind, StageParamsObject,
27};
28use crate::ast::{
29 CatalogSyncNamespaceMode, ColumnOption, ColumnPolicy, ColumnPolicyProperty, ContactEntry,
30 CopyIntoSnowflakeKind, CreateTableLikeKind, DollarQuotedString, Ident, IdentityParameters,
31 IdentityProperty, IdentityPropertyFormatKind, IdentityPropertyKind, IdentityPropertyOrder,
32 InitializeKind, ObjectName, ObjectNamePart, RefreshModeKind, RowAccessPolicy, ShowObjects,
33 SqlOption, Statement, StorageSerializationPolicy, TagsColumnOption, WrappedCollection,
34};
35use crate::dialect::{Dialect, Precedence};
36use crate::keywords::Keyword;
37use crate::parser::{IsOptional, Parser, ParserError};
38use crate::tokenizer::Token;
39#[cfg(not(feature = "std"))]
40use alloc::boxed::Box;
41#[cfg(not(feature = "std"))]
42use alloc::string::String;
43#[cfg(not(feature = "std"))]
44use alloc::vec::Vec;
45#[cfg(not(feature = "std"))]
46use alloc::{format, vec};
47
48use super::keywords::RESERVED_FOR_IDENTIFIER;
49
/// Keywords that act as a prefix operator on a select item
/// (e.g. `SELECT CONNECT_BY_ROOT name FROM ...`) and therefore must not be
/// consumed as a plain identifier in that position.
const RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR: [Keyword; 1] = [Keyword::CONNECT_BY_ROOT];
51
/// Keywords that may not start a table factor in Snowflake. `is_table_factor`
/// consults this list to decide whether a bare keyword after `FROM` begins a
/// new clause rather than naming a table.
const RESERVED_KEYWORDS_FOR_TABLE_FACTOR: &[Keyword] = &[
    Keyword::ALL,
    Keyword::ALTER,
    Keyword::AND,
    Keyword::ANY,
    Keyword::AS,
    Keyword::BETWEEN,
    Keyword::BY,
    Keyword::CHECK,
    Keyword::COLUMN,
    Keyword::CONNECT,
    Keyword::CREATE,
    Keyword::CROSS,
    Keyword::CURRENT,
    Keyword::DELETE,
    Keyword::DISTINCT,
    Keyword::DROP,
    Keyword::ELSE,
    Keyword::EXISTS,
    Keyword::FOLLOWING,
    Keyword::FOR,
    Keyword::FROM,
    Keyword::FULL,
    Keyword::GRANT,
    Keyword::GROUP,
    Keyword::HAVING,
    Keyword::ILIKE,
    Keyword::IN,
    Keyword::INCREMENT,
    Keyword::INNER,
    Keyword::INSERT,
    Keyword::INTERSECT,
    Keyword::INTO,
    Keyword::IS,
    Keyword::JOIN,
    Keyword::LEFT,
    Keyword::LIKE,
    Keyword::MINUS,
    Keyword::NATURAL,
    Keyword::NOT,
    Keyword::NULL,
    Keyword::OF,
    Keyword::ON,
    Keyword::OR,
    Keyword::ORDER,
    Keyword::QUALIFY,
    Keyword::REGEXP,
    Keyword::REVOKE,
    Keyword::RIGHT,
    Keyword::RLIKE,
    Keyword::ROW,
    Keyword::ROWS,
    Keyword::SAMPLE,
    Keyword::SELECT,
    Keyword::SET,
    Keyword::SOME,
    Keyword::START,
    Keyword::TABLE,
    Keyword::TABLESAMPLE,
    Keyword::THEN,
    Keyword::TO,
    Keyword::TRIGGER,
    Keyword::UNION,
    Keyword::UNIQUE,
    Keyword::UPDATE,
    Keyword::USING,
    Keyword::VALUES,
    Keyword::WHEN,
    Keyword::WHENEVER,
    Keyword::WHERE,
    Keyword::WINDOW,
    Keyword::WITH,
];
126
/// A [`Dialect`] for the Snowflake database.
#[derive(Debug, Default)]
pub struct SnowflakeDialect;
130
impl Dialect for SnowflakeDialect {
    // Identifiers start with a letter or underscore.
    fn is_identifier_start(&self, ch: char) -> bool {
        ch.is_ascii_lowercase() || ch.is_ascii_uppercase() || ch == '_'
    }

    fn supports_projection_trailing_commas(&self) -> bool {
        true
    }

    // Snowflake accepts a trailing comma in the FROM list.
    fn supports_from_trailing_commas(&self) -> bool {
        true
    }

    // `db..table` (schema omitted between the dots) is valid Snowflake.
    fn supports_object_name_double_dot_notation(&self) -> bool {
        true
    }

    // Subsequent identifier characters also allow digits and `$`.
    fn is_identifier_part(&self, ch: char) -> bool {
        ch.is_ascii_lowercase()
            || ch.is_ascii_uppercase()
            || ch.is_ascii_digit()
            || ch == '$'
            || ch == '_'
    }

    // String literals may contain backslash escape sequences.
    fn supports_string_literal_backslash_escape(&self) -> bool {
        true
    }

    fn supports_within_after_array_aggregation(&self) -> bool {
        true
    }

    // Legacy `(+)` outer-join operator.
    fn supports_outer_join_operator(&self) -> bool {
        true
    }

    fn supports_connect_by(&self) -> bool {
        true
    }

    // Snowflake scripting `EXECUTE IMMEDIATE '<sql>'`.
    fn supports_execute_immediate(&self) -> bool {
        true
    }

    fn supports_match_recognize(&self) -> bool {
        true
    }

    // `{'key': value}` object literal syntax.
    fn supports_dictionary_syntax(&self) -> bool {
        true
    }

    // e.g. `FIRST_VALUE(x IGNORE NULLS) OVER (...)`.
    fn supports_window_function_null_treatment_arg(&self) -> bool {
        true
    }

    // e.g. `SET (a, b) = (1, 2)`.
    fn supports_parenthesized_set_variables(&self) -> bool {
        true
    }

    // `COMMENT ON <object> IS '<text>'`.
    fn supports_comment_on(&self) -> bool {
        true
    }

    /// Snowflake-specific statement entry point. Returns `None` when nothing
    /// dialect-specific is recognized, letting the generic parser proceed.
    fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
        if parser.parse_keyword(Keyword::BEGIN) {
            return Some(parser.parse_begin_exception_end());
        }

        if parser.parse_keywords(&[Keyword::ALTER, Keyword::SESSION]) {
            // ALTER SESSION must be followed by SET or UNSET.
            let set = match parser.parse_one_of_keywords(&[Keyword::SET, Keyword::UNSET]) {
                Some(Keyword::SET) => true,
                Some(Keyword::UNSET) => false,
                _ => return Some(parser.expected("SET or UNSET", parser.peek_token())),
            };
            return Some(parse_alter_session(parser, set));
        }

        if parser.parse_keyword(Keyword::CREATE) {
            // Possibly a CREATE STAGE / TABLE / DATABASE we handle here.
            // [ OR REPLACE ]
            let or_replace = parser.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
            // [ LOCAL | GLOBAL ]
            let global = match parser.parse_one_of_keywords(&[Keyword::LOCAL, Keyword::GLOBAL]) {
                Some(Keyword::LOCAL) => Some(false),
                Some(Keyword::GLOBAL) => Some(true),
                _ => None,
            };

            let dynamic = parser.parse_keyword(Keyword::DYNAMIC);

            let mut temporary = false;
            let mut volatile = false;
            let mut transient = false;
            let mut iceberg = false;

            // At most one of these storage/lifetime modifiers may appear.
            match parser.parse_one_of_keywords(&[
                Keyword::TEMP,
                Keyword::TEMPORARY,
                Keyword::VOLATILE,
                Keyword::TRANSIENT,
                Keyword::ICEBERG,
            ]) {
                Some(Keyword::TEMP | Keyword::TEMPORARY) => temporary = true,
                Some(Keyword::VOLATILE) => volatile = true,
                Some(Keyword::TRANSIENT) => transient = true,
                Some(Keyword::ICEBERG) => iceberg = true,
                _ => {}
            }

            if parser.parse_keyword(Keyword::STAGE) {
                // OK - this is a CREATE STAGE statement.
                return Some(parse_create_stage(or_replace, temporary, parser));
            } else if parser.parse_keyword(Keyword::TABLE) {
                return Some(parse_create_table(
                    or_replace, global, temporary, volatile, transient, iceberg, dynamic, parser,
                ));
            } else if parser.parse_keyword(Keyword::DATABASE) {
                return Some(parse_create_database(or_replace, transient, parser));
            } else {
                // Not a CREATE variant we handle: rewind CREATE plus the
                // optional OR REPLACE / TEMPORARY tokens we consumed and
                // fall through to the generic parser.
                // NOTE(review): LOCAL/GLOBAL, DYNAMIC, VOLATILE, TRANSIENT
                // and ICEBERG are not rewound here — confirm intended.
                let mut back = 1;
                if or_replace {
                    back += 2
                }
                if temporary {
                    back += 1
                }
                for _i in 0..back {
                    parser.prev_token();
                }
            }
        }
        if parser.parse_keywords(&[Keyword::COPY, Keyword::INTO]) {
            // COPY INTO
            return Some(parse_copy_into(parser));
        }

        // File-staging commands: LIST/LS and REMOVE/RM.
        if let Some(kw) = parser.parse_one_of_keywords(&[
            Keyword::LIST,
            Keyword::LS,
            Keyword::REMOVE,
            Keyword::RM,
        ]) {
            return Some(parse_file_staging_command(kw, parser));
        }

        if parser.parse_keyword(Keyword::SHOW) {
            let terse = parser.parse_keyword(Keyword::TERSE);
            if parser.parse_keyword(Keyword::OBJECTS) {
                return Some(parse_show_objects(terse, parser));
            }
            // Not SHOW OBJECTS: give back TERSE (if consumed) and SHOW.
            if terse {
                parser.prev_token();
            }
            parser.prev_token();
        }

        None
    }

    /// Snowflake-specific column options: IDENTITY / AUTOINCREMENT,
    /// MASKING POLICY, PROJECTION POLICY and TAG, each optionally preceded
    /// by WITH. Returns via `maybe_parse` so unmatched input is rewound.
    fn parse_column_option(
        &self,
        parser: &mut Parser,
    ) -> Result<Option<Result<Option<ColumnOption>, ParserError>>, ParserError> {
        parser.maybe_parse(|parser| {
            let with = parser.parse_keyword(Keyword::WITH);

            if parser.parse_keyword(Keyword::IDENTITY) {
                Ok(parse_identity_property(parser)
                    .map(|p| Some(ColumnOption::Identity(IdentityPropertyKind::Identity(p)))))
            } else if parser.parse_keyword(Keyword::AUTOINCREMENT) {
                Ok(parse_identity_property(parser).map(|p| {
                    Some(ColumnOption::Identity(IdentityPropertyKind::Autoincrement(
                        p,
                    )))
                }))
            } else if parser.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
                Ok(parse_column_policy_property(parser, with)
                    .map(|p| Some(ColumnOption::Policy(ColumnPolicy::MaskingPolicy(p)))))
            } else if parser.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
                Ok(parse_column_policy_property(parser, with)
                    .map(|p| Some(ColumnOption::Policy(ColumnPolicy::ProjectionPolicy(p)))))
            } else if parser.parse_keywords(&[Keyword::TAG]) {
                Ok(parse_column_tags(parser, with).map(|p| Some(ColumnOption::Tags(p))))
            } else {
                Err(ParserError::ParserError("not found match".to_string()))
            }
        })
    }

    fn get_next_precedence(&self, parser: &Parser) -> Option<Result<u8, ParserError>> {
        let token = parser.peek_token();
        match token.token {
            // The `:` element-access operator binds as tightly as `::`.
            Token::Colon => Some(Ok(self.prec_value(Precedence::DoubleColon))),
            _ => None,
        }
    }

    // Snowflake requires `DESCRIBE TABLE <name>`, not bare `DESCRIBE <name>`.
    fn describe_requires_table_keyword(&self) -> bool {
        true
    }

    fn allow_extract_custom(&self) -> bool {
        true
    }

    fn allow_extract_single_quotes(&self) -> bool {
        true
    }

    // `SHOW ... LIKE '<pattern>' IN <scope>` ordering.
    fn supports_show_like_before_in(&self) -> bool {
        true
    }

    fn supports_left_associative_joins_without_parens(&self) -> bool {
        false
    }

    fn is_reserved_for_identifier(&self, kw: Keyword) -> bool {
        // Unreserve INTERVAL: Snowflake allows it where identifiers appear.
        if matches!(kw, Keyword::INTERVAL) {
            false
        } else {
            RESERVED_FOR_IDENTIFIER.contains(&kw)
        }
    }

    fn supports_partiql(&self) -> bool {
        true
    }

    /// Decide whether keyword `kw` at the current position may be an
    /// implicit (AS-less) column alias, by peeking at what follows it.
    fn is_column_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool {
        match kw {
            // EXCEPT/RETURNING act as keywords only when more tokens follow
            // on the same select item; at a comma or EOF they are aliases.
            Keyword::EXCEPT
            | Keyword::RETURNING if !matches!(parser.peek_token_ref().token, Token::Comma | Token::EOF) =>
            {
                false
            }

            // LIMIT/OFFSET followed by a limit value is a clause, not an alias.
            Keyword::LIMIT | Keyword::OFFSET if peek_for_limit_options(parser) => false,

            // FETCH FIRST/NEXT <n> is a clause, not an alias.
            Keyword::FETCH if parser.peek_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]).is_some()
                || peek_for_limit_options(parser) =>
            {
                false
            }

            // Clause-introducing keywords can never be column aliases.
            Keyword::FROM
            | Keyword::GROUP
            | Keyword::HAVING
            | Keyword::INTERSECT
            | Keyword::INTO
            | Keyword::MINUS
            | Keyword::ORDER
            | Keyword::SELECT
            | Keyword::UNION
            | Keyword::WHERE
            | Keyword::WITH => false,

            // Any other keyword may serve as a column alias.
            _ => true,
        }
    }

    /// Decide whether keyword `kw` at the current position may be an
    /// implicit (AS-less) table alias, by peeking at what follows it.
    fn is_table_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool {
        match kw {
            // These are aliases only when they end the statement; otherwise
            // they introduce their own clause.
            Keyword::RETURNING
            | Keyword::INNER
            | Keyword::USING
            | Keyword::PIVOT
            | Keyword::UNPIVOT
            | Keyword::EXCEPT
            | Keyword::MATCH_RECOGNIZE
                if !matches!(parser.peek_token_ref().token, Token::SemiColon | Token::EOF) =>
            {
                false
            }

            // LIMIT/OFFSET followed by a limit value is a clause, not an alias.
            Keyword::LIMIT | Keyword::OFFSET if peek_for_limit_options(parser) => false,

            // FETCH FIRST/NEXT <n> is a clause, not an alias.
            Keyword::FETCH
                if parser
                    .peek_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT])
                    .is_some()
                    || peek_for_limit_options(parser) =>
            {
                false
            }

            // Join modifiers followed by JOIN/OUTER start a join clause.
            Keyword::RIGHT | Keyword::LEFT | Keyword::SEMI | Keyword::ANTI
                if parser
                    .peek_one_of_keywords(&[Keyword::JOIN, Keyword::OUTER])
                    .is_some() =>
            {
                false
            }

            Keyword::GLOBAL if parser.peek_keyword(Keyword::FULL) => false,

            // Clause/join keywords that can never be table aliases.
            Keyword::WITH
            | Keyword::ORDER
            | Keyword::SELECT
            | Keyword::WHERE
            | Keyword::GROUP
            | Keyword::HAVING
            | Keyword::LATERAL
            | Keyword::UNION
            | Keyword::INTERSECT
            | Keyword::MINUS
            | Keyword::ON
            | Keyword::JOIN
            | Keyword::INNER
            | Keyword::CROSS
            | Keyword::FULL
            | Keyword::LEFT
            | Keyword::RIGHT
            | Keyword::NATURAL
            | Keyword::USING
            | Keyword::ASOF
            | Keyword::MATCH_CONDITION
            | Keyword::SET
            | Keyword::QUALIFY
            | Keyword::FOR
            | Keyword::START
            | Keyword::CONNECT
            | Keyword::SAMPLE
            | Keyword::TABLESAMPLE
            | Keyword::FROM => false,

            // Any other keyword may serve as a table alias.
            _ => true,
        }
    }

    fn is_table_factor(&self, kw: &Keyword, parser: &mut Parser) -> bool {
        match kw {
            Keyword::LIMIT if peek_for_limit_options(parser) => false,
            // `TABLE(...)` is a table function call, hence a table factor.
            Keyword::TABLE if matches!(parser.peek_token_ref().token, Token::LParen) => true,
            _ => !RESERVED_KEYWORDS_FOR_TABLE_FACTOR.contains(kw),
        }
    }

    // Time-travel clauses: `AT(...)` / `BEFORE(...)`.
    fn supports_timestamp_versioning(&self) -> bool {
        true
    }

    fn supports_group_by_expr(&self) -> bool {
        true
    }

    fn get_reserved_keywords_for_select_item_operator(&self) -> &[Keyword] {
        &RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR
    }

    fn supports_space_separated_column_options(&self) -> bool {
        true
    }

    fn supports_comma_separated_drop_column_list(&self) -> bool {
        true
    }

    /// Unquoted `IDENTIFIER(...)` generates an object name dynamically,
    /// unless a function-style name part already appeared earlier.
    fn is_identifier_generating_function_name(
        &self,
        ident: &Ident,
        name_parts: &[ObjectNamePart],
    ) -> bool {
        ident.quote_style.is_none()
            && ident.value.to_lowercase() == "identifier"
            && !name_parts
                .iter()
                .any(|p| matches!(p, ObjectNamePart::Function(_)))
    }

    // e.g. `SELECT t.* EXCLUDE (...)`-style expressions on a wildcard.
    fn supports_select_expr_star(&self) -> bool {
        true
    }

    fn supports_select_wildcard_exclude(&self) -> bool {
        true
    }

    fn supports_semantic_view_table_factor(&self) -> bool {
        true
    }
}
574
575fn peek_for_limit_options(parser: &Parser) -> bool {
578 match &parser.peek_token_ref().token {
579 Token::Number(_, _) | Token::Placeholder(_) => true,
580 Token::SingleQuotedString(val) if val.is_empty() => true,
581 Token::DollarQuotedString(DollarQuotedString { value, .. }) if value.is_empty() => true,
582 Token::Word(w) if w.keyword == Keyword::NULL => true,
583 _ => false,
584 }
585}
586
587fn parse_file_staging_command(kw: Keyword, parser: &mut Parser) -> Result<Statement, ParserError> {
588 let stage = parse_snowflake_stage_name(parser)?;
589 let pattern = if parser.parse_keyword(Keyword::PATTERN) {
590 parser.expect_token(&Token::Eq)?;
591 Some(parser.parse_literal_string()?)
592 } else {
593 None
594 };
595
596 match kw {
597 Keyword::LIST | Keyword::LS => Ok(Statement::List(FileStagingCommand { stage, pattern })),
598 Keyword::REMOVE | Keyword::RM => {
599 Ok(Statement::Remove(FileStagingCommand { stage, pattern }))
600 }
601 _ => Err(ParserError::ParserError(
602 "unexpected stage command, expecting LIST, LS, REMOVE or RM".to_string(),
603 )),
604 }
605}
606
607fn parse_alter_session(parser: &mut Parser, set: bool) -> Result<Statement, ParserError> {
610 let session_options = parse_session_options(parser, set)?;
611 Ok(Statement::AlterSession {
612 set,
613 session_params: KeyValueOptions {
614 options: session_options,
615 delimiter: KeyValueOptionsDelimiter::Space,
616 },
617 })
618}
619
/// Parse the remainder of a Snowflake `CREATE TABLE` statement, after the
/// `CREATE [OR REPLACE] [GLOBAL|LOCAL] [DYNAMIC]
/// [TEMP|TEMPORARY|VOLATILE|TRANSIENT|ICEBERG] TABLE` prefix has been
/// consumed by the caller; the boolean flags carry what was already seen.
///
/// <https://docs.snowflake.com/en/sql-reference/sql/create-table>
#[allow(clippy::too_many_arguments)]
pub fn parse_create_table(
    or_replace: bool,
    global: Option<bool>,
    temporary: bool,
    volatile: bool,
    transient: bool,
    iceberg: bool,
    dynamic: bool,
    parser: &mut Parser,
) -> Result<Statement, ParserError> {
    let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
    let table_name = parser.parse_object_name(false)?;

    let mut builder = CreateTableBuilder::new(table_name)
        .or_replace(or_replace)
        .if_not_exists(if_not_exists)
        .temporary(temporary)
        .transient(transient)
        .volatile(volatile)
        .iceberg(iceberg)
        .global(global)
        .dynamic(dynamic)
        .hive_formats(Some(Default::default()));

    // Options stored outside the builder (currently only inline COMMENT).
    let mut plain_options = vec![];

    // Snowflake lets table properties appear in almost any order, so loop
    // over tokens until the statement ends.
    loop {
        let next_token = parser.next_token();
        match &next_token.token {
            Token::Word(word) => match word.keyword {
                Keyword::COPY => {
                    // COPY GRANTS
                    parser.expect_keyword_is(Keyword::GRANTS)?;
                    builder = builder.copy_grants(true);
                }
                Keyword::COMMENT => {
                    // Rewind so the helper sees the COMMENT keyword itself.
                    parser.prev_token();
                    if let Some(comment_def) = parser.parse_optional_inline_comment()? {
                        plain_options.push(SqlOption::Comment(comment_def))
                    }
                }
                Keyword::AS => {
                    // CREATE TABLE ... AS <query>
                    let query = parser.parse_query()?;
                    builder = builder.query(Some(query));
                }
                Keyword::CLONE => {
                    // CREATE TABLE ... CLONE <source> — the source name is
                    // optional here (`.ok()`), leaving `clone_clause` unset
                    // when absent.
                    let clone = parser.parse_object_name(false).ok();
                    builder = builder.clone_clause(clone);
                }
                Keyword::LIKE => {
                    // CREATE TABLE ... LIKE <source>
                    let name = parser.parse_object_name(false)?;
                    builder = builder.like(Some(CreateTableLikeKind::Plain(
                        crate::ast::CreateTableLike {
                            name,
                            defaults: None,
                        },
                    )));
                }
                Keyword::CLUSTER => {
                    // CLUSTER BY ( <expr> [, <expr> ...] )
                    parser.expect_keyword_is(Keyword::BY)?;
                    parser.expect_token(&Token::LParen)?;
                    let cluster_by = Some(WrappedCollection::Parentheses(
                        parser.parse_comma_separated(|p| p.parse_expr())?,
                    ));
                    parser.expect_token(&Token::RParen)?;

                    builder = builder.cluster_by(cluster_by)
                }
                Keyword::ENABLE_SCHEMA_EVOLUTION => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.enable_schema_evolution(Some(parser.parse_boolean_string()?));
                }
                Keyword::CHANGE_TRACKING => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.change_tracking(Some(parser.parse_boolean_string()?));
                }
                Keyword::DATA_RETENTION_TIME_IN_DAYS => {
                    parser.expect_token(&Token::Eq)?;
                    let data_retention_time_in_days = parser.parse_literal_uint()?;
                    builder =
                        builder.data_retention_time_in_days(Some(data_retention_time_in_days));
                }
                Keyword::MAX_DATA_EXTENSION_TIME_IN_DAYS => {
                    parser.expect_token(&Token::Eq)?;
                    let max_data_extension_time_in_days = parser.parse_literal_uint()?;
                    builder = builder
                        .max_data_extension_time_in_days(Some(max_data_extension_time_in_days));
                }
                Keyword::DEFAULT_DDL_COLLATION => {
                    parser.expect_token(&Token::Eq)?;
                    let default_ddl_collation = parser.parse_literal_string()?;
                    builder = builder.default_ddl_collation(Some(default_ddl_collation));
                }
                Keyword::WITH => {
                    // WITH is optional sugar before AGGREGATION POLICY,
                    // TAG or ROW ACCESS POLICY: validate what follows,
                    // then rewind so the next iteration handles it.
                    parser.expect_one_of_keywords(&[
                        Keyword::AGGREGATION,
                        Keyword::TAG,
                        Keyword::ROW,
                    ])?;
                    parser.prev_token();
                }
                Keyword::AGGREGATION => {
                    // [WITH] AGGREGATION POLICY <name>
                    parser.expect_keyword_is(Keyword::POLICY)?;
                    let aggregation_policy = parser.parse_object_name(false)?;
                    builder = builder.with_aggregation_policy(Some(aggregation_policy));
                }
                Keyword::ROW => {
                    // [WITH] ROW ACCESS POLICY <name> ON ( <col> [, ...] )
                    parser.expect_keywords(&[Keyword::ACCESS, Keyword::POLICY])?;
                    let policy = parser.parse_object_name(false)?;
                    parser.expect_keyword_is(Keyword::ON)?;
                    parser.expect_token(&Token::LParen)?;
                    let columns = parser.parse_comma_separated(|p| p.parse_identifier())?;
                    parser.expect_token(&Token::RParen)?;

                    builder =
                        builder.with_row_access_policy(Some(RowAccessPolicy::new(policy, columns)))
                }
                Keyword::TAG => {
                    // [WITH] TAG ( <name> = '<value>' [, ...] )
                    parser.expect_token(&Token::LParen)?;
                    let tags = parser.parse_comma_separated(Parser::parse_tag)?;
                    parser.expect_token(&Token::RParen)?;
                    builder = builder.with_tags(Some(tags));
                }
                Keyword::ON if parser.parse_keyword(Keyword::COMMIT) => {
                    let on_commit = Some(parser.parse_create_table_on_commit()?);
                    builder = builder.on_commit(on_commit);
                }
                Keyword::EXTERNAL_VOLUME => {
                    parser.expect_token(&Token::Eq)?;
                    builder.external_volume = Some(parser.parse_literal_string()?);
                }
                Keyword::CATALOG => {
                    parser.expect_token(&Token::Eq)?;
                    builder.catalog = Some(parser.parse_literal_string()?);
                }
                Keyword::BASE_LOCATION => {
                    parser.expect_token(&Token::Eq)?;
                    builder.base_location = Some(parser.parse_literal_string()?);
                }
                Keyword::CATALOG_SYNC => {
                    parser.expect_token(&Token::Eq)?;
                    builder.catalog_sync = Some(parser.parse_literal_string()?);
                }
                Keyword::STORAGE_SERIALIZATION_POLICY => {
                    parser.expect_token(&Token::Eq)?;

                    builder.storage_serialization_policy =
                        Some(parse_storage_serialization_policy(parser)?);
                }
                Keyword::IF if parser.parse_keywords(&[Keyword::NOT, Keyword::EXISTS]) => {
                    builder = builder.if_not_exists(true);
                }
                Keyword::TARGET_LAG => {
                    // Dynamic-table option: TARGET_LAG = '<duration>'.
                    parser.expect_token(&Token::Eq)?;
                    let target_lag = parser.parse_literal_string()?;
                    builder = builder.target_lag(Some(target_lag));
                }
                Keyword::WAREHOUSE => {
                    // Dynamic-table option: WAREHOUSE = <identifier>.
                    parser.expect_token(&Token::Eq)?;
                    let warehouse = parser.parse_identifier()?;
                    builder = builder.warehouse(Some(warehouse));
                }
                Keyword::AT | Keyword::BEFORE => {
                    // Time-travel clause; rewind so the helper can re-read
                    // the AT/BEFORE keyword.
                    parser.prev_token();
                    let version = parser.maybe_parse_table_version()?;
                    builder = builder.version(version);
                }
                Keyword::REFRESH_MODE => {
                    parser.expect_token(&Token::Eq)?;
                    // NOTE(review): on failure the error points at the
                    // REFRESH_MODE token, not the offending one — confirm.
                    let refresh_mode = match parser.parse_one_of_keywords(&[
                        Keyword::AUTO,
                        Keyword::FULL,
                        Keyword::INCREMENTAL,
                    ]) {
                        Some(Keyword::AUTO) => Some(RefreshModeKind::Auto),
                        Some(Keyword::FULL) => Some(RefreshModeKind::Full),
                        Some(Keyword::INCREMENTAL) => Some(RefreshModeKind::Incremental),
                        _ => return parser.expected("AUTO, FULL or INCREMENTAL", next_token),
                    };
                    builder = builder.refresh_mode(refresh_mode);
                }
                Keyword::INITIALIZE => {
                    parser.expect_token(&Token::Eq)?;
                    let initialize = match parser
                        .parse_one_of_keywords(&[Keyword::ON_CREATE, Keyword::ON_SCHEDULE])
                    {
                        Some(Keyword::ON_CREATE) => Some(InitializeKind::OnCreate),
                        Some(Keyword::ON_SCHEDULE) => Some(InitializeKind::OnSchedule),
                        _ => return parser.expected("ON_CREATE or ON_SCHEDULE", next_token),
                    };
                    builder = builder.initialize(initialize);
                }
                Keyword::REQUIRE if parser.parse_keyword(Keyword::USER) => {
                    builder = builder.require_user(true);
                }
                _ => {
                    return parser.expected("end of statement", next_token);
                }
            },
            Token::LParen => {
                // Column/constraint list; rewind so the helper sees `(`.
                parser.prev_token();
                let (columns, constraints) = parser.parse_columns()?;
                builder = builder.columns(columns).constraints(constraints);
            }
            Token::EOF => {
                break;
            }
            Token::SemiColon => {
                // Leave the `;` for the statement-level loop to consume.
                parser.prev_token();
                break;
            }
            _ => {
                return parser.expected("end of statement", next_token);
            }
        }
    }
    let table_options = if !plain_options.is_empty() {
        crate::ast::CreateTableOptions::Plain(plain_options)
    } else {
        crate::ast::CreateTableOptions::None
    };

    builder = builder.table_options(table_options);

    // Iceberg tables must specify where their data lives.
    if iceberg && builder.base_location.is_none() {
        return Err(ParserError::ParserError(
            "BASE_LOCATION is required for ICEBERG tables".to_string(),
        ));
    }

    Ok(builder.build())
}
864
/// Parse the remainder of a Snowflake `CREATE DATABASE` statement after the
/// `CREATE [OR REPLACE] [TRANSIENT] DATABASE` prefix has been consumed.
///
/// <https://docs.snowflake.com/en/sql-reference/sql/create-database>
pub fn parse_create_database(
    or_replace: bool,
    transient: bool,
    parser: &mut Parser,
) -> Result<Statement, ParserError> {
    let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
    let name = parser.parse_object_name(false)?;

    let mut builder = CreateDatabaseBuilder::new(name)
        .or_replace(or_replace)
        .transient(transient)
        .if_not_exists(if_not_exists);

    // Database properties may appear in any order; loop until the
    // statement ends.
    loop {
        let next_token = parser.next_token();
        match &next_token.token {
            Token::Word(word) => match word.keyword {
                Keyword::CLONE => {
                    builder = builder.clone_clause(Some(parser.parse_object_name(false)?));
                }
                Keyword::DATA_RETENTION_TIME_IN_DAYS => {
                    parser.expect_token(&Token::Eq)?;
                    builder =
                        builder.data_retention_time_in_days(Some(parser.parse_literal_uint()?));
                }
                Keyword::MAX_DATA_EXTENSION_TIME_IN_DAYS => {
                    parser.expect_token(&Token::Eq)?;
                    builder =
                        builder.max_data_extension_time_in_days(Some(parser.parse_literal_uint()?));
                }
                Keyword::EXTERNAL_VOLUME => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.external_volume(Some(parser.parse_literal_string()?));
                }
                Keyword::CATALOG => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.catalog(Some(parser.parse_literal_string()?));
                }
                Keyword::REPLACE_INVALID_CHARACTERS => {
                    parser.expect_token(&Token::Eq)?;
                    builder =
                        builder.replace_invalid_characters(Some(parser.parse_boolean_string()?));
                }
                Keyword::DEFAULT_DDL_COLLATION => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.default_ddl_collation(Some(parser.parse_literal_string()?));
                }
                Keyword::STORAGE_SERIALIZATION_POLICY => {
                    parser.expect_token(&Token::Eq)?;
                    let policy = parse_storage_serialization_policy(parser)?;
                    builder = builder.storage_serialization_policy(Some(policy));
                }
                Keyword::COMMENT => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.comment(Some(parser.parse_literal_string()?));
                }
                Keyword::CATALOG_SYNC => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.catalog_sync(Some(parser.parse_literal_string()?));
                }
                Keyword::CATALOG_SYNC_NAMESPACE_FLATTEN_DELIMITER => {
                    parser.expect_token(&Token::Eq)?;
                    builder = builder.catalog_sync_namespace_flatten_delimiter(Some(
                        parser.parse_literal_string()?,
                    ));
                }
                Keyword::CATALOG_SYNC_NAMESPACE_MODE => {
                    parser.expect_token(&Token::Eq)?;
                    let mode =
                        match parser.parse_one_of_keywords(&[Keyword::NEST, Keyword::FLATTEN]) {
                            Some(Keyword::NEST) => CatalogSyncNamespaceMode::Nest,
                            Some(Keyword::FLATTEN) => CatalogSyncNamespaceMode::Flatten,
                            _ => {
                                return parser.expected("NEST or FLATTEN", next_token);
                            }
                        };
                    builder = builder.catalog_sync_namespace_mode(Some(mode));
                }
                Keyword::WITH => {
                    // WITH TAG ( ... ) or WITH CONTACT ( purpose = contact, ... )
                    if parser.parse_keyword(Keyword::TAG) {
                        parser.expect_token(&Token::LParen)?;
                        let tags = parser.parse_comma_separated(Parser::parse_tag)?;
                        parser.expect_token(&Token::RParen)?;
                        builder = builder.with_tags(Some(tags));
                    } else if parser.parse_keyword(Keyword::CONTACT) {
                        parser.expect_token(&Token::LParen)?;
                        let contacts = parser.parse_comma_separated(|p| {
                            let purpose = p.parse_identifier()?.value;
                            p.expect_token(&Token::Eq)?;
                            let contact = p.parse_identifier()?.value;
                            Ok(ContactEntry { purpose, contact })
                        })?;
                        parser.expect_token(&Token::RParen)?;
                        builder = builder.with_contacts(Some(contacts));
                    } else {
                        return parser.expected("TAG or CONTACT", next_token);
                    }
                }
                _ => return parser.expected("end of statement", next_token),
            },
            Token::SemiColon | Token::EOF => break,
            _ => return parser.expected("end of statement", next_token),
        }
    }
    Ok(builder.build())
}
973
974pub fn parse_storage_serialization_policy(
975 parser: &mut Parser,
976) -> Result<StorageSerializationPolicy, ParserError> {
977 let next_token = parser.next_token();
978 match &next_token.token {
979 Token::Word(w) => match w.keyword {
980 Keyword::COMPATIBLE => Ok(StorageSerializationPolicy::Compatible),
981 Keyword::OPTIMIZED => Ok(StorageSerializationPolicy::Optimized),
982 _ => parser.expected("storage_serialization_policy", next_token),
983 },
984 _ => parser.expected("storage_serialization_policy", next_token),
985 }
986}
987
988pub fn parse_create_stage(
989 or_replace: bool,
990 temporary: bool,
991 parser: &mut Parser,
992) -> Result<Statement, ParserError> {
993 let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
995 let name = parser.parse_object_name(false)?;
996 let mut directory_table_params = Vec::new();
997 let mut file_format = Vec::new();
998 let mut copy_options = Vec::new();
999 let mut comment = None;
1000
1001 let stage_params = parse_stage_params(parser)?;
1003
1004 if parser.parse_keyword(Keyword::DIRECTORY) {
1006 parser.expect_token(&Token::Eq)?;
1007 directory_table_params = parser.parse_key_value_options(true, &[])?;
1008 }
1009
1010 if parser.parse_keyword(Keyword::FILE_FORMAT) {
1012 parser.expect_token(&Token::Eq)?;
1013 file_format = parser.parse_key_value_options(true, &[])?;
1014 }
1015
1016 if parser.parse_keyword(Keyword::COPY_OPTIONS) {
1018 parser.expect_token(&Token::Eq)?;
1019 copy_options = parser.parse_key_value_options(true, &[])?;
1020 }
1021
1022 if parser.parse_keyword(Keyword::COMMENT) {
1024 parser.expect_token(&Token::Eq)?;
1025 comment = Some(parser.parse_comment_value()?);
1026 }
1027
1028 Ok(Statement::CreateStage {
1029 or_replace,
1030 temporary,
1031 if_not_exists,
1032 name,
1033 stage_params,
1034 directory_table_params: KeyValueOptions {
1035 options: directory_table_params,
1036 delimiter: KeyValueOptionsDelimiter::Space,
1037 },
1038 file_format: KeyValueOptions {
1039 options: file_format,
1040 delimiter: KeyValueOptionsDelimiter::Space,
1041 },
1042 copy_options: KeyValueOptions {
1043 options: copy_options,
1044 delimiter: KeyValueOptionsDelimiter::Space,
1045 },
1046 comment,
1047 })
1048}
1049
1050pub fn parse_stage_name_identifier(parser: &mut Parser) -> Result<Ident, ParserError> {
1051 let mut ident = String::new();
1052 while let Some(next_token) = parser.next_token_no_skip() {
1053 match &next_token.token {
1054 Token::Whitespace(_) | Token::SemiColon => break,
1055 Token::Period => {
1056 parser.prev_token();
1057 break;
1058 }
1059 Token::RParen => {
1060 parser.prev_token();
1061 break;
1062 }
1063 Token::AtSign => ident.push('@'),
1064 Token::Tilde => ident.push('~'),
1065 Token::Mod => ident.push('%'),
1066 Token::Div => ident.push('/'),
1067 Token::Plus => ident.push('+'),
1068 Token::Minus => ident.push('-'),
1069 Token::Number(n, _) => ident.push_str(n),
1070 Token::Word(w) => ident.push_str(&w.to_string()),
1071 _ => return parser.expected("stage name identifier", parser.peek_token()),
1072 }
1073 }
1074 Ok(Ident::new(ident))
1075}
1076
1077pub fn parse_snowflake_stage_name(parser: &mut Parser) -> Result<ObjectName, ParserError> {
1078 match parser.next_token().token {
1079 Token::AtSign => {
1080 parser.prev_token();
1081 let mut idents = vec![];
1082 loop {
1083 idents.push(parse_stage_name_identifier(parser)?);
1084 if !parser.consume_token(&Token::Period) {
1085 break;
1086 }
1087 }
1088 Ok(ObjectName::from(idents))
1089 }
1090 _ => {
1091 parser.prev_token();
1092 Ok(parser.parse_object_name(false)?)
1093 }
1094 }
1095}
1096
1097pub fn parse_copy_into(parser: &mut Parser) -> Result<Statement, ParserError> {
1100 let kind = match parser.peek_token().token {
1101 Token::AtSign => CopyIntoSnowflakeKind::Location,
1103 Token::SingleQuotedString(s) if s.contains("://") => CopyIntoSnowflakeKind::Location,
1105 _ => CopyIntoSnowflakeKind::Table,
1106 };
1107
1108 let mut files: Vec<String> = vec![];
1109 let mut from_transformations: Option<Vec<StageLoadSelectItemKind>> = None;
1110 let mut from_stage_alias = None;
1111 let mut from_stage = None;
1112 let mut stage_params = StageParamsObject {
1113 url: None,
1114 encryption: KeyValueOptions {
1115 options: vec![],
1116 delimiter: KeyValueOptionsDelimiter::Space,
1117 },
1118 endpoint: None,
1119 storage_integration: None,
1120 credentials: KeyValueOptions {
1121 options: vec![],
1122 delimiter: KeyValueOptionsDelimiter::Space,
1123 },
1124 };
1125 let mut from_query = None;
1126 let mut partition = None;
1127 let mut file_format = Vec::new();
1128 let mut pattern = None;
1129 let mut validation_mode = None;
1130 let mut copy_options = Vec::new();
1131
1132 let into: ObjectName = parse_snowflake_stage_name(parser)?;
1133 if kind == CopyIntoSnowflakeKind::Location {
1134 stage_params = parse_stage_params(parser)?;
1135 }
1136
1137 let into_columns = match &parser.peek_token().token {
1138 Token::LParen => Some(parser.parse_parenthesized_column_list(IsOptional::Optional, true)?),
1139 _ => None,
1140 };
1141
1142 parser.expect_keyword_is(Keyword::FROM)?;
1143 match parser.next_token().token {
1144 Token::LParen if kind == CopyIntoSnowflakeKind::Table => {
1145 parser.expect_keyword_is(Keyword::SELECT)?;
1147 from_transformations = parse_select_items_for_data_load(parser)?;
1148
1149 parser.expect_keyword_is(Keyword::FROM)?;
1150 from_stage = Some(parse_snowflake_stage_name(parser)?);
1151 stage_params = parse_stage_params(parser)?;
1152
1153 from_stage_alias = parser
1155 .maybe_parse_table_alias()?
1156 .map(|table_alias| table_alias.name);
1157 parser.expect_token(&Token::RParen)?;
1158 }
1159 Token::LParen if kind == CopyIntoSnowflakeKind::Location => {
1160 from_query = Some(parser.parse_query()?);
1162 parser.expect_token(&Token::RParen)?;
1163 }
1164 _ => {
1165 parser.prev_token();
1166 from_stage = Some(parse_snowflake_stage_name(parser)?);
1167 stage_params = parse_stage_params(parser)?;
1168
1169 from_stage_alias = if parser.parse_keyword(Keyword::AS) {
1171 Some(match parser.next_token().token {
1172 Token::Word(w) => Ok(Ident::new(w.value)),
1173 _ => parser.expected("stage alias", parser.peek_token()),
1174 }?)
1175 } else {
1176 None
1177 };
1178 }
1179 }
1180
1181 loop {
1182 if parser.parse_keyword(Keyword::FILE_FORMAT) {
1184 parser.expect_token(&Token::Eq)?;
1185 file_format = parser.parse_key_value_options(true, &[])?;
1186 } else if parser.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
1188 partition = Some(Box::new(parser.parse_expr()?))
1189 } else if parser.parse_keyword(Keyword::FILES) {
1191 parser.expect_token(&Token::Eq)?;
1192 parser.expect_token(&Token::LParen)?;
1193 let mut continue_loop = true;
1194 while continue_loop {
1195 continue_loop = false;
1196 let next_token = parser.next_token();
1197 match next_token.token {
1198 Token::SingleQuotedString(s) => files.push(s),
1199 _ => parser.expected("file token", next_token)?,
1200 };
1201 if parser.next_token().token.eq(&Token::Comma) {
1202 continue_loop = true;
1203 } else {
1204 parser.prev_token(); }
1206 }
1207 parser.expect_token(&Token::RParen)?;
1208 } else if parser.parse_keyword(Keyword::PATTERN) {
1210 parser.expect_token(&Token::Eq)?;
1211 let next_token = parser.next_token();
1212 pattern = Some(match next_token.token {
1213 Token::SingleQuotedString(s) => s,
1214 _ => parser.expected("pattern", next_token)?,
1215 });
1216 } else if parser.parse_keyword(Keyword::VALIDATION_MODE) {
1218 parser.expect_token(&Token::Eq)?;
1219 validation_mode = Some(parser.next_token().token.to_string());
1220 } else if parser.parse_keyword(Keyword::COPY_OPTIONS) {
1222 parser.expect_token(&Token::Eq)?;
1223 copy_options = parser.parse_key_value_options(true, &[])?;
1224 } else {
1225 match parser.next_token().token {
1226 Token::SemiColon | Token::EOF => break,
1227 Token::Comma => continue,
1228 Token::Word(key) => copy_options.push(parser.parse_key_value_option(key)?),
1231 _ => return parser.expected("another copy option, ; or EOF'", parser.peek_token()),
1232 }
1233 }
1234 }
1235
1236 Ok(Statement::CopyIntoSnowflake {
1237 kind,
1238 into,
1239 into_columns,
1240 from_obj: from_stage,
1241 from_obj_alias: from_stage_alias,
1242 stage_params,
1243 from_transformations,
1244 from_query,
1245 files: if files.is_empty() { None } else { Some(files) },
1246 pattern,
1247 file_format: KeyValueOptions {
1248 options: file_format,
1249 delimiter: KeyValueOptionsDelimiter::Space,
1250 },
1251 copy_options: KeyValueOptions {
1252 options: copy_options,
1253 delimiter: KeyValueOptionsDelimiter::Space,
1254 },
1255 validation_mode,
1256 partition,
1257 })
1258}
1259
1260fn parse_select_items_for_data_load(
1261 parser: &mut Parser,
1262) -> Result<Option<Vec<StageLoadSelectItemKind>>, ParserError> {
1263 let mut select_items: Vec<StageLoadSelectItemKind> = vec![];
1264 loop {
1265 match parser.maybe_parse(parse_select_item_for_data_load)? {
1266 Some(item) => select_items.push(StageLoadSelectItemKind::StageLoadSelectItem(item)),
1268 None => select_items.push(StageLoadSelectItemKind::SelectItem(
1270 parser.parse_select_item()?,
1271 )),
1272 }
1273 if matches!(parser.peek_token_ref().token, Token::Comma) {
1274 parser.advance_token();
1275 } else {
1276 break;
1277 }
1278 }
1279 Ok(Some(select_items))
1280}
1281
1282fn parse_select_item_for_data_load(
1283 parser: &mut Parser,
1284) -> Result<StageLoadSelectItem, ParserError> {
1285 let mut alias: Option<Ident> = None;
1286 let mut file_col_num: i32 = 0;
1287 let mut element: Option<Ident> = None;
1288 let mut item_as: Option<Ident> = None;
1289
1290 let next_token = parser.next_token();
1291 match next_token.token {
1292 Token::Placeholder(w) => {
1293 file_col_num = w.to_string().split_off(1).parse::<i32>().map_err(|e| {
1294 ParserError::ParserError(format!("Could not parse '{w}' as i32: {e}"))
1295 })?;
1296 Ok(())
1297 }
1298 Token::Word(w) => {
1299 alias = Some(Ident::new(w.value));
1300 Ok(())
1301 }
1302 _ => parser.expected("alias or file_col_num", next_token),
1303 }?;
1304
1305 if alias.is_some() {
1306 parser.expect_token(&Token::Period)?;
1307 let col_num_token = parser.next_token();
1309 match col_num_token.token {
1310 Token::Placeholder(w) => {
1311 file_col_num = w.to_string().split_off(1).parse::<i32>().map_err(|e| {
1312 ParserError::ParserError(format!("Could not parse '{w}' as i32: {e}"))
1313 })?;
1314 Ok(())
1315 }
1316 _ => parser.expected("file_col_num", col_num_token),
1317 }?;
1318 }
1319
1320 match parser.next_token().token {
1322 Token::Colon => {
1323 element = Some(Ident::new(match parser.next_token().token {
1325 Token::Word(w) => Ok(w.value),
1326 _ => parser.expected("file_col_num", parser.peek_token()),
1327 }?));
1328 }
1329 _ => {
1330 parser.prev_token();
1332 }
1333 }
1334
1335 if parser.parse_keyword(Keyword::AS) {
1337 item_as = Some(match parser.next_token().token {
1338 Token::Word(w) => Ok(Ident::new(w.value)),
1339 _ => parser.expected("column item alias", parser.peek_token()),
1340 }?);
1341 }
1342
1343 Ok(StageLoadSelectItem {
1344 alias,
1345 file_col_num,
1346 element,
1347 item_as,
1348 })
1349}
1350
1351fn parse_stage_params(parser: &mut Parser) -> Result<StageParamsObject, ParserError> {
1352 let (mut url, mut storage_integration, mut endpoint) = (None, None, None);
1353 let mut encryption: KeyValueOptions = KeyValueOptions {
1354 options: vec![],
1355 delimiter: KeyValueOptionsDelimiter::Space,
1356 };
1357 let mut credentials: KeyValueOptions = KeyValueOptions {
1358 options: vec![],
1359 delimiter: KeyValueOptionsDelimiter::Space,
1360 };
1361
1362 if parser.parse_keyword(Keyword::URL) {
1364 parser.expect_token(&Token::Eq)?;
1365 url = Some(match parser.next_token().token {
1366 Token::SingleQuotedString(word) => Ok(word),
1367 _ => parser.expected("a URL statement", parser.peek_token()),
1368 }?)
1369 }
1370
1371 if parser.parse_keyword(Keyword::STORAGE_INTEGRATION) {
1373 parser.expect_token(&Token::Eq)?;
1374 storage_integration = Some(parser.next_token().token.to_string());
1375 }
1376
1377 if parser.parse_keyword(Keyword::ENDPOINT) {
1379 parser.expect_token(&Token::Eq)?;
1380 endpoint = Some(match parser.next_token().token {
1381 Token::SingleQuotedString(word) => Ok(word),
1382 _ => parser.expected("an endpoint statement", parser.peek_token()),
1383 }?)
1384 }
1385
1386 if parser.parse_keyword(Keyword::CREDENTIALS) {
1388 parser.expect_token(&Token::Eq)?;
1389 credentials = KeyValueOptions {
1390 options: parser.parse_key_value_options(true, &[])?,
1391 delimiter: KeyValueOptionsDelimiter::Space,
1392 };
1393 }
1394
1395 if parser.parse_keyword(Keyword::ENCRYPTION) {
1397 parser.expect_token(&Token::Eq)?;
1398 encryption = KeyValueOptions {
1399 options: parser.parse_key_value_options(true, &[])?,
1400 delimiter: KeyValueOptionsDelimiter::Space,
1401 };
1402 }
1403
1404 Ok(StageParamsObject {
1405 url,
1406 encryption,
1407 endpoint,
1408 storage_integration,
1409 credentials,
1410 })
1411}
1412
1413fn parse_session_options(
1418 parser: &mut Parser,
1419 set: bool,
1420) -> Result<Vec<KeyValueOption>, ParserError> {
1421 let mut options: Vec<KeyValueOption> = Vec::new();
1422 let empty = String::new;
1423 loop {
1424 let next_token = parser.peek_token();
1425 match next_token.token {
1426 Token::SemiColon | Token::EOF => break,
1427 Token::Comma => {
1428 parser.advance_token();
1429 continue;
1430 }
1431 Token::Word(key) => {
1432 parser.advance_token();
1433 if set {
1434 let option = parser.parse_key_value_option(key)?;
1435 options.push(option);
1436 } else {
1437 options.push(KeyValueOption {
1438 option_name: key.value,
1439 option_type: KeyValueOptionType::STRING,
1440 value: empty(),
1441 });
1442 }
1443 }
1444 _ => {
1445 return parser.expected("another option or end of statement", next_token);
1446 }
1447 }
1448 }
1449 if options.is_empty() {
1450 Err(ParserError::ParserError(
1451 "expected at least one option".to_string(),
1452 ))
1453 } else {
1454 Ok(options)
1455 }
1456}
1457
1458fn parse_identity_property(parser: &mut Parser) -> Result<IdentityProperty, ParserError> {
1465 let parameters = if parser.consume_token(&Token::LParen) {
1466 let seed = parser.parse_number()?;
1467 parser.expect_token(&Token::Comma)?;
1468 let increment = parser.parse_number()?;
1469 parser.expect_token(&Token::RParen)?;
1470
1471 Some(IdentityPropertyFormatKind::FunctionCall(
1472 IdentityParameters { seed, increment },
1473 ))
1474 } else if parser.parse_keyword(Keyword::START) {
1475 let seed = parser.parse_number()?;
1476 parser.expect_keyword_is(Keyword::INCREMENT)?;
1477 let increment = parser.parse_number()?;
1478
1479 Some(IdentityPropertyFormatKind::StartAndIncrement(
1480 IdentityParameters { seed, increment },
1481 ))
1482 } else {
1483 None
1484 };
1485 let order = match parser.parse_one_of_keywords(&[Keyword::ORDER, Keyword::NOORDER]) {
1486 Some(Keyword::ORDER) => Some(IdentityPropertyOrder::Order),
1487 Some(Keyword::NOORDER) => Some(IdentityPropertyOrder::NoOrder),
1488 _ => None,
1489 };
1490 Ok(IdentityProperty { parameters, order })
1491}
1492
1493fn parse_column_policy_property(
1500 parser: &mut Parser,
1501 with: bool,
1502) -> Result<ColumnPolicyProperty, ParserError> {
1503 let policy_name = parser.parse_object_name(false)?;
1504 let using_columns = if parser.parse_keyword(Keyword::USING) {
1505 parser.expect_token(&Token::LParen)?;
1506 let columns = parser.parse_comma_separated(|p| p.parse_identifier())?;
1507 parser.expect_token(&Token::RParen)?;
1508 Some(columns)
1509 } else {
1510 None
1511 };
1512
1513 Ok(ColumnPolicyProperty {
1514 with,
1515 policy_name,
1516 using_columns,
1517 })
1518}
1519
1520fn parse_column_tags(parser: &mut Parser, with: bool) -> Result<TagsColumnOption, ParserError> {
1527 parser.expect_token(&Token::LParen)?;
1528 let tags = parser.parse_comma_separated(Parser::parse_tag)?;
1529 parser.expect_token(&Token::RParen)?;
1530
1531 Ok(TagsColumnOption { with, tags })
1532}
1533
1534fn parse_show_objects(terse: bool, parser: &mut Parser) -> Result<Statement, ParserError> {
1537 let show_options = parser.parse_show_stmt_options()?;
1538 Ok(Statement::ShowObjects(ShowObjects {
1539 terse,
1540 show_options,
1541 }))
1542}