use itertools::Itertools;
use sqruff_lib_core::dialects::Dialect;
use sqruff_lib_core::dialects::init::DialectConfig;
use sqruff_lib_core::dialects::init::DialectKind;
use sqruff_lib_core::dialects::syntax::SyntaxKind;
use sqruff_lib_core::helpers::{Config, ToMatchable};
use sqruff_lib_core::parser::grammar::anyof::{AnyNumberOf, one_of, optionally_bracketed};
use sqruff_lib_core::parser::grammar::delimited::Delimited;
use sqruff_lib_core::parser::grammar::sequence::{Bracketed, Sequence};
use sqruff_lib_core::parser::grammar::{Nothing, Ref};
use sqruff_lib_core::parser::lexer::Matcher;
use sqruff_lib_core::parser::matchable::MatchableTrait;
use sqruff_lib_core::parser::node_matcher::NodeMatcher;
use sqruff_lib_core::parser::parsers::{RegexParser, StringParser, TypedParser};
use sqruff_lib_core::parser::segments::generator::SegmentGenerator;
use sqruff_lib_core::parser::segments::meta::MetaSegment;
use sqruff_lib_core::value::Value;
// Generates the `AthenaDialectConfig` type (no options declared yet); the
// struct exists so `dialect()` can accept per-dialect configuration later.
sqruff_lib_core::dialect_config!(AthenaDialectConfig {});
/// Build the AWS Athena SQL dialect.
///
/// Starts from the ANSI dialect and layers on Athena specifics: a `->`
/// lexer token, angle-bracket pairs for complex types (ARRAY/MAP/STRUCT),
/// Hive-style table DDL (row format, stored as, partitioning, table
/// properties), CTAS/Iceberg property lists, and the MSCK REPAIR, UNLOAD,
/// PREPARE, EXECUTE and SHOW statements.
///
/// `config` is deserialized into `AthenaDialectConfig`; the config type
/// currently declares no options, so the parsed value is unused.
pub fn dialect(config: Option<&Value>) -> Dialect {
// Parse (and currently ignore) dialect-level configuration.
let _dialect_config: AthenaDialectConfig = config
.map(AthenaDialectConfig::from_value)
.unwrap_or_default();
// Fully-built ANSI dialect, used below as the source for `.copy()` when
// extending inherited grammars with Athena-specific alternatives.
let ansi_dialect = super::ansi::dialect(None);
// Raw ANSI dialect that is mutated in place into the Athena dialect.
let mut dialect = super::ansi::raw_dialect();
dialect.name = DialectKind::Athena;
// Extend the inherited keyword sets with Athena's keyword lists.
dialect
.sets_mut("unreserved_keywords")
.extend(super::athena_keywords::ATHENA_UNRESERVED_KEYWORDS);
dialect
.sets_mut("reserved_keywords")
.extend(super::athena_keywords::ATHENA_RESERVED_KEYWORDS);
// Lex `->` as its own token, registered relative to the "like_operator"
// matcher so it is matched before/instead of other operator rules.
dialect.insert_lexer_matchers(
vec![Matcher::string("right_arrow", "->", SyntaxKind::RightArrow)],
"like_operator",
);
// Register `<` / `>` as a bracket pair named "angle" so complex type
// schemas (ARRAY<...>, MAP<...>, STRUCT<...>) can parse as bracketed
// spans — see the `bracket_pairs_set` configs further down.
dialect
.bracket_sets_mut("angle_bracket_pairs")
.extend(vec![(
"angle",
"StartAngleBracketSegment",
"EndAngleBracketSegment",
false,
)]);
// Selectables usable without a WITH clause; Athena's version also admits
// DML statements (UPDATE/INSERT/DELETE/MERGE) in this position.
dialect.add([(
"NonWithSelectableGrammar".into(),
one_of(vec![
Ref::new("SetExpressionSegment").to_matchable(),
optionally_bracketed(vec![Ref::new("SelectStatementSegment").to_matchable()])
.to_matchable(),
Ref::new("NonSetSelectableGrammar").to_matchable(),
Ref::new("UpdateStatementSegment").to_matchable(),
Ref::new("InsertStatementSegment").to_matchable(),
Ref::new("DeleteStatementSegment").to_matchable(),
Ref::new("MergeStatementSegment").to_matchable(),
])
.to_matchable()
.into(),
)]);
// Core Athena parsers and grammars: angle-bracket tokens, the `->`
// operator, file-format keywords, property lists, storage clauses and
// partition specs.
dialect.add([
(
"StartAngleBracketSegment".into(),
StringParser::new("<", SyntaxKind::StartAngleBracket)
.to_matchable()
.into(),
),
(
"EndAngleBracketSegment".into(),
StringParser::new(">", SyntaxKind::EndAngleBracket)
.to_matchable()
.into(),
),
(
"RightArrowOperator".into(),
StringParser::new("->", SyntaxKind::BinaryOperator)
.to_matchable()
.into(),
),
// File-format keyword parsers. NOTE(review): for ORC, PARQUET, AVRO and
// ION the parsed text carries a `FILE` suffix that the grammar name does
// not (e.g. key "ORC" parses the literal "ORCFILE"); this mirrors the
// upstream sqlfluff dialect — confirm intended before changing.
(
"JSONFILE".into(),
StringParser::new("JSONFILE", SyntaxKind::FileFormat)
.to_matchable()
.into(),
),
(
"RCFILE".into(),
StringParser::new("RCFILE", SyntaxKind::FileFormat)
.to_matchable()
.into(),
),
(
"ORC".into(),
StringParser::new("ORCFILE", SyntaxKind::FileFormat)
.to_matchable()
.into(),
),
(
"PARQUET".into(),
StringParser::new("PARQUETFILE", SyntaxKind::FileFormat)
.to_matchable()
.into(),
),
(
"AVRO".into(),
StringParser::new("AVROFILE", SyntaxKind::FileFormat)
.to_matchable()
.into(),
),
(
"ION".into(),
StringParser::new("IONFILE", SyntaxKind::FileFormat)
.to_matchable()
.into(),
),
(
"SEQUENCEFILE".into(),
StringParser::new("SEQUENCEFILE", SyntaxKind::FileFormat)
.to_matchable()
.into(),
),
(
"TEXTFILE".into(),
StringParser::new("TEXTFILE", SyntaxKind::FileFormat)
.to_matchable()
.into(),
),
// A single `'key' = 'value'` pair, as used inside TBLPROPERTIES /
// SERDEPROPERTIES lists.
(
"PropertyGrammar".into(),
Sequence::new(vec![
Ref::new("QuotedLiteralSegment").to_matchable(),
Ref::new("EqualsSegment").to_matchable(),
Ref::new("QuotedLiteralSegment").to_matchable(),
])
.to_matchable()
.into(),
),
// `LOCATION '<uri>'`
(
"LocationGrammar".into(),
Sequence::new(vec![
Ref::keyword("LOCATION").to_matchable(),
Ref::new("QuotedLiteralSegment").to_matchable(),
])
.to_matchable()
.into(),
),
// `('k' = 'v', ...)`
(
"BracketedPropertyListGrammar".into(),
Bracketed::new(vec![
Delimited::new(vec![Ref::new("PropertyGrammar").to_matchable()]).to_matchable(),
])
.to_matchable()
.into(),
),
// One `property = literal` entry allowed in a CTAS `WITH (...)` clause.
(
"CTASPropertyGrammar".into(),
Sequence::new(vec![
one_of(vec![
Ref::keyword("FORMAT").to_matchable(),
Ref::keyword("PARTITIONED_BY").to_matchable(),
Ref::keyword("BUCKETED_BY").to_matchable(),
Ref::keyword("BUCKET_COUNT").to_matchable(),
Ref::keyword("WRITE_COMPRESSION").to_matchable(),
Ref::keyword("ORC_COMPRESSION").to_matchable(),
Ref::keyword("PARQUET_COMPRESSION").to_matchable(),
Ref::keyword("COMPRESSION_LEVEL").to_matchable(),
Ref::keyword("FIELD_DELIMITER").to_matchable(),
Ref::keyword("IS_EXTERNAL").to_matchable(),
Ref::keyword("TABLE_TYPE").to_matchable(),
Ref::keyword("EXTERNAL_LOCATION").to_matchable(),
])
.to_matchable(),
Ref::new("EqualsSegment").to_matchable(),
Ref::new("LiteralGrammar").to_matchable(),
])
.to_matchable()
.into(),
),
// CTAS property entry for Iceberg tables — a superset of the plain CTAS
// keywords plus Iceberg maintenance/optimization settings.
(
"CTASIcebergPropertyGrammar".into(),
Sequence::new(vec![
one_of(vec![
Ref::keyword("FORMAT").to_matchable(),
Ref::keyword("PARTITIONED_BY").to_matchable(),
Ref::keyword("BUCKETED_BY").to_matchable(),
Ref::keyword("BUCKET_COUNT").to_matchable(),
Ref::keyword("WRITE_COMPRESSION").to_matchable(),
Ref::keyword("ORC_COMPRESSION").to_matchable(),
Ref::keyword("PARQUET_COMPRESSION").to_matchable(),
Ref::keyword("COMPRESSION_LEVEL").to_matchable(),
Ref::keyword("FIELD_DELIMITER").to_matchable(),
Ref::keyword("IS_EXTERNAL").to_matchable(),
Ref::keyword("TABLE_TYPE").to_matchable(),
Ref::keyword("LOCATION").to_matchable(),
Ref::keyword("PARTITIONING").to_matchable(),
Ref::keyword("VACUUM_MAX_SNAPSHOT_AGE_SECONDS").to_matchable(),
Ref::keyword("VACUUM_MIN_SNAPSHOTS_TO_KEEP").to_matchable(),
Ref::keyword("OPTIMIZE_REWRITE_MIN_DATA_FILE_SIZE_BYTES").to_matchable(),
Ref::keyword("OPTIMIZE_REWRITE_MAX_DATA_FILE_SIZE_BYTES").to_matchable(),
Ref::keyword("OPTIMIZE_REWRITE_DATA_FILE_THRESHOLD").to_matchable(),
Ref::keyword("OPTIMIZE_REWRITE_DELETE_FILE_THRESHOLD").to_matchable(),
])
.to_matchable(),
Ref::new("EqualsSegment").to_matchable(),
Ref::new("LiteralGrammar").to_matchable(),
])
.to_matchable()
.into(),
),
// `(prop = lit, ...)` for CTAS — either plain or Iceberg property lists.
(
"BracketedCTASPropertyGrammar".into(),
Bracketed::new(vec![
one_of(vec![
Delimited::new(vec![Ref::new("CTASPropertyGrammar").to_matchable()])
.to_matchable(),
Delimited::new(vec![Ref::new("CTASIcebergPropertyGrammar").to_matchable()])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable()
.into(),
),
// One `property = literal` entry allowed in UNLOAD's `WITH (...)` clause.
(
"UnloadPropertyGrammar".into(),
Sequence::new(vec![
one_of(vec![
Ref::keyword("FORMAT").to_matchable(),
Ref::keyword("PARTITIONED_BY").to_matchable(),
Ref::keyword("COMPRESSION").to_matchable(),
Ref::keyword("FIELD_DELIMITER").to_matchable(),
])
.to_matchable(),
Ref::new("EqualsSegment").to_matchable(),
Ref::new("LiteralGrammar").to_matchable(),
])
.to_matchable()
.into(),
),
(
"BracketedUnloadPropertyGrammar".into(),
Bracketed::new(vec![
Delimited::new(vec![Ref::new("UnloadPropertyGrammar").to_matchable()])
.to_matchable(),
])
.to_matchable()
.into(),
),
// `TBLPROPERTIES ('k' = 'v', ...)`
(
"TablePropertiesGrammar".into(),
Sequence::new(vec![
Ref::keyword("TBLPROPERTIES").to_matchable(),
Ref::new("BracketedPropertyListGrammar").to_matchable(),
])
.to_matchable()
.into(),
),
// `WITH SERDEPROPERTIES ('k' = 'v', ...)`
(
"SerdePropertiesGrammar".into(),
Sequence::new(vec![
Ref::keyword("WITH").to_matchable(),
Ref::keyword("SERDEPROPERTIES").to_matchable(),
Ref::new("BracketedPropertyListGrammar").to_matchable(),
])
.to_matchable()
.into(),
),
// `TERMINATED BY '<char>'` — shared by the ROW FORMAT DELIMITED options.
(
"TerminatedByGrammar".into(),
Sequence::new(vec![
Ref::keyword("TERMINATED").to_matchable(),
Ref::keyword("BY").to_matchable(),
Ref::new("QuotedLiteralSegment").to_matchable(),
])
.to_matchable()
.into(),
),
// File formats accepted by STORED AS, including the explicit
// `INPUTFORMAT '...' OUTPUTFORMAT '...'` form.
(
"FileFormatGrammar".into(),
one_of(vec![
Ref::keyword("SEQUENCEFILE").to_matchable(),
Ref::keyword("TEXTFILE").to_matchable(),
Ref::keyword("RCFILE").to_matchable(),
Ref::keyword("ORC").to_matchable(),
Ref::keyword("PARQUET").to_matchable(),
Ref::keyword("AVRO").to_matchable(),
Ref::keyword("JSONFILE").to_matchable(),
Ref::keyword("ION").to_matchable(),
Sequence::new(vec![
Ref::keyword("INPUTFORMAT").to_matchable(),
Ref::new("QuotedLiteralSegment").to_matchable(),
Ref::keyword("OUTPUTFORMAT").to_matchable(),
Ref::new("QuotedLiteralSegment").to_matchable(),
])
.to_matchable(),
])
.to_matchable()
.into(),
),
// `STORED AS <file format>`
(
"StoredAsGrammar".into(),
Sequence::new(vec![
Ref::keyword("STORED").to_matchable(),
Ref::keyword("AS").to_matchable(),
Ref::new("FileFormatGrammar").to_matchable(),
])
.to_matchable()
.into(),
),
// `STORED BY '<handler>' [WITH SERDEPROPERTIES (...)]`
(
"StoredByGrammar".into(),
Sequence::new(vec![
Ref::keyword("STORED").to_matchable(),
Ref::keyword("BY").to_matchable(),
Ref::new("QuotedLiteralSegment").to_matchable(),
Ref::new("SerdePropertiesGrammar").optional().to_matchable(),
])
.to_matchable()
.into(),
),
// Either `[ROW FORMAT ...] [STORED AS ...]` or `STORED BY ...`.
(
"StorageFormatGrammar".into(),
one_of(vec![
Sequence::new(vec![
Ref::new("RowFormatClauseSegment").optional().to_matchable(),
Ref::new("StoredAsGrammar").optional().to_matchable(),
])
.to_matchable(),
Ref::new("StoredByGrammar").to_matchable(),
])
.to_matchable()
.into(),
),
// `COMMENT '<text>'`
(
"CommentGrammar".into(),
Sequence::new(vec![
Ref::keyword("COMMENT").to_matchable(),
Ref::new("QuotedLiteralSegment").to_matchable(),
])
.to_matchable()
.into(),
),
// `PARTITION (col [= literal], ...)` — the value assignment is optional.
(
"PartitionSpecGrammar".into(),
Sequence::new(vec![
Ref::keyword("PARTITION").to_matchable(),
Bracketed::new(vec![
Delimited::new(vec![
Sequence::new(vec![
Ref::new("ColumnReferenceSegment").to_matchable(),
Sequence::new(vec![
Ref::new("EqualsSegment").to_matchable(),
Ref::new("LiteralGrammar").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable()
.into(),
),
// Back-quoted identifiers (`like_this`) re-typed as quoted identifiers.
(
"BackQuotedIdentifierSegment".into(),
TypedParser::new(SyntaxKind::BackQuote, SyntaxKind::QuotedIdentifier)
.to_matchable()
.into(),
),
]);
// Grammars inherited from ANSI that Athena extends or overrides.
dialect.add([
// ANSI literals plus `ParameterSegment`.
(
"LiteralGrammar".into(),
ansi_dialect
.grammar("LiteralGrammar")
.copy(
Some(vec![Ref::new("ParameterSegment").to_matchable()]),
None,
None,
None,
Vec::new(),
false,
)
.into(),
),
// Postfix accessors: optional array subscripts, then optional
// dot-delimited object references.
(
"AccessorGrammar".into(),
Sequence::new(vec![
AnyNumberOf::new(vec![Ref::new("ArrayAccessorSegment").to_matchable()])
.config(|config| {
config.optional();
})
.to_matchable(),
AnyNumberOf::new(vec![
Sequence::new(vec![
Ref::new("ObjectReferenceDelimiterGrammar").to_matchable(),
Ref::new("ObjectReferenceSegment").to_matchable(),
])
.to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
])
.to_matchable()
.into(),
),
// Athena accepts single-, double- and back-quoted literals.
(
"QuotedLiteralSegment".into(),
one_of(vec![
TypedParser::new(SyntaxKind::SingleQuote, SyntaxKind::QuotedLiteral).to_matchable(),
TypedParser::new(SyntaxKind::DoubleQuote, SyntaxKind::QuotedLiteral).to_matchable(),
TypedParser::new(SyntaxKind::BackQuote, SyntaxKind::QuotedLiteral).to_matchable(),
])
.to_matchable()
.into(),
),
// Disabled: matches nothing (no BOTH/LEADING/TRAILING-style TRIM args).
(
"TrimParametersGrammar".into(),
Nothing::new().to_matchable().into(),
),
// Unquoted identifiers: letters/digits/underscores with at least one
// non-digit character; reserved keywords are excluded via the
// anti-template built from the dialect's reserved set.
(
"NakedIdentifierSegment".into(),
SegmentGenerator::new(|dialect| {
let reserved_keywords = dialect.sets("reserved_keywords");
let pattern = reserved_keywords.iter().join("|");
let anti_template = format!("^({pattern})$");
RegexParser::new("[A-Z0-9_]*[A-Z_][A-Z0-9_]*", SyntaxKind::NakedIdentifier)
.anti_template(&anti_template)
.to_matchable()
})
.into(),
),
// ANSI single identifiers plus back-quoted identifiers.
(
"SingleIdentifierGrammar".into(),
ansi_dialect
.grammar("SingleIdentifierGrammar")
.copy(
Some(vec![Ref::new("BackQuotedIdentifierSegment").to_matchable()]),
None,
None,
None,
Vec::new(),
false,
)
.into(),
),
// Binary operators, extended with the `->` operator registered above.
(
"BinaryOperatorGrammar".into(),
one_of(vec![
Ref::new("ArithmeticBinaryOperatorGrammar").to_matchable(),
Ref::new("StringBinaryOperatorGrammar").to_matchable(),
Ref::new("BooleanBinaryOperatorGrammar").to_matchable(),
Ref::new("ComparisonOperatorGrammar").to_matchable(),
Ref::new("RightArrowOperator").to_matchable(),
])
.to_matchable()
.into(),
),
// ANSI post-function clauses plus `WITH ORDINALITY` and WITHIN GROUP.
(
"PostFunctionGrammar".into(),
ansi_dialect
.grammar("PostFunctionGrammar")
.copy(
Some(vec![
Sequence::new(vec![
Ref::keyword("WITH").to_matchable(),
Ref::keyword("ORDINALITY").to_matchable(),
])
.config(|config| config.optional())
.to_matchable(),
Ref::new("WithinGroupClauseSegment").to_matchable(),
]),
None,
None,
None,
Vec::new(),
false,
)
.into(),
),
// ANSI function contents plus the LISTAGG `ON OVERFLOW` clause.
(
"FunctionContentsGrammar".into(),
ansi_dialect
.grammar("FunctionContentsGrammar")
.copy(
Some(vec![
Ref::new("ListaggOverflowClauseSegment").to_matchable(),
]),
None,
None,
None,
Vec::new(),
false,
)
.into(),
),
]);
// Clause segments referenced by the grammar extensions above.
dialect.add([
// `WITHIN GROUP (ORDER BY ...) [FILTER ...]`
(
"WithinGroupClauseSegment".into(),
Sequence::new(vec![
Ref::keyword("WITHIN").to_matchable(),
Ref::keyword("GROUP").to_matchable(),
Bracketed::new(vec![Ref::new("OrderByClauseSegment").to_matchable()])
.to_matchable(),
Ref::new("FilterClauseGrammar").optional().to_matchable(),
])
.to_matchable()
.into(),
),
// `ON OVERFLOW ERROR` or `ON OVERFLOW TRUNCATE ['...'] [WITH|WITHOUT] [COUNT]`
(
"ListaggOverflowClauseSegment".into(),
Sequence::new(vec![
Ref::keyword("ON").to_matchable(),
Ref::keyword("OVERFLOW").to_matchable(),
one_of(vec![
Ref::keyword("ERROR").to_matchable(),
Sequence::new(vec![
Ref::keyword("TRUNCATE").to_matchable(),
Ref::new("QuotedLiteralSegment").optional().to_matchable(),
one_of(vec![
Ref::keyword("WITH").to_matchable(),
Ref::keyword("WITHOUT").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
Ref::keyword("COUNT").optional().to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable()
.into(),
),
// `VALUES expr, expr, ...` — expressions rather than ANSI's row lists.
(
"ValuesClauseSegment".into(),
NodeMatcher::new(SyntaxKind::ValuesClause, |_| {
Sequence::new(vec![
Ref::keyword("VALUES").to_matchable(),
Delimited::new(vec![Ref::new("ExpressionSegment").to_matchable()])
.to_matchable(),
])
.to_matchable()
})
.to_matchable()
.into(),
),
]);
// `ARRAY[<schema>]` with an optional angle-bracketed element type.
dialect.replace_grammar(
"ArrayTypeSegment",
Sequence::new(vec![
Ref::keyword("ARRAY").to_matchable(),
Ref::new("ArrayTypeSchemaSegment").optional().to_matchable(),
])
.to_matchable(),
);
// `<datatype>` using the "angle" bracket pair registered earlier.
dialect.replace_grammar(
"ArrayTypeSchemaSegment",
Bracketed::new(vec![Ref::new("DatatypeSegment").to_matchable()])
.config(|config| {
config.bracket_pairs_set = "angle_bracket_pairs";
config.bracket_type = "angle";
})
.to_matchable(),
);
// `STRUCT` with an optional angle-bracketed field schema.
dialect.replace_grammar(
"StructTypeSegment",
Sequence::new(vec![
Ref::keyword("STRUCT").to_matchable(),
Ref::new("StructTypeSchemaSegment")
.optional()
.to_matchable(),
])
.to_matchable(),
);
// MAP type: `MAP` with an optional `<primitive, datatype>` schema.
dialect.add([
(
"MapTypeSegment".into(),
NodeMatcher::new(SyntaxKind::MapType, |_| {
Sequence::new(vec![
Ref::keyword("MAP").to_matchable(),
Ref::new("MapTypeSchemaSegment").optional().to_matchable(),
])
.to_matchable()
})
.to_matchable()
.into(),
),
(
"MapTypeSchemaSegment".into(),
NodeMatcher::new(SyntaxKind::MapTypeSchema, |_| {
Bracketed::new(vec![
Sequence::new(vec![
Ref::new("PrimitiveTypeSegment").to_matchable(),
Ref::new("CommaSegment").to_matchable(),
Ref::new("DatatypeSegment").to_matchable(),
])
.to_matchable(),
])
.config(|config| {
config.bracket_pairs_set = "angle_bracket_pairs";
config.bracket_type = "angle";
})
.to_matchable()
})
.to_matchable()
.into(),
),
]);
// Athena statement set: inserts MSCK REPAIR, UNLOAD, PREPARE, EXECUTE and
// SHOW; the second list names ANSI statements presumably removed as
// unsupported (transactions, schema set/create, models) — confirm the
// `copy()` argument order against its definition.
dialect.replace_grammar(
"StatementSegment",
super::ansi::statement_segment().copy(
Some(vec![
Ref::new("MsckRepairTableStatementSegment").to_matchable(),
Ref::new("UnloadStatementSegment").to_matchable(),
Ref::new("PrepareStatementSegment").to_matchable(),
Ref::new("ExecuteStatementSegment").to_matchable(),
Ref::new("ShowStatementSegment").to_matchable(),
]),
None,
None,
Some(vec![
Ref::new("TransactionStatementSegment").to_matchable(),
Ref::new("CreateSchemaStatementSegment").to_matchable(),
Ref::new("SetSchemaStatementSegment").to_matchable(),
Ref::new("CreateModelStatementSegment").to_matchable(),
Ref::new("DropModelStatementSegment").to_matchable(),
]),
Vec::new(),
false,
),
);
// Type segments: STRUCT schema, primitive types, and the full datatype.
dialect.add([
// `<name: type [COMMENT '...'], ...>` in angle brackets.
(
"StructTypeSchemaSegment".into(),
NodeMatcher::new(SyntaxKind::StructTypeSchema, |_| {
Bracketed::new(vec![
Delimited::new(vec![
Sequence::new(vec![
Ref::new("NakedIdentifierSegment").to_matchable(),
Ref::new("ColonSegment").to_matchable(),
Ref::new("DatatypeSegment").to_matchable(),
Ref::new("CommentGrammar").optional().to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.config(|config| {
config.bracket_pairs_set = "angle_bracket_pairs";
config.bracket_type = "angle";
})
.to_matchable()
})
.to_matchable()
.into(),
),
// Scalar types; DECIMAL/CHAR/VARCHAR take optional bracketed arguments.
(
"PrimitiveTypeSegment".into(),
NodeMatcher::new(SyntaxKind::PrimitiveType, |_| {
one_of(vec![
Ref::keyword("BOOLEAN").to_matchable(),
Ref::keyword("TINYINT").to_matchable(),
Ref::keyword("SMALLINT").to_matchable(),
Ref::keyword("INTEGER").to_matchable(),
Ref::keyword("INT").to_matchable(),
Ref::keyword("BIGINT").to_matchable(),
Ref::keyword("DOUBLE").to_matchable(),
Ref::keyword("FLOAT").to_matchable(),
Ref::keyword("REAL").to_matchable(),
Sequence::new(vec![
one_of(vec![
Ref::keyword("DECIMAL").to_matchable(),
Ref::keyword("CHAR").to_matchable(),
Ref::keyword("VARCHAR").to_matchable(),
])
.to_matchable(),
Ref::new("BracketedArguments").optional().to_matchable(),
])
.to_matchable(),
Ref::keyword("STRING").to_matchable(),
Ref::keyword("BINARY").to_matchable(),
Ref::keyword("DATE").to_matchable(),
Ref::keyword("TIMESTAMP").to_matchable(),
Ref::keyword("VARBINARY").to_matchable(),
Ref::keyword("JSON").to_matchable(),
Ref::keyword("TIME").to_matchable(),
Ref::keyword("IPADDRESS").to_matchable(),
Ref::keyword("HYPERLOGLOG").to_matchable(),
Ref::keyword("P4HYPERLOGLOG").to_matchable(),
])
.to_matchable()
})
.to_matchable()
.into(),
),
// Full datatype: primitive, STRUCT, ARRAY, MAP, ROW(...), or TIME WITH TZ.
(
"DatatypeSegment".into(),
NodeMatcher::new(SyntaxKind::DataType, |_| {
one_of(vec![
Ref::new("PrimitiveTypeSegment").to_matchable(),
Ref::new("StructTypeSegment").to_matchable(),
Ref::new("ArrayTypeSegment").to_matchable(),
Ref::new("MapTypeSegment").to_matchable(),
Sequence::new(vec![
Ref::keyword("ROW").to_matchable(),
Bracketed::new(vec![
Delimited::new(vec![
AnyNumberOf::new(vec![
Sequence::new(vec![
Ref::new("NakedIdentifierSegment").to_matchable(),
Ref::new("DatatypeSegment").to_matchable(),
])
.to_matchable(),
Ref::new("LiteralGrammar").to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
Ref::new("TimeWithTZGrammar").to_matchable(),
])
.to_matchable()
})
.to_matchable()
.into(),
),
]);
// GROUP BY accepting CUBE/ROLLUP, GROUPING SETS, columns, ordinals and
// expressions.
dialect.replace_grammar(
"GroupByClauseSegment",
Sequence::new(vec![
Ref::keyword("GROUP").to_matchable(),
Ref::keyword("BY").to_matchable(),
MetaSegment::indent().to_matchable(),
Delimited::new(vec![
one_of(vec![
Ref::new("CubeRollupClauseSegment").to_matchable(),
Ref::new("GroupingSetsClauseSegment").to_matchable(),
Ref::new("ColumnReferenceSegment").to_matchable(),
Ref::new("NumericLiteralSegment").to_matchable(),
Ref::new("ExpressionSegment").to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
MetaSegment::dedent().to_matchable(),
])
.to_matchable(),
);
// Statement segments: CREATE TABLE (Hive DDL + CTAS), MSCK REPAIR,
// ROW FORMAT, INSERT, UNLOAD, PREPARE, EXECUTE and INTERVAL expressions.
dialect.add([
// CREATE [EXTERNAL] TABLE — either a Hive-style column/storage definition
// or a CTAS body (`[WITH (...)] AS <select> [WITH NO DATA]`).
(
"CreateTableStatementSegment".into(),
NodeMatcher::new(SyntaxKind::CreateTableStatement, |_| {
Sequence::new(vec![
Ref::keyword("CREATE").to_matchable(),
Ref::keyword("EXTERNAL").optional().to_matchable(),
Ref::keyword("TABLE").to_matchable(),
Ref::new("IfNotExistsGrammar").optional().to_matchable(),
Ref::new("TableReferenceSegment").to_matchable(),
one_of(vec![
Sequence::new(vec![
Bracketed::new(vec![
Delimited::new(vec![
one_of(vec![
Ref::new("TableConstraintSegment")
.optional()
.to_matchable(),
Sequence::new(vec![
Ref::new("ColumnDefinitionSegment").to_matchable(),
Ref::new("CommentGrammar").optional().to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
Ref::new("CommentGrammar").optional().to_matchable(),
Ref::new("StoredAsGrammar").optional().to_matchable(),
// `PARTITIONED BY (col def | identifier | function, ...)`
Sequence::new(vec![
Ref::keyword("PARTITIONED").to_matchable(),
Ref::keyword("BY").to_matchable(),
Bracketed::new(vec![
Delimited::new(vec![
Sequence::new(vec![
one_of(vec![
Ref::new("ColumnDefinitionSegment").to_matchable(),
Ref::new("SingleIdentifierGrammar").to_matchable(),
Ref::new("FunctionSegment").to_matchable(),
])
.to_matchable(),
Ref::new("CommentGrammar").optional().to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
// `CLUSTERED BY (...) INTO n BUCKETS`
Sequence::new(vec![
Ref::keyword("CLUSTERED").to_matchable(),
Ref::keyword("BY").to_matchable(),
Ref::new("BracketedColumnReferenceListGrammar").to_matchable(),
Ref::keyword("INTO").to_matchable(),
Ref::new("NumericLiteralSegment").to_matchable(),
Ref::keyword("BUCKETS").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
Ref::new("StoredAsGrammar").optional().to_matchable(),
Ref::new("StorageFormatGrammar").optional().to_matchable(),
Ref::new("LocationGrammar").optional().to_matchable(),
Ref::new("TablePropertiesGrammar").optional().to_matchable(),
Ref::new("CommentGrammar").optional().to_matchable(),
])
.to_matchable(),
// CTAS branch.
Sequence::new(vec![
Sequence::new(vec![
Ref::keyword("WITH").to_matchable(),
Ref::new("BracketedCTASPropertyGrammar").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
Ref::keyword("AS").to_matchable(),
optionally_bracketed(vec![
Ref::new("SelectableGrammar").to_matchable(),
])
.to_matchable(),
Sequence::new(vec![
Ref::keyword("WITH").to_matchable(),
Ref::keyword("NO").to_matchable(),
Ref::keyword("DATA").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable()
})
.to_matchable()
.into(),
),
// `MSCK REPAIR TABLE <table>`
(
"MsckRepairTableStatementSegment".into(),
NodeMatcher::new(SyntaxKind::MsckRepairTableStatement, |_| {
Sequence::new(vec![
Ref::keyword("MSCK").to_matchable(),
Ref::keyword("REPAIR").to_matchable(),
Ref::keyword("TABLE").to_matchable(),
Ref::new("TableReferenceSegment").to_matchable(),
])
.to_matchable()
})
.to_matchable()
.into(),
),
// `ROW FORMAT DELIMITED ...` or `ROW FORMAT SERDE '...' [WITH SERDEPROPERTIES]`.
(
"RowFormatClauseSegment".into(),
NodeMatcher::new(SyntaxKind::RowFormatClause, |_| {
Sequence::new(vec![
Ref::keyword("ROW").to_matchable(),
Ref::keyword("FORMAT").to_matchable(),
one_of(vec![
Sequence::new(vec![
Ref::keyword("DELIMITED").to_matchable(),
// Each DELIMITED sub-clause below is independently optional.
Sequence::new(vec![
Ref::keyword("FIELDS").to_matchable(),
Ref::new("TerminatedByGrammar").to_matchable(),
Sequence::new(vec![
Ref::keyword("ESCAPED").to_matchable(),
Ref::keyword("BY").to_matchable(),
Ref::new("QuotedLiteralSegment").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
Sequence::new(vec![
Ref::keyword("COLLECTION").to_matchable(),
Ref::keyword("ITEMS").to_matchable(),
Ref::new("TerminatedByGrammar").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
Sequence::new(vec![
Ref::keyword("MAP").to_matchable(),
Ref::keyword("KEYS").to_matchable(),
Ref::new("TerminatedByGrammar").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
Sequence::new(vec![
Ref::keyword("LINES").to_matchable(),
Ref::new("TerminatedByGrammar").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
Sequence::new(vec![
Ref::keyword("NULL").to_matchable(),
Ref::keyword("DEFINED").to_matchable(),
Ref::keyword("AS").to_matchable(),
Ref::new("QuotedLiteralSegment").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
])
.to_matchable(),
Sequence::new(vec![
Ref::keyword("SERDE").to_matchable(),
Ref::new("QuotedLiteralSegment").to_matchable(),
Ref::new("SerdePropertiesGrammar").optional().to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable()
})
.to_matchable()
.into(),
),
// `INSERT INTO <table>` followed by a select, `DEFAULT VALUES`, or an
// optional column list plus VALUES/select.
(
"InsertStatementSegment".into(),
NodeMatcher::new(SyntaxKind::InsertStatement, |_| {
Sequence::new(vec![
Ref::keyword("INSERT").to_matchable(),
Ref::keyword("INTO").to_matchable(),
Ref::new("TableReferenceSegment").to_matchable(),
one_of(vec![
optionally_bracketed(vec![Ref::new("SelectableGrammar").to_matchable()])
.to_matchable(),
Sequence::new(vec![
Ref::keyword("DEFAULT").to_matchable(),
Ref::keyword("VALUES").to_matchable(),
])
.to_matchable(),
Sequence::new(vec![
Ref::new("BracketedColumnReferenceListGrammar")
.optional()
.to_matchable(),
one_of(vec![
Ref::new("ValuesClauseSegment").to_matchable(),
optionally_bracketed(vec![
Ref::new("SelectableGrammar").to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable()
})
.to_matchable()
.into(),
),
// `UNLOAD (<select>) TO '<uri>' [WITH (prop = lit, ...)]`
(
"UnloadStatementSegment".into(),
NodeMatcher::new(SyntaxKind::UnloadStatement, |_| {
Sequence::new(vec![
Ref::keyword("UNLOAD").to_matchable(),
Bracketed::new(vec![Ref::new("SelectableGrammar").to_matchable()])
.to_matchable(),
Ref::keyword("TO").to_matchable(),
Ref::new("QuotedLiteralSegment").to_matchable(),
Sequence::new(vec![
Ref::keyword("WITH").to_matchable(),
Ref::new("BracketedUnloadPropertyGrammar").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
])
.to_matchable()
})
.to_matchable()
.into(),
),
// `PREPARE <name> FROM <select | unload | insert>`. NOTE(review): the
// statement name is parsed as a TableReferenceSegment.
(
"PrepareStatementSegment".into(),
NodeMatcher::new(SyntaxKind::PrepareStatement, |_| {
Sequence::new(vec![
Ref::keyword("PREPARE").to_matchable(),
Ref::new("TableReferenceSegment").to_matchable(),
Ref::keyword("FROM").to_matchable(),
optionally_bracketed(vec![
one_of(vec![
Ref::new("SelectableGrammar").to_matchable(),
Ref::new("UnloadStatementSegment").to_matchable(),
Ref::new("InsertStatementSegment").to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable()
})
.to_matchable()
.into(),
),
// `EXECUTE <name> [USING lit, lit, ...]`
(
"ExecuteStatementSegment".into(),
NodeMatcher::new(SyntaxKind::ExecuteStatement, |_| {
Sequence::new(vec![
Ref::keyword("EXECUTE").to_matchable(),
Ref::new("TableReferenceSegment").to_matchable(),
one_of(vec![
Sequence::new(vec![
Ref::keyword("USING").to_matchable(),
Delimited::new(vec![Ref::new("LiteralGrammar").to_matchable()])
.to_matchable(),
])
.to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
])
.to_matchable()
})
.to_matchable()
.into(),
),
// `[INTERVAL] <literal|expr> <unit> [TO <unit>]`
(
"IntervalExpressionSegment".into(),
NodeMatcher::new(SyntaxKind::IntervalExpression, |_| {
Sequence::new(vec![
Ref::keyword("INTERVAL").optional().to_matchable(),
one_of(vec![
Sequence::new(vec![
one_of(vec![
Ref::new("QuotedLiteralSegment").to_matchable(),
Ref::new("NumericLiteralSegment").to_matchable(),
Bracketed::new(vec![Ref::new("ExpressionSegment").to_matchable()])
.to_matchable(),
])
.to_matchable(),
Ref::new("DatetimeUnitSegment").to_matchable(),
Sequence::new(vec![
Ref::keyword("TO").to_matchable(),
Ref::new("DatetimeUnitSegment").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable()
})
.to_matchable()
.into(),
),
]);
// ALTER TABLE helpers and the SHOW statement family.
dialect.add([
// `DROP COLUMN <identifier>`
(
"AlterTableDropColumnGrammar".into(),
Sequence::new(vec![
Ref::keyword("DROP").to_matchable(),
Ref::keyword("COLUMN").to_matchable(),
Ref::new("SingleIdentifierGrammar").to_matchable(),
])
.to_matchable()
.into(),
),
// SHOW COLUMNS / CREATE TABLE|VIEW / DATABASES|SCHEMAS / PARTITIONS /
// TABLES / TBLPROPERTIES / VIEWS.
(
"ShowStatementSegment".into(),
NodeMatcher::new(SyntaxKind::ShowStatement, |_| {
Sequence::new(vec![
Ref::keyword("SHOW").to_matchable(),
one_of(vec![
Sequence::new(vec![
Ref::keyword("COLUMNS").to_matchable(),
one_of(vec![
Ref::keyword("FROM").to_matchable(),
Ref::keyword("IN").to_matchable(),
])
.to_matchable(),
one_of(vec![
Sequence::new(vec![
Ref::new("DatabaseReferenceSegment").to_matchable(),
Ref::new("TableReferenceSegment").to_matchable(),
])
.to_matchable(),
Sequence::new(vec![
Ref::new("TableReferenceSegment").to_matchable(),
Sequence::new(vec![
one_of(vec![
Ref::keyword("FROM").to_matchable(),
Ref::keyword("IN").to_matchable(),
])
.to_matchable(),
Ref::new("DatabaseReferenceSegment").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
Sequence::new(vec![
Ref::keyword("CREATE").to_matchable(),
one_of(vec![
Ref::keyword("TABLE").to_matchable(),
Ref::keyword("VIEW").to_matchable(),
])
.to_matchable(),
Ref::new("TableReferenceSegment").to_matchable(),
])
.to_matchable(),
Sequence::new(vec![
one_of(vec![
Ref::keyword("DATABASES").to_matchable(),
Ref::keyword("SCHEMAS").to_matchable(),
])
.to_matchable(),
Sequence::new(vec![
Ref::keyword("LIKE").to_matchable(),
Ref::new("QuotedLiteralSegment").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
])
.to_matchable(),
Sequence::new(vec![
Ref::keyword("PARTITIONS").to_matchable(),
Ref::new("TableReferenceSegment").to_matchable(),
])
.to_matchable(),
Sequence::new(vec![
Ref::keyword("TABLES").to_matchable(),
Sequence::new(vec![
Ref::keyword("IN").to_matchable(),
Ref::new("DatabaseReferenceSegment").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
Ref::new("QuotedLiteralSegment").optional().to_matchable(),
])
.to_matchable(),
Sequence::new(vec![
Ref::keyword("TBLPROPERTIES").to_matchable(),
Ref::new("TableReferenceSegment").to_matchable(),
Bracketed::new(vec![Ref::new("QuotedLiteralSegment").to_matchable()])
.config(|config| {
config.optional();
})
.to_matchable(),
])
.to_matchable(),
Sequence::new(vec![
Ref::keyword("VIEWS").to_matchable(),
Sequence::new(vec![
Ref::keyword("IN").to_matchable(),
Ref::new("DatabaseReferenceSegment").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
Sequence::new(vec![
Ref::keyword("LIKE").to_matchable(),
Ref::new("QuotedLiteralSegment").to_matchable(),
])
.config(|config| {
config.optional();
})
.to_matchable(),
])
.to_matchable(),
])
.to_matchable(),
])
.to_matchable()
})
.to_matchable()
.into(),
),
]);
// Finalize: expand all grammar references and return the dialect.
dialect.config(|this| this.expand())
}