1use crate::linter::config::LintConfig;
7use crate::linter::rule::{LintContext, LintRule};
8use crate::types::{issue_codes, Dialect, Issue, IssueAutofixApplicability, IssuePatchEdit};
9use sqlparser::ast::*;
10use sqlparser::tokenizer::{Token, TokenWithSpan, Tokenizer, Whitespace};
11use std::collections::{HashMap, HashSet};
12
/// How alias names are compared against qualifier references, mirroring the
/// rule's `alias_case_check` configuration option.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum AliasCaseCheck {
    /// Follow the casing/quoting semantics of the active SQL dialect.
    Dialect,
    /// Compare names ignoring ASCII case.
    CaseInsensitive,
    /// Quoted identifiers compare case-sensitively; naked ones fold to upper case.
    QuotedCsNakedUpper,
    /// Quoted identifiers compare case-sensitively; naked ones fold to lower case.
    QuotedCsNakedLower,
    /// Compare names exactly as written.
    CaseSensitive,
}
21
22impl AliasCaseCheck {
23 fn from_config(config: &LintConfig) -> Self {
24 match config
25 .rule_option_str(issue_codes::LINT_AL_005, "alias_case_check")
26 .unwrap_or("dialect")
27 .to_ascii_lowercase()
28 .as_str()
29 {
30 "case_insensitive" => Self::CaseInsensitive,
31 "quoted_cs_naked_upper" => Self::QuotedCsNakedUpper,
32 "quoted_cs_naked_lower" => Self::QuotedCsNakedLower,
33 "case_sensitive" => Self::CaseSensitive,
34 _ => Self::Dialect,
35 }
36 }
37}
38
/// A table alias declared in a FROM clause, with the details needed to decide
/// whether it is ever referenced.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
struct AliasRef {
    // Alias text as written (without quote characters).
    name: String,
    // Whether the alias was written with a quote style.
    quoted: bool,
    // ASCII-upper-cased name of the aliased relation when the alias came from
    // a plain table reference; `None` for derived tables / lateral functions.
    relation_key: Option<String>,
}
45
/// A qualifier (the `t` in `t.col`) observed somewhere in the statement.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
struct QualifierRef {
    // Qualifier text as written (without quote characters).
    name: String,
    // Whether the qualifier was written quoted.
    quoted: bool,
}
51
/// AL005: flags table aliases that are declared but never referenced.
pub struct UnusedTableAlias {
    // Comparison mode used when matching qualifier references to aliases.
    alias_case_check: AliasCaseCheck,
}
55
56impl UnusedTableAlias {
57 pub fn from_config(config: &LintConfig) -> Self {
58 Self {
59 alias_case_check: AliasCaseCheck::from_config(config),
60 }
61 }
62}
63
64impl Default for UnusedTableAlias {
65 fn default() -> Self {
66 Self {
67 alias_case_check: AliasCaseCheck::Dialect,
68 }
69 }
70}
71
impl LintRule for UnusedTableAlias {
    fn code(&self) -> &'static str {
        issue_codes::LINT_AL_005
    }

    fn name(&self) -> &'static str {
        "Unused table alias"
    }

    fn description(&self) -> &'static str {
        "Tables should not be aliased if that alias is not used."
    }

    /// Scan one statement for alias declarations that are never referenced.
    ///
    /// Queries are walked wherever they can appear: top-level queries, INSERT
    /// sources, CREATE VIEW bodies, CREATE TABLE ... AS bodies, and DELETEs.
    /// If any issue was produced, a token-based "legacy" pass over the raw
    /// statement SQL computes deletion spans for the unused alias text, and
    /// the resulting edits are attached as a Safe autofix to the FIRST issue
    /// only (the fix removes every occurrence in one patch).
    fn check(&self, stmt: &Statement, ctx: &LintContext) -> Vec<Issue> {
        let mut issues = Vec::new();
        match stmt {
            Statement::Query(q) => check_query(q, self.alias_case_check, ctx, &mut issues),
            Statement::Insert(ins) => {
                if let Some(ref source) = ins.source {
                    check_query(source, self.alias_case_check, ctx, &mut issues);
                }
            }
            Statement::CreateView(CreateView { query, .. }) => {
                check_query(query, self.alias_case_check, ctx, &mut issues)
            }
            Statement::CreateTable(create) => {
                // Only CREATE TABLE ... AS SELECT carries a query to inspect.
                if let Some(ref q) = create.query {
                    check_query(q, self.alias_case_check, ctx, &mut issues);
                }
            }
            Statement::Delete(delete) => {
                check_delete(delete, self.alias_case_check, ctx, &mut issues);
            }
            _ => {}
        }

        if let Some(first_issue) = issues.first_mut() {
            // Edits are computed from the raw SQL (not the AST) so original
            // byte offsets of the alias tokens can be deleted in place; each
            // (start, end) pair becomes an empty-replacement patch edit.
            let autofix_edits: Vec<IssuePatchEdit> = al005_legacy_autofix_edits(
                ctx.statement_sql(),
                ctx.dialect(),
                self.alias_case_check,
            )
            .into_iter()
            .map(|(start, end)| IssuePatchEdit::new(ctx.span_from_statement_offset(start, end), ""))
            .collect();
            if !autofix_edits.is_empty() {
                *first_issue = first_issue
                    .clone()
                    .with_autofix_edits(IssueAutofixApplicability::Safe, autofix_edits);
            }
        }

        issues
    }
}
127
128fn check_query(
129 query: &Query,
130 alias_case_check: AliasCaseCheck,
131 ctx: &LintContext,
132 issues: &mut Vec<Issue>,
133) {
134 if let Some(ref with) = query.with {
135 for cte in &with.cte_tables {
136 check_query(&cte.query, alias_case_check, ctx, issues);
137 }
138 }
139 match query.body.as_ref() {
140 SetExpr::Select(select) => check_select(
141 select,
142 query.order_by.as_ref(),
143 alias_case_check,
144 ctx,
145 issues,
146 ),
147 _ => check_set_expr(&query.body, alias_case_check, ctx, issues),
148 }
149}
150
151fn check_set_expr(
152 body: &SetExpr,
153 alias_case_check: AliasCaseCheck,
154 ctx: &LintContext,
155 issues: &mut Vec<Issue>,
156) {
157 match body {
158 SetExpr::Select(select) => {
159 check_select(select, None, alias_case_check, ctx, issues);
160 }
161 SetExpr::Query(q) => check_query(q, alias_case_check, ctx, issues),
162 SetExpr::SetOperation { left, right, .. } => {
163 check_set_expr(left, alias_case_check, ctx, issues);
164 check_set_expr(right, alias_case_check, ctx, issues);
165 }
166 _ => {}
167 }
168}
169
/// Check one SELECT: recurse into FROM-clause subqueries, gather every alias
/// declared in FROM/JOIN, gather every qualifier used anywhere in the SELECT
/// (and its ORDER BY, if supplied), then report aliases with no matching use.
fn check_select(
    select: &Select,
    order_by: Option<&OrderBy>,
    alias_case_check: AliasCaseCheck,
    ctx: &LintContext,
    issues: &mut Vec<Issue>,
) {
    // Derived tables and nested joins get their own independent checks.
    for from_item in &select.from {
        check_table_factor_subqueries(&from_item.relation, alias_case_check, ctx, issues);
        for join in &from_item.joins {
            check_table_factor_subqueries(&join.relation, alias_case_check, ctx, issues);
        }
    }

    let mut aliases: HashMap<String, AliasRef> = HashMap::new();
    for from_item in &select.from {
        collect_aliases(&from_item.relation, ctx.dialect(), &mut aliases);
        for join in &from_item.joins {
            collect_aliases(&join.relation, ctx.dialect(), &mut aliases);
        }
    }

    // Nothing to report if no alias was declared in this SELECT.
    if aliases.is_empty() {
        return;
    }

    let mut used_prefixes: HashSet<QualifierRef> = HashSet::new();
    collect_identifier_prefixes_from_select(select, order_by, ctx.dialect(), &mut used_prefixes);

    // Redshift: an identifier in QUALIFY named `<alias>_...` is treated as a
    // use of that alias (alias-prefixed column naming), but only when QUALIFY
    // references are counted at all for this SELECT.
    if matches!(ctx.dialect(), Dialect::Redshift) {
        if let Some(qualify) = &select.qualify {
            if include_qualify_alias_references(ctx.dialect(), select) {
                for alias in aliases.values() {
                    if redshift_qualify_uses_alias_prefixed_identifier(qualify, &alias.name) {
                        used_prefixes.insert(QualifierRef {
                            name: alias.name.clone(),
                            quoted: alias.quoted,
                        });
                    }
                }
            }
        }
    }

    emit_unused_alias_issues(
        &aliases,
        &used_prefixes,
        alias_case_check,
        ctx.dialect(),
        ctx.statement_index,
        issues,
    );
}
224
/// Check a DELETE statement: aliases can be declared in FROM/USING tables,
/// and references can appear in join constraints, WHERE, RETURNING,
/// ORDER BY, and LIMIT.
fn check_delete(
    delete: &Delete,
    alias_case_check: AliasCaseCheck,
    ctx: &LintContext,
    issues: &mut Vec<Issue>,
) {
    let mut aliases: HashMap<String, AliasRef> = HashMap::new();
    let mut used_prefixes: HashSet<QualifierRef> = HashSet::new();

    for table in delete_source_tables(delete) {
        // Derived tables inside FROM/USING get their own independent checks.
        check_table_factor_subqueries(&table.relation, alias_case_check, ctx, issues);
        collect_aliases(&table.relation, ctx.dialect(), &mut aliases);
        collect_identifier_prefixes_from_table_factor(
            &table.relation,
            ctx.dialect(),
            &mut used_prefixes,
        );

        for join in &table.joins {
            check_table_factor_subqueries(&join.relation, alias_case_check, ctx, issues);
            collect_aliases(&join.relation, ctx.dialect(), &mut aliases);
            collect_identifier_prefixes_from_table_factor(
                &join.relation,
                ctx.dialect(),
                &mut used_prefixes,
            );
            // ON-clause expressions count as alias uses.
            if let Some(constraint) = join_constraint(&join.join_operator) {
                collect_identifier_prefixes(constraint, ctx.dialect(), &mut used_prefixes);
            }
        }
    }

    if let Some(selection) = &delete.selection {
        collect_identifier_prefixes(selection, ctx.dialect(), &mut used_prefixes);
    }
    if let Some(returning) = &delete.returning {
        for item in returning {
            collect_identifier_prefixes_from_select_item(item, ctx.dialect(), &mut used_prefixes);
        }
    }
    for order_expr in &delete.order_by {
        collect_identifier_prefixes(&order_expr.expr, ctx.dialect(), &mut used_prefixes);
    }
    if let Some(limit) = &delete.limit {
        collect_identifier_prefixes(limit, ctx.dialect(), &mut used_prefixes);
    }

    emit_unused_alias_issues(
        &aliases,
        &used_prefixes,
        alias_case_check,
        ctx.dialect(),
        ctx.statement_index,
        issues,
    );
}
281
282fn delete_source_tables(delete: &Delete) -> Vec<&TableWithJoins> {
283 let mut tables = Vec::new();
284
285 match &delete.from {
286 FromTable::WithFromKeyword(from) | FromTable::WithoutKeyword(from) => {
287 tables.extend(from.iter());
288 }
289 }
290
291 if let Some(using_tables) = &delete.using {
292 tables.extend(using_tables.iter());
293 }
294
295 tables
296}
297
/// Emit one warning per declared alias that no collected qualifier matches.
///
/// Exemption: in a self-join, the same relation is aliased more than once and
/// at least one of those aliases is used — the "unused" siblings are then
/// required for disambiguation, so they are not reported.
fn emit_unused_alias_issues(
    aliases: &HashMap<String, AliasRef>,
    used_prefixes: &HashSet<QualifierRef>,
    alias_case_check: AliasCaseCheck,
    dialect: Dialect,
    statement_index: usize,
    issues: &mut Vec<Issue>,
) {
    if aliases.is_empty() {
        return;
    }

    // First pass: which alias names have at least one matching qualifier use?
    let mut used_alias_names: HashSet<String> = HashSet::new();
    for alias in aliases.values() {
        let used = used_prefixes
            .iter()
            .any(|prefix| qualifier_matches_alias(prefix, alias, alias_case_check, dialect));
        if used {
            used_alias_names.insert(alias.name.clone());
        }
    }

    // Second pass: per source relation, count how many aliases point at it and
    // whether any of those aliases is used (self-join detection).
    let mut relation_alias_counts: HashMap<String, usize> = HashMap::new();
    let mut relations_with_used_alias: HashSet<String> = HashSet::new();
    for alias in aliases.values() {
        let Some(relation_key) = &alias.relation_key else {
            continue;
        };
        *relation_alias_counts
            .entry(relation_key.clone())
            .or_insert(0) += 1;
        if used_alias_names.contains(&alias.name) {
            relations_with_used_alias.insert(relation_key.clone());
        }
    }

    for alias in aliases.values() {
        if used_alias_names.contains(&alias.name) {
            continue;
        }

        // Skip aliases on a relation that is aliased multiple times with at
        // least one used alias — removing them would make the join ambiguous.
        let repeated_self_join_alias_exempt = alias.relation_key.as_ref().is_some_and(|key| {
            relation_alias_counts.get(key).copied().unwrap_or_default() > 1
                && relations_with_used_alias.contains(key)
        });

        if repeated_self_join_alias_exempt {
            continue;
        }

        issues.push(
            Issue::warning(
                issue_codes::LINT_AL_005,
                format!(
                    "Table alias '{}' is defined but never referenced.",
                    alias.name
                ),
            )
            .with_statement(statement_index),
        );
    }
}
360
361fn collect_identifier_prefixes_from_order_by(
362 order_by: &OrderBy,
363 dialect: Dialect,
364 prefixes: &mut HashSet<QualifierRef>,
365) {
366 if let OrderByKind::Expressions(order_by_exprs) = &order_by.kind {
367 for order_expr in order_by_exprs {
368 collect_identifier_prefixes(&order_expr.expr, dialect, prefixes);
369 }
370 }
371}
372
373fn collect_identifier_prefixes_from_query(
374 query: &Query,
375 dialect: Dialect,
376 prefixes: &mut HashSet<QualifierRef>,
377) {
378 if let Some(ref with) = query.with {
379 for cte in &with.cte_tables {
380 collect_identifier_prefixes_from_query(&cte.query, dialect, prefixes);
381 }
382 }
383
384 match query.body.as_ref() {
385 SetExpr::Select(select) => {
386 collect_identifier_prefixes_from_select(
387 select,
388 query.order_by.as_ref(),
389 dialect,
390 prefixes,
391 );
392 }
393 SetExpr::Query(q) => collect_identifier_prefixes_from_query(q, dialect, prefixes),
394 SetExpr::SetOperation { left, right, .. } => {
395 collect_identifier_prefixes_from_set_expr(left, dialect, prefixes);
396 collect_identifier_prefixes_from_set_expr(right, dialect, prefixes);
397 }
398 _ => {}
399 }
400}
401
402fn collect_identifier_prefixes_from_set_expr(
403 body: &SetExpr,
404 dialect: Dialect,
405 prefixes: &mut HashSet<QualifierRef>,
406) {
407 match body {
408 SetExpr::Select(select) => {
409 collect_identifier_prefixes_from_select(select, None, dialect, prefixes)
410 }
411 SetExpr::Query(q) => collect_identifier_prefixes_from_query(q, dialect, prefixes),
412 SetExpr::SetOperation { left, right, .. } => {
413 collect_identifier_prefixes_from_set_expr(left, dialect, prefixes);
414 collect_identifier_prefixes_from_set_expr(right, dialect, prefixes);
415 }
416 _ => {}
417 }
418}
419
/// Record qualifier prefixes from every clause of a SELECT where an alias can
/// be referenced: projection, PREWHERE, WHERE, GROUP BY, CLUSTER/DISTRIBUTE/
/// SORT BY, HAVING, QUALIFY, DISTINCT ON, named windows, lateral views,
/// CONNECT BY, the FROM clause's own expressions/join constraints, and an
/// optional ORDER BY passed in from the owning query.
fn collect_identifier_prefixes_from_select(
    select: &Select,
    order_by: Option<&OrderBy>,
    dialect: Dialect,
    prefixes: &mut HashSet<QualifierRef>,
) {
    for item in &select.projection {
        collect_identifier_prefixes_from_select_item(item, dialect, prefixes);
    }
    if let Some(ref prewhere) = select.prewhere {
        collect_identifier_prefixes(prewhere, dialect, prefixes);
    }
    if let Some(ref selection) = select.selection {
        collect_identifier_prefixes(selection, dialect, prefixes);
    }
    if let GroupByExpr::Expressions(exprs, _) = &select.group_by {
        for expr in exprs {
            collect_identifier_prefixes(expr, dialect, prefixes);
        }
    }
    for expr in &select.cluster_by {
        collect_identifier_prefixes(expr, dialect, prefixes);
    }
    for expr in &select.distribute_by {
        collect_identifier_prefixes(expr, dialect, prefixes);
    }
    for sort_expr in &select.sort_by {
        collect_identifier_prefixes(&sort_expr.expr, dialect, prefixes);
    }
    if let Some(ref having) = select.having {
        collect_identifier_prefixes(having, dialect, prefixes);
    }
    // QUALIFY references only count when the dialect rules say they do
    // (Redshift excludes them when a WHERE clause is present).
    if let Some(ref qualify) = select.qualify {
        if include_qualify_alias_references(dialect, select) {
            collect_identifier_prefixes(qualify, dialect, prefixes);
        }
    }
    if let Some(Distinct::On(exprs)) = &select.distinct {
        for expr in exprs {
            collect_identifier_prefixes(expr, dialect, prefixes);
        }
    }
    // WINDOW name AS (PARTITION BY ... ORDER BY ...) clauses.
    for named_window in &select.named_window {
        if let NamedWindowExpr::WindowSpec(spec) = &named_window.1 {
            for expr in &spec.partition_by {
                collect_identifier_prefixes(expr, dialect, prefixes);
            }
            for order_expr in &spec.order_by {
                collect_identifier_prefixes(&order_expr.expr, dialect, prefixes);
            }
        }
    }
    for lateral_view in &select.lateral_views {
        collect_identifier_prefixes(&lateral_view.lateral_view, dialect, prefixes);
    }
    for connect_by_kind in &select.connect_by {
        match connect_by_kind {
            ConnectByKind::ConnectBy { relationships, .. } => {
                for relationship in relationships {
                    collect_identifier_prefixes(relationship, dialect, prefixes);
                }
            }
            ConnectByKind::StartWith { condition, .. } => {
                collect_identifier_prefixes(condition, dialect, prefixes);
            }
        }
    }
    // FROM items themselves can reference aliases (lateral joins, UNNEST,
    // implicit array relations) as can ON constraints.
    for from_item in &select.from {
        collect_identifier_prefixes_from_table_factor(&from_item.relation, dialect, prefixes);
        for join in &from_item.joins {
            collect_identifier_prefixes_from_table_factor(&join.relation, dialect, prefixes);
            if let Some(constraint) = join_constraint(&join.join_operator) {
                collect_identifier_prefixes(constraint, dialect, prefixes);
            }
        }
    }
    if let Some(order_by) = order_by {
        collect_identifier_prefixes_from_order_by(order_by, dialect, prefixes);
    }
}
500
/// Record alias declarations found in a FROM-clause table factor.
///
/// Only aliases this rule is willing to flag are collected: plain table
/// aliases that differ from the table name, Databricks VALUES-derived-table
/// aliases, and lateral function aliases. Nested joins and
/// pivot/unpivot/match-recognize wrappers are traversed recursively.
fn collect_aliases(
    relation: &TableFactor,
    dialect: Dialect,
    aliases: &mut HashMap<String, AliasRef>,
) {
    match relation {
        TableFactor::Table {
            name,
            alias: Some(alias),
            args,
            ..
        } => {
            // Table-valued function calls keep their aliases (often required).
            if args.is_some() {
                return;
            }
            // BigQuery/Redshift `FROM alias.array_col` is a use of an earlier
            // alias, not a new aliased table.
            if is_implicit_array_relation_alias(dialect, name, aliases) {
                return;
            }
            let table_name = name.to_string();
            let alias_name = alias.name.value.clone();
            // Aliasing a table to its own name is handled by other rules.
            // NOTE(review): this comparison uses Unicode to_uppercase while
            // relation_key below uses to_ascii_uppercase — confirm intended.
            if alias_name.to_uppercase() != table_name.to_uppercase() {
                aliases.insert(
                    alias_name.clone(),
                    AliasRef {
                        name: alias_name,
                        quoted: alias.name.quote_style.is_some(),
                        relation_key: Some(table_name.to_ascii_uppercase()),
                    },
                );
            }
        }
        TableFactor::Derived {
            subquery,
            alias: Some(alias),
            ..
        } if derived_values_alias_can_be_unused(dialect, subquery) => {
            aliases.insert(
                alias.name.value.clone(),
                AliasRef {
                    name: alias.name.value.clone(),
                    quoted: alias.name.quote_style.is_some(),
                    relation_key: None,
                },
            );
        }
        TableFactor::Function {
            lateral: true,
            alias: Some(alias),
            ..
        } => {
            aliases.insert(
                alias.name.value.clone(),
                AliasRef {
                    name: alias.name.value.clone(),
                    quoted: alias.name.quote_style.is_some(),
                    relation_key: None,
                },
            );
        }
        TableFactor::NestedJoin {
            table_with_joins, ..
        } => {
            collect_aliases(&table_with_joins.relation, dialect, aliases);
            for join in &table_with_joins.joins {
                collect_aliases(&join.relation, dialect, aliases);
            }
        }
        TableFactor::Pivot { table, .. }
        | TableFactor::Unpivot { table, .. }
        | TableFactor::MatchRecognize { table, .. } => collect_aliases(table, dialect, aliases),
        _ => {}
    }
}
575
576fn collect_identifier_prefixes_from_select_item(
577 item: &SelectItem,
578 dialect: Dialect,
579 prefixes: &mut HashSet<QualifierRef>,
580) {
581 match item {
582 SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } => {
583 collect_identifier_prefixes(expr, dialect, prefixes);
584 }
585 SelectItem::QualifiedWildcard(name, _) => {
586 let name_str = name.to_string();
587 if let Some(prefix) = name_str.split('.').next() {
588 prefixes.insert(QualifierRef {
589 name: prefix
590 .trim_matches(|ch| matches!(ch, '"' | '`' | '\'' | '[' | ']'))
591 .to_string(),
592 quoted: prefix.starts_with('"')
593 || prefix.starts_with('`')
594 || prefix.starts_with('['),
595 });
596 }
597 }
598 _ => {}
599 }
600}
601
/// Recursively record the leading segment of every compound identifier
/// (`t.col` → `t`) reachable from an expression, plus dialect-specific cases
/// where a bare identifier is itself an alias reference (e.g. BigQuery
/// `TO_JSON_STRING(alias)`). Subqueries are descended into as well.
fn collect_identifier_prefixes(
    expr: &Expr,
    dialect: Dialect,
    prefixes: &mut HashSet<QualifierRef>,
) {
    match expr {
        // `a.b[...]` — the first part is the qualifier.
        Expr::CompoundIdentifier(parts) if parts.len() >= 2 => {
            prefixes.insert(QualifierRef {
                name: parts[0].value.clone(),
                quoted: parts[0].quote_style.is_some(),
            });
        }
        Expr::BinaryOp { left, right, .. } => {
            collect_identifier_prefixes(left, dialect, prefixes);
            collect_identifier_prefixes(right, dialect, prefixes);
        }
        Expr::UnaryOp { expr: inner, .. } => collect_identifier_prefixes(inner, dialect, prefixes),
        Expr::Nested(inner) => collect_identifier_prefixes(inner, dialect, prefixes),
        Expr::Function(func) => {
            let function_name = function_name(func);
            if let FunctionArguments::List(arg_list) = &func.args {
                for (index, arg) in arg_list.args.iter().enumerate() {
                    match arg {
                        FunctionArg::Unnamed(FunctionArgExpr::Expr(e))
                        | FunctionArg::Named {
                            arg: FunctionArgExpr::Expr(e),
                            ..
                        } => {
                            collect_identifier_prefixes(e, dialect, prefixes);
                            // Some dialect functions take a bare table alias
                            // as an argument; that identifier counts as a use.
                            if function_arg_is_table_alias_reference(
                                dialect,
                                function_name.as_str(),
                                index,
                            ) {
                                if let Expr::Identifier(ident) = e {
                                    prefixes.insert(QualifierRef {
                                        name: ident.value.clone(),
                                        quoted: ident.quote_style.is_some(),
                                    });
                                }
                            }
                        }
                        _ => {}
                    }
                }
            }
            if let Some(filter) = &func.filter {
                collect_identifier_prefixes(filter, dialect, prefixes);
            }
            for order_expr in &func.within_group {
                collect_identifier_prefixes(&order_expr.expr, dialect, prefixes);
            }
            // OVER (PARTITION BY ... ORDER BY ...) window specs.
            if let Some(WindowType::WindowSpec(spec)) = &func.over {
                for expr in &spec.partition_by {
                    collect_identifier_prefixes(expr, dialect, prefixes);
                }
                for order_expr in &spec.order_by {
                    collect_identifier_prefixes(&order_expr.expr, dialect, prefixes);
                }
            }
        }
        Expr::IsNull(inner)
        | Expr::IsNotNull(inner)
        | Expr::Cast { expr: inner, .. }
        | Expr::JsonAccess { value: inner, .. } => {
            collect_identifier_prefixes(inner, dialect, prefixes);
        }
        Expr::Case {
            operand,
            conditions,
            else_result,
            ..
        } => {
            if let Some(op) = operand {
                collect_identifier_prefixes(op, dialect, prefixes);
            }
            for case_when in conditions {
                collect_identifier_prefixes(&case_when.condition, dialect, prefixes);
                collect_identifier_prefixes(&case_when.result, dialect, prefixes);
            }
            if let Some(el) = else_result {
                collect_identifier_prefixes(el, dialect, prefixes);
            }
        }
        Expr::InList { expr, list, .. } => {
            collect_identifier_prefixes(expr, dialect, prefixes);
            for item in list {
                collect_identifier_prefixes(item, dialect, prefixes);
            }
        }
        Expr::InSubquery { expr, subquery, .. } => {
            collect_identifier_prefixes(expr, dialect, prefixes);
            collect_identifier_prefixes_from_query(subquery, dialect, prefixes);
        }
        Expr::AnyOp { left, right, .. } | Expr::AllOp { left, right, .. } => {
            collect_identifier_prefixes(left, dialect, prefixes);
            collect_identifier_prefixes(right, dialect, prefixes);
        }
        Expr::Subquery(subquery) | Expr::Exists { subquery, .. } => {
            collect_identifier_prefixes_from_query(subquery, dialect, prefixes);
        }
        Expr::Between {
            expr, low, high, ..
        } => {
            collect_identifier_prefixes(expr, dialect, prefixes);
            collect_identifier_prefixes(low, dialect, prefixes);
            collect_identifier_prefixes(high, dialect, prefixes);
        }
        // Other expression kinds cannot carry alias-qualified identifiers
        // that this rule tracks.
        _ => {}
    }
}
713
/// Record qualifier prefixes that appear inside a FROM-clause table factor
/// itself: implicit array relations, lateral subqueries, table-function
/// arguments, UNNEST inputs, JSON/XML table expressions, nested joins, and
/// the expressions of PIVOT/UNPIVOT/MATCH_RECOGNIZE wrappers.
fn collect_identifier_prefixes_from_table_factor(
    table_factor: &TableFactor,
    dialect: Dialect,
    prefixes: &mut HashSet<QualifierRef>,
) {
    match table_factor {
        TableFactor::Table { name, .. } => {
            // BigQuery/Redshift `FROM alias.array_col`: the first path
            // segment references an earlier alias.
            if let Some(prefix) = implicit_array_relation_prefix(dialect, name) {
                prefixes.insert(prefix);
            }
        }
        // Only LATERAL subqueries can see sibling aliases.
        TableFactor::Derived {
            lateral: true,
            subquery,
            ..
        } => collect_identifier_prefixes_from_query(subquery, dialect, prefixes),
        TableFactor::TableFunction { expr, .. } => {
            collect_identifier_prefixes(expr, dialect, prefixes);
        }
        TableFactor::Function { args, .. } => {
            for arg in args {
                collect_identifier_prefixes_from_function_arg(arg, dialect, prefixes);
            }
        }
        TableFactor::UNNEST { array_exprs, .. } => {
            for expr in array_exprs {
                collect_identifier_prefixes(expr, dialect, prefixes);
            }
        }
        TableFactor::JsonTable { json_expr, .. } | TableFactor::OpenJsonTable { json_expr, .. } => {
            collect_identifier_prefixes(json_expr, dialect, prefixes);
        }
        TableFactor::NestedJoin {
            table_with_joins, ..
        } => {
            collect_identifier_prefixes_from_table_factor(
                &table_with_joins.relation,
                dialect,
                prefixes,
            );
            for join in &table_with_joins.joins {
                collect_identifier_prefixes_from_table_factor(&join.relation, dialect, prefixes);
                if let Some(constraint) = join_constraint(&join.join_operator) {
                    collect_identifier_prefixes(constraint, dialect, prefixes);
                }
            }
        }
        TableFactor::Pivot {
            table,
            aggregate_functions,
            value_column,
            default_on_null,
            ..
        } => {
            collect_identifier_prefixes_from_table_factor(table, dialect, prefixes);
            for expr_with_alias in aggregate_functions {
                collect_identifier_prefixes(&expr_with_alias.expr, dialect, prefixes);
            }
            for expr in value_column {
                collect_identifier_prefixes(expr, dialect, prefixes);
            }
            if let Some(expr) = default_on_null {
                collect_identifier_prefixes(expr, dialect, prefixes);
            }
        }
        TableFactor::Unpivot {
            table,
            value,
            columns,
            ..
        } => {
            collect_identifier_prefixes_from_table_factor(table, dialect, prefixes);
            collect_identifier_prefixes(value, dialect, prefixes);
            for expr_with_alias in columns {
                collect_identifier_prefixes(&expr_with_alias.expr, dialect, prefixes);
            }
        }
        TableFactor::MatchRecognize {
            table,
            partition_by,
            order_by,
            measures,
            ..
        } => {
            collect_identifier_prefixes_from_table_factor(table, dialect, prefixes);
            for expr in partition_by {
                collect_identifier_prefixes(expr, dialect, prefixes);
            }
            for order in order_by {
                collect_identifier_prefixes(&order.expr, dialect, prefixes);
            }
            for measure in measures {
                collect_identifier_prefixes(&measure.expr, dialect, prefixes);
            }
        }
        TableFactor::XmlTable { row_expression, .. } => {
            collect_identifier_prefixes(row_expression, dialect, prefixes);
        }
        _ => {}
    }
}
815
816fn collect_identifier_prefixes_from_function_arg(
817 arg: &FunctionArg,
818 dialect: Dialect,
819 prefixes: &mut HashSet<QualifierRef>,
820) {
821 match arg {
822 FunctionArg::Unnamed(FunctionArgExpr::Expr(expr))
823 | FunctionArg::Named {
824 arg: FunctionArgExpr::Expr(expr),
825 ..
826 } => collect_identifier_prefixes(expr, dialect, prefixes),
827 _ => {}
828 }
829}
830
831fn function_name(function: &Function) -> String {
832 function
833 .name
834 .0
835 .last()
836 .and_then(ObjectNamePart::as_ident)
837 .map(|ident| ident.value.to_ascii_uppercase())
838 .unwrap_or_default()
839}
840
841fn function_arg_is_table_alias_reference(
842 dialect: Dialect,
843 function_name: &str,
844 arg_index: usize,
845) -> bool {
846 matches!(dialect, Dialect::Bigquery) && arg_index == 0 && function_name == "TO_JSON_STRING"
847}
848
849fn include_qualify_alias_references(dialect: Dialect, select: &Select) -> bool {
850 !matches!(dialect, Dialect::Redshift) || select.selection.is_none()
853}
854
855fn derived_values_alias_can_be_unused(dialect: Dialect, subquery: &Query) -> bool {
856 matches!(dialect, Dialect::Databricks) && matches!(subquery.body.as_ref(), SetExpr::Values(_))
860}
861
/// Redshift heuristic: does the QUALIFY expression contain a bare identifier
/// of the form `<alias>_<suffix>` (matched exactly or ASCII-case-insensitively)?
/// Such identifiers are treated as references to the alias. Compound
/// identifiers are intentionally excluded — plain `t.col` qualifiers are
/// already collected elsewhere. Recurses through the expression forms that
/// can appear in QUALIFY.
fn redshift_qualify_uses_alias_prefixed_identifier(expr: &Expr, alias: &str) -> bool {
    match expr {
        Expr::Identifier(identifier) => {
            let value = identifier.value.as_str();
            // Exact prefix match, or case-insensitive match, followed by `_`.
            value
                .strip_prefix(alias)
                .is_some_and(|suffix| suffix.starts_with('_'))
                || value
                    .to_ascii_uppercase()
                    .strip_prefix(&alias.to_ascii_uppercase())
                    .is_some_and(|suffix| suffix.starts_with('_'))
        }
        Expr::CompoundIdentifier(_) => false,
        Expr::BinaryOp { left, right, .. }
        | Expr::AnyOp { left, right, .. }
        | Expr::AllOp { left, right, .. } => {
            redshift_qualify_uses_alias_prefixed_identifier(left, alias)
                || redshift_qualify_uses_alias_prefixed_identifier(right, alias)
        }
        Expr::UnaryOp { expr: inner, .. }
        | Expr::Nested(inner)
        | Expr::IsNull(inner)
        | Expr::IsNotNull(inner)
        | Expr::Cast { expr: inner, .. } => {
            redshift_qualify_uses_alias_prefixed_identifier(inner, alias)
        }
        Expr::InList { expr, list, .. } => {
            redshift_qualify_uses_alias_prefixed_identifier(expr, alias)
                || list
                    .iter()
                    .any(|item| redshift_qualify_uses_alias_prefixed_identifier(item, alias))
        }
        Expr::Between {
            expr, low, high, ..
        } => {
            redshift_qualify_uses_alias_prefixed_identifier(expr, alias)
                || redshift_qualify_uses_alias_prefixed_identifier(low, alias)
                || redshift_qualify_uses_alias_prefixed_identifier(high, alias)
        }
        Expr::Case {
            operand,
            conditions,
            else_result,
            ..
        } => {
            operand
                .as_ref()
                .is_some_and(|inner| redshift_qualify_uses_alias_prefixed_identifier(inner, alias))
                || conditions.iter().any(|when| {
                    redshift_qualify_uses_alias_prefixed_identifier(&when.condition, alias)
                        || redshift_qualify_uses_alias_prefixed_identifier(&when.result, alias)
                })
                || else_result.as_ref().is_some_and(|inner| {
                    redshift_qualify_uses_alias_prefixed_identifier(inner, alias)
                })
        }
        Expr::Function(function) => {
            // Check arguments, FILTER, WITHIN GROUP, and OVER window spec.
            let args_match = if let FunctionArguments::List(arguments) = &function.args {
                arguments.args.iter().any(|arg| match arg {
                    FunctionArg::Unnamed(FunctionArgExpr::Expr(expr))
                    | FunctionArg::Named {
                        arg: FunctionArgExpr::Expr(expr),
                        ..
                    } => redshift_qualify_uses_alias_prefixed_identifier(expr, alias),
                    _ => false,
                })
            } else {
                false
            };
            let filter_match = function.filter.as_ref().is_some_and(|filter| {
                redshift_qualify_uses_alias_prefixed_identifier(filter, alias)
            });
            let within_group_match = function.within_group.iter().any(|order_expr| {
                redshift_qualify_uses_alias_prefixed_identifier(&order_expr.expr, alias)
            });
            let over_match = match &function.over {
                Some(WindowType::WindowSpec(spec)) => {
                    spec.partition_by
                        .iter()
                        .any(|expr| redshift_qualify_uses_alias_prefixed_identifier(expr, alias))
                        || spec.order_by.iter().any(|order_expr| {
                            redshift_qualify_uses_alias_prefixed_identifier(&order_expr.expr, alias)
                        })
                }
                _ => false,
            };
            args_match || filter_match || within_group_match || over_match
        }
        _ => false,
    }
}
953
954fn implicit_array_relation_prefix(dialect: Dialect, name: &ObjectName) -> Option<QualifierRef> {
955 if !matches!(dialect, Dialect::Bigquery | Dialect::Redshift) {
956 return None;
957 }
958 if name.0.len() != 2 {
959 return None;
960 }
961 let first = name.0.first()?.as_ident()?;
962 Some(QualifierRef {
963 name: first.value.clone(),
964 quoted: first.quote_style.is_some(),
965 })
966}
967
968fn is_implicit_array_relation_alias(
969 dialect: Dialect,
970 name: &ObjectName,
971 aliases: &HashMap<String, AliasRef>,
972) -> bool {
973 let Some(prefix) = implicit_array_relation_prefix(dialect, name) else {
974 return false;
975 };
976 aliases
977 .values()
978 .any(|alias| alias.name.eq_ignore_ascii_case(&prefix.name))
979}
980
981fn qualifier_matches_alias(
982 qualifier: &QualifierRef,
983 alias: &AliasRef,
984 alias_case_check: AliasCaseCheck,
985 dialect: Dialect,
986) -> bool {
987 match alias_case_check {
988 AliasCaseCheck::CaseInsensitive => qualifier.name.eq_ignore_ascii_case(&alias.name),
989 AliasCaseCheck::CaseSensitive => qualifier.name == alias.name,
990 AliasCaseCheck::Dialect => {
991 normalize_identifier_for_dialect(&qualifier.name, qualifier.quoted, dialect)
992 == normalize_identifier_for_dialect(&alias.name, alias.quoted, dialect)
993 }
994 AliasCaseCheck::QuotedCsNakedUpper => {
995 normalize_case_for_mode(qualifier, alias_case_check)
996 == normalize_case_for_mode_alias(alias, alias_case_check)
997 }
998 AliasCaseCheck::QuotedCsNakedLower => {
999 normalize_case_for_mode(qualifier, alias_case_check)
1000 == normalize_case_for_mode_alias(alias, alias_case_check)
1001 }
1002 }
1003}
1004
1005fn normalize_identifier_for_dialect(identifier: &str, quoted: bool, dialect: Dialect) -> String {
1006 if quoted && !quoted_identifiers_case_insensitive_for_dialect(dialect) {
1007 identifier.to_string()
1008 } else {
1009 normalize_naked_identifier_for_dialect(identifier, dialect)
1010 }
1011}
1012
1013fn normalize_naked_identifier_for_dialect(identifier: &str, dialect: Dialect) -> String {
1014 if matches!(
1015 dialect,
1016 Dialect::Postgres
1017 | Dialect::Redshift
1018 | Dialect::Mysql
1019 | Dialect::Sqlite
1020 | Dialect::Mssql
1021 | Dialect::Clickhouse
1022 ) {
1023 identifier.to_ascii_lowercase()
1024 } else {
1025 identifier.to_ascii_uppercase()
1026 }
1027}
1028
/// Dialects in which even quoted identifiers compare case-insensitively,
/// so quoting does not preserve case for alias matching.
fn quoted_identifiers_case_insensitive_for_dialect(dialect: Dialect) -> bool {
    matches!(
        dialect,
        Dialect::Duckdb | Dialect::Hive | Dialect::Sqlite | Dialect::Databricks
    )
}
1035
1036fn normalize_case_for_mode(reference: &QualifierRef, mode: AliasCaseCheck) -> String {
1037 match mode {
1038 AliasCaseCheck::QuotedCsNakedUpper => {
1039 if reference.quoted {
1040 reference.name.clone()
1041 } else {
1042 reference.name.to_ascii_uppercase()
1043 }
1044 }
1045 AliasCaseCheck::QuotedCsNakedLower => {
1046 if reference.quoted {
1047 reference.name.clone()
1048 } else {
1049 reference.name.to_ascii_lowercase()
1050 }
1051 }
1052 _ => reference.name.clone(),
1053 }
1054}
1055
1056fn normalize_case_for_mode_alias(alias: &AliasRef, mode: AliasCaseCheck) -> String {
1057 match mode {
1058 AliasCaseCheck::QuotedCsNakedUpper => {
1059 if alias.quoted {
1060 alias.name.clone()
1061 } else {
1062 alias.name.to_ascii_uppercase()
1063 }
1064 }
1065 AliasCaseCheck::QuotedCsNakedLower => {
1066 if alias.quoted {
1067 alias.name.clone()
1068 } else {
1069 alias.name.to_ascii_lowercase()
1070 }
1071 }
1072 _ => alias.name.clone(),
1073 }
1074}
1075
1076fn join_constraint(op: &JoinOperator) -> Option<&Expr> {
1077 let constraint = match op {
1078 JoinOperator::Join(c)
1079 | JoinOperator::Left(c)
1080 | JoinOperator::Inner(c)
1081 | JoinOperator::Right(c)
1082 | JoinOperator::LeftOuter(c)
1083 | JoinOperator::RightOuter(c)
1084 | JoinOperator::FullOuter(c)
1085 | JoinOperator::LeftSemi(c)
1086 | JoinOperator::RightSemi(c)
1087 | JoinOperator::LeftAnti(c)
1088 | JoinOperator::RightAnti(c) => c,
1089 _ => return None,
1090 };
1091 match constraint {
1092 JoinConstraint::On(expr) => Some(expr),
1093 _ => None,
1094 }
1095}
1096
1097fn check_table_factor_subqueries(
1098 relation: &TableFactor,
1099 alias_case_check: AliasCaseCheck,
1100 ctx: &LintContext,
1101 issues: &mut Vec<Issue>,
1102) {
1103 match relation {
1104 TableFactor::Derived { subquery, .. } => {
1105 check_query(subquery, alias_case_check, ctx, issues);
1106 }
1107 TableFactor::NestedJoin {
1108 table_with_joins, ..
1109 } => {
1110 check_table_factor_subqueries(
1111 &table_with_joins.relation,
1112 alias_case_check,
1113 ctx,
1114 issues,
1115 );
1116 for join in &table_with_joins.joins {
1117 check_table_factor_subqueries(&join.relation, alias_case_check, ctx, issues);
1118 }
1119 }
1120 TableFactor::Pivot { table, .. }
1121 | TableFactor::Unpivot { table, .. }
1122 | TableFactor::MatchRecognize { table, .. } => {
1123 check_table_factor_subqueries(table, alias_case_check, ctx, issues);
1124 }
1125 _ => {}
1126 }
1127}
1128
/// A `FROM`/`JOIN` item of the form `<table> [AS] <alias>` found by the
/// legacy token scanner, recorded as byte offsets into the original SQL.
#[derive(Debug, Clone)]
struct LegacySimpleTableAliasDecl {
    // Byte offset just past the table expression — presumably where an
    // alias-removal edit span begins (see `al005_legacy_autofix_edits`).
    table_end: usize,
    // Byte offset just past the alias (including a trailing column list,
    // when one was parsed) — the end of the removable span.
    alias_end: usize,
    // Alias text as written, without surrounding quote characters.
    alias: String,
    // Whether the alias was written as a quoted identifier/string.
    quoted: bool,
}
1136
/// A tokenizer token paired with the byte offset just past its end in the
/// source SQL string.
#[derive(Clone)]
struct LegacyLocatedToken {
    token: Token,
    // Exclusive end offset of this token within the original SQL.
    end: usize,
}
1142
/// Compute the byte spans `(start, end)` that the AL005 autofix should
/// delete: for every table alias that is never referenced as a qualifier,
/// the span covering its ` [AS] alias` tail on each declaration.
///
/// This mirrors the legacy (token-scan) implementation rather than the AST
/// walk, so its skip rules must stay in lockstep with legacy behavior.
fn al005_legacy_autofix_edits(
    sql: &str,
    dialect: Dialect,
    alias_case_check: AliasCaseCheck,
) -> Vec<(usize, usize)> {
    let Some(decls) = legacy_collect_simple_table_alias_declarations(sql, dialect) else {
        return Vec::new();
    };
    if decls.is_empty() {
        return Vec::new();
    }

    let mut seen_aliases = HashSet::new();
    let mut removals = Vec::new();
    for decl in &decls {
        // Handle each alias spelling (case-insensitively) once. The insert
        // happens before the skip checks, so an alias skipped below is also
        // not re-examined when a later declaration repeats it.
        let alias_key = decl.alias.to_ascii_lowercase();
        if !seen_aliases.insert(alias_key.clone()) {
            continue;
        }
        // Legacy parity: never autofix aliases that look like SQL keywords
        // (the scanner can mis-capture keywords as aliases) or generated
        // placeholders such as t1, t2, ...
        if legacy_is_sql_keyword(&decl.alias) || legacy_is_generated_alias_identifier(&decl.alias) {
            continue;
        }
        // Keep the alias when any `alias.`-style qualifier in the SQL
        // matches it under the configured case-comparison mode.
        if legacy_contains_alias_qualifier_dialect(
            sql,
            &decl.alias,
            decl.quoted,
            dialect,
            alias_case_check,
        ) {
            continue;
        }

        // Remove every declaration of this unused alias, not just the first.
        removals.extend(
            decls
                .iter()
                .filter(|candidate| candidate.alias.eq_ignore_ascii_case(&alias_key))
                .map(|candidate| (candidate.table_end, candidate.alias_end)),
        );
    }

    removals.sort_unstable();
    removals.dedup();
    // Defensively drop empty or inverted spans before handing them out.
    removals.retain(|(start, end)| start < end);
    removals
}
1188
/// Token-scan `sql` for simple table items following FROM/JOIN keywords and
/// record any alias declarations. Returns `None` only when tokenization (or
/// span-to-offset mapping) fails.
fn legacy_collect_simple_table_alias_declarations(
    sql: &str,
    dialect: Dialect,
) -> Option<Vec<LegacySimpleTableAliasDecl>> {
    let tokens = legacy_tokenize_with_offsets(sql, dialect)?;
    let mut out = Vec::new();
    let mut index = 0usize;

    while index < tokens.len() {
        // Skip ahead to the next FROM or JOIN keyword.
        if !legacy_token_matches_keyword(&tokens[index].token, "FROM")
            && !legacy_token_matches_keyword(&tokens[index].token, "JOIN")
        {
            index += 1;
            continue;
        }

        let Some(next) = legacy_next_non_trivia_token(&tokens, index + 1) else {
            index += 1;
            continue;
        };
        // Parse the first table item after the keyword. The helper always
        // returns a cursor past its in-range start, so the loop progresses.
        index = legacy_try_parse_table_item(&tokens, next, dialect, &mut out);

        // A FROM clause may list several comma-separated table items; keep
        // consuming `, <item>` pairs until something else appears.
        while let Some(comma_index) = legacy_next_non_trivia_token(&tokens, index) {
            if !matches!(tokens[comma_index].token, Token::Comma) {
                break;
            }
            let Some(next_item) = legacy_next_non_trivia_token(&tokens, comma_index + 1) else {
                index = comma_index + 1;
                break;
            };
            index = legacy_try_parse_table_item(&tokens, next_item, dialect, &mut out);
        }
    }

    Some(out)
}
1227
/// Parse one table item beginning at `start` (the first meaningful token
/// after FROM/JOIN or a comma) and, when it carries an alias, push the
/// declaration into `out`.
///
/// Returns the token cursor to resume scanning from; for any in-range
/// `start` the returned cursor is strictly greater, so callers always make
/// progress.
fn legacy_try_parse_table_item(
    tokens: &[LegacyLocatedToken],
    start: usize,
    dialect: Dialect,
    out: &mut Vec<LegacySimpleTableAliasDecl>,
) -> usize {
    if start >= tokens.len() {
        return start;
    }

    // Case 1: `LATERAL func(...) [AS] alias`.
    if legacy_token_matches_keyword(&tokens[start].token, "LATERAL") {
        if let Some(func_end) = legacy_skip_lateral_function_call(tokens, start + 1) {
            let Some(mut alias_index) = legacy_next_non_trivia_token(tokens, func_end) else {
                return func_end;
            };
            // Optional AS keyword before the alias.
            if legacy_token_matches_keyword(&tokens[alias_index].token, "AS") {
                let Some(next_index) = legacy_next_non_trivia_token(tokens, alias_index + 1) else {
                    return alias_index + 1;
                };
                alias_index = next_index;
            }
            if let Some((alias_value, alias_quoted)) =
                legacy_token_any_identifier(&tokens[alias_index].token)
            {
                out.push(LegacySimpleTableAliasDecl {
                    // func_end points just past the closing paren, so
                    // func_end - 1 is the `)` token itself.
                    table_end: tokens[func_end - 1].end,
                    alias_end: tokens[alias_index].end,
                    alias: alias_value.to_string(),
                    quoted: alias_quoted,
                });
                return alias_index + 1;
            }
            return func_end;
        }
        return start + 1;
    }

    // Case 2 (Databricks only): `(VALUES ...) [AS] alias [(col, ...)]`.
    if matches!(tokens[start].token, Token::LParen)
        && matches!(dialect, Dialect::Databricks)
        && legacy_parenthesized_relation_starts_with_values(tokens, start)
    {
        let Some(paren_end) = legacy_skip_parenthesized(tokens, start) else {
            return start + 1;
        };
        let table_end = tokens[paren_end - 1].end;

        let Some(mut alias_index) = legacy_next_non_trivia_token(tokens, paren_end) else {
            return paren_end;
        };
        if legacy_token_matches_keyword(&tokens[alias_index].token, "AS") {
            let Some(next_index) = legacy_next_non_trivia_token(tokens, alias_index + 1) else {
                return alias_index + 1;
            };
            alias_index = next_index;
        }
        let Some((alias_value, alias_quoted)) =
            legacy_token_any_identifier(&tokens[alias_index].token)
        else {
            return paren_end;
        };

        // An optional column list `(a, b)` extends the removable span past
        // the alias itself.
        let mut alias_end = tokens[alias_index].end;
        let mut next_cursor = alias_index + 1;
        if let Some(cols_start) = legacy_next_non_trivia_token(tokens, alias_index + 1) {
            if matches!(tokens[cols_start].token, Token::LParen) {
                if let Some(cols_end) = legacy_skip_parenthesized(tokens, cols_start) {
                    alias_end = tokens[cols_end - 1].end;
                    next_cursor = cols_end;
                }
            }
        }

        out.push(LegacySimpleTableAliasDecl {
            table_end,
            alias_end,
            alias: alias_value.to_string(),
            quoted: alias_quoted,
        });
        return next_cursor;
    }

    // Case 3: a plain (possibly schema-qualified) table name.
    if legacy_token_any_identifier(&tokens[start].token).is_none() {
        return start + 1;
    }

    let mut table_end = tokens[start].end;
    let mut cursor = start + 1;

    // Consume `schema.table`-style qualification: repeated `.ident` pairs.
    while let Some(dot_index) = legacy_next_non_trivia_token(tokens, cursor) {
        if !matches!(tokens[dot_index].token, Token::Period) {
            break;
        }
        let Some(next_index) = legacy_next_non_trivia_token(tokens, dot_index + 1) else {
            break;
        };
        if legacy_token_any_identifier(&tokens[next_index].token).is_none() {
            break;
        }
        table_end = tokens[next_index].end;
        cursor = next_index + 1;
    }

    let Some(mut alias_index) = legacy_next_non_trivia_token(tokens, cursor) else {
        return cursor;
    };
    if legacy_token_matches_keyword(&tokens[alias_index].token, "AS") {
        let Some(next_index) = legacy_next_non_trivia_token(tokens, alias_index + 1) else {
            return alias_index + 1;
        };
        alias_index = next_index;
    }

    // NOTE: a trailing keyword (e.g. WHERE) also satisfies this identifier
    // check; such captures are filtered later via `legacy_is_sql_keyword`.
    let Some((alias_value, alias_quoted)) = legacy_token_any_identifier(&tokens[alias_index].token)
    else {
        return cursor;
    };

    out.push(LegacySimpleTableAliasDecl {
        table_end,
        alias_end: tokens[alias_index].end,
        alias: alias_value.to_string(),
        quoted: alias_quoted,
    });
    alias_index + 1
}
1359
1360fn legacy_tokenize_with_offsets(sql: &str, dialect: Dialect) -> Option<Vec<LegacyLocatedToken>> {
1361 let dialect = dialect.to_sqlparser_dialect();
1362 let mut tokenizer = Tokenizer::new(dialect.as_ref(), sql);
1363 let tokens = tokenizer.tokenize_with_location().ok()?;
1364
1365 let mut out = Vec::with_capacity(tokens.len());
1366 for token in tokens {
1367 let (_, end) = legacy_token_with_span_offsets(sql, &token)?;
1368 out.push(LegacyLocatedToken {
1369 token: token.token,
1370 end,
1371 });
1372 }
1373 Some(out)
1374}
1375
1376fn legacy_token_with_span_offsets(sql: &str, token: &TokenWithSpan) -> Option<(usize, usize)> {
1377 let start = legacy_line_col_to_offset(
1378 sql,
1379 token.span.start.line as usize,
1380 token.span.start.column as usize,
1381 )?;
1382 let end = legacy_line_col_to_offset(
1383 sql,
1384 token.span.end.line as usize,
1385 token.span.end.column as usize,
1386 )?;
1387 Some((start, end))
1388}
1389
fn legacy_line_col_to_offset(sql: &str, line: usize, column: usize) -> Option<usize> {
    // Convert a 1-based (line, column) position into a byte offset into
    // `sql`. Columns count characters, not bytes. The position exactly one
    // past the final character maps to `sql.len()`; any other out-of-range
    // position (including line or column 0) yields None.
    if line == 0 || column == 0 {
        return None;
    }

    let (mut cur_line, mut cur_col) = (1usize, 1usize);
    for (offset, ch) in sql.char_indices() {
        if (cur_line, cur_col) == (line, column) {
            return Some(offset);
        }
        if ch == '\n' {
            cur_line += 1;
            cur_col = 1;
        } else {
            cur_col += 1;
        }
    }

    // End-of-input position (one past the last character).
    ((cur_line, cur_col) == (line, column)).then_some(sql.len())
}
1417
1418fn legacy_next_non_trivia_token(tokens: &[LegacyLocatedToken], mut start: usize) -> Option<usize> {
1419 while start < tokens.len() {
1420 if !legacy_is_trivia_token(&tokens[start].token) {
1421 return Some(start);
1422 }
1423 start += 1;
1424 }
1425 None
1426}
1427
1428fn legacy_skip_lateral_function_call(tokens: &[LegacyLocatedToken], start: usize) -> Option<usize> {
1431 let func_index = legacy_next_non_trivia_token(tokens, start)?;
1433 legacy_token_any_identifier(&tokens[func_index].token)?;
1434 let lparen_index = legacy_next_non_trivia_token(tokens, func_index + 1)?;
1435 if !matches!(tokens[lparen_index].token, Token::LParen) {
1436 return None;
1437 }
1438 let mut depth = 1u32;
1440 let mut cursor = lparen_index + 1;
1441 while cursor < tokens.len() && depth > 0 {
1442 match &tokens[cursor].token {
1443 Token::LParen => depth += 1,
1444 Token::RParen => depth -= 1,
1445 _ => {}
1446 }
1447 cursor += 1;
1448 }
1449 if depth == 0 {
1450 Some(cursor)
1451 } else {
1452 None
1453 }
1454}
1455
1456fn legacy_parenthesized_relation_starts_with_values(
1457 tokens: &[LegacyLocatedToken],
1458 lparen_index: usize,
1459) -> bool {
1460 let Some(first_inner) = legacy_next_non_trivia_token(tokens, lparen_index + 1) else {
1461 return false;
1462 };
1463 legacy_token_matches_keyword(&tokens[first_inner].token, "VALUES")
1464}
1465
1466fn legacy_skip_parenthesized(tokens: &[LegacyLocatedToken], lparen_index: usize) -> Option<usize> {
1467 if !matches!(tokens.get(lparen_index)?.token, Token::LParen) {
1468 return None;
1469 }
1470 let mut depth = 1u32;
1471 let mut cursor = lparen_index + 1;
1472 while cursor < tokens.len() && depth > 0 {
1473 match &tokens[cursor].token {
1474 Token::LParen => depth += 1,
1475 Token::RParen => depth -= 1,
1476 _ => {}
1477 }
1478 cursor += 1;
1479 }
1480 if depth == 0 {
1481 Some(cursor)
1482 } else {
1483 None
1484 }
1485}
1486
1487fn legacy_is_trivia_token(token: &Token) -> bool {
1488 matches!(
1489 token,
1490 Token::Whitespace(
1491 Whitespace::Space
1492 | Whitespace::Newline
1493 | Whitespace::Tab
1494 | Whitespace::SingleLineComment { .. }
1495 | Whitespace::MultiLineComment(_)
1496 )
1497 )
1498}
1499
1500fn legacy_token_matches_keyword(token: &Token, keyword: &str) -> bool {
1501 matches!(token, Token::Word(word) if word.value.eq_ignore_ascii_case(keyword))
1502}
1503
1504fn legacy_token_any_identifier(token: &Token) -> Option<(&str, bool)> {
1507 match token {
1508 Token::Word(word) if legacy_is_simple_identifier(&word.value) => {
1509 if word.quote_style.is_some() {
1510 Some((&word.value, true))
1511 } else {
1512 Some((&word.value, false))
1513 }
1514 }
1515 Token::SingleQuotedString(s) => Some((s.as_str(), true)),
1516 _ => None,
1517 }
1518}
1519
/// Legacy scan: does `sql` contain any `ref.`-style qualifier whose
/// identifier matches `alias` under the configured case-comparison mode?
///
/// This is a raw byte scan over the SQL text (handling `"..."`, `` `...` ``
/// and `[...]` quoting plus naked ASCII identifiers) rather than an AST
/// walk — kept byte-for-byte for legacy autofix parity. As a Redshift
/// special case, a bare `alias_column` identifier inside a QUALIFY clause
/// also counts as usage.
fn legacy_contains_alias_qualifier_dialect(
    sql: &str,
    alias: &str,
    alias_quoted: bool,
    dialect: Dialect,
    alias_case_check: AliasCaseCheck,
) -> bool {
    if matches!(dialect, Dialect::Redshift)
        && legacy_redshift_qualify_uses_alias_prefixed_identifier(sql, alias)
    {
        return true;
    }

    let alias_bytes = alias.as_bytes();
    if alias_bytes.is_empty() {
        return false;
    }

    // Fold the declared alias once, up front, per the comparison mode.
    let normalized_alias = match alias_case_check {
        AliasCaseCheck::Dialect => normalize_identifier_for_dialect(alias, alias_quoted, dialect),
        AliasCaseCheck::CaseInsensitive => alias.to_ascii_lowercase(),
        AliasCaseCheck::CaseSensitive => alias.to_string(),
        AliasCaseCheck::QuotedCsNakedUpper => {
            if alias_quoted {
                alias.to_string()
            } else {
                alias.to_ascii_uppercase()
            }
        }
        AliasCaseCheck::QuotedCsNakedLower => {
            if alias_quoted {
                alias.to_string()
            } else {
                alias.to_ascii_lowercase()
            }
        }
    };

    // Scan the raw bytes for identifiers followed by a `.` and compare each
    // one against the folded alias.
    let bytes = sql.as_bytes();
    let mut index = 0usize;
    while index < bytes.len() {
        let (ref_name, ref_quoted, ref_end) = if index < bytes.len()
            && (bytes[index] == b'"' || bytes[index] == b'`' || bytes[index] == b'[')
        {
            // Quoted identifier: consume up to the matching close character.
            let close_char = match bytes[index] {
                b'"' => b'"',
                b'`' => b'`',
                b'[' => b']',
                _ => unreachable!(),
            };
            let start = index + 1;
            let mut end = start;
            while end < bytes.len() && bytes[end] != close_char {
                end += 1;
            }
            if end >= bytes.len() {
                // Unterminated quote: step past the opener and keep going.
                index += 1;
                continue;
            }
            let name = &sql[start..end];
            (name.to_string(), true, end + 1)
        } else if index < bytes.len() && legacy_is_ascii_ident_start(bytes[index]) {
            // Naked identifier: consume identifier-continue bytes.
            let start = index;
            let mut end = start;
            while end < bytes.len() && legacy_is_ascii_ident_continue(bytes[end]) {
                end += 1;
            }
            let name = &sql[start..end];
            (name.to_string(), false, end)
        } else {
            index += 1;
            continue;
        };

        // Only identifiers immediately followed by `.` act as qualifiers.
        if ref_end < bytes.len() && bytes[ref_end] == b'.' {
            // Fold the reference with the same rules as the alias above.
            let normalized_ref = match alias_case_check {
                AliasCaseCheck::Dialect => {
                    normalize_identifier_for_dialect(&ref_name, ref_quoted, dialect)
                }
                AliasCaseCheck::CaseInsensitive => ref_name.to_ascii_lowercase(),
                AliasCaseCheck::CaseSensitive => ref_name.clone(),
                AliasCaseCheck::QuotedCsNakedUpper => {
                    if ref_quoted {
                        ref_name.clone()
                    } else {
                        ref_name.to_ascii_uppercase()
                    }
                }
                AliasCaseCheck::QuotedCsNakedLower => {
                    if ref_quoted {
                        ref_name.clone()
                    } else {
                        ref_name.to_ascii_lowercase()
                    }
                }
            };
            if normalized_ref == normalized_alias {
                return true;
            }
        }

        // Guarantee forward progress even if ref_end did not advance.
        index = if ref_end > index { ref_end } else { index + 1 };
    }

    false
}
1630
1631fn legacy_redshift_qualify_uses_alias_prefixed_identifier(sql: &str, alias: &str) -> bool {
1632 let Some(tokens) = legacy_tokenize_with_offsets(sql, Dialect::Redshift) else {
1633 return false;
1634 };
1635 let Some(qualify_index) = tokens
1636 .iter()
1637 .position(|token| legacy_token_matches_keyword(&token.token, "QUALIFY"))
1638 else {
1639 return false;
1640 };
1641
1642 if tokens[..qualify_index]
1645 .iter()
1646 .any(|token| legacy_token_matches_keyword(&token.token, "WHERE"))
1647 {
1648 return false;
1649 }
1650
1651 tokens[qualify_index + 1..]
1652 .iter()
1653 .filter_map(|token| legacy_token_reference_identifier(&token.token))
1654 .any(|identifier| legacy_alias_prefixed_identifier(identifier, alias))
1655}
1656
1657fn legacy_token_reference_identifier(token: &Token) -> Option<&str> {
1658 match token {
1659 Token::Word(word) => Some(word.value.as_str()),
1660 _ => None,
1661 }
1662}
1663
fn legacy_alias_prefixed_identifier(identifier: &str, alias: &str) -> bool {
    // True when `identifier` is `alias` + '_' + anything, compared ASCII
    // case-insensitively (e.g. alias `ss` matches `ss_sold_date`).
    if identifier.is_empty() || alias.is_empty() {
        return false;
    }
    let upper_identifier = identifier.to_ascii_uppercase();
    let upper_alias = alias.to_ascii_uppercase();
    match upper_identifier.strip_prefix(upper_alias.as_str()) {
        Some(rest) => rest.starts_with('_'),
        None => false,
    }
}
1673
fn legacy_is_generated_alias_identifier(alias: &str) -> bool {
    // True for machine-generated-looking aliases of the form `t<digits>`
    // (t1, t42, ...). The prefix must be a lowercase 't' and at least one
    // digit must follow; nothing else is permitted.
    alias
        .strip_prefix('t')
        .is_some_and(|digits| !digits.is_empty() && digits.bytes().all(|b| b.is_ascii_digit()))
}
1689
fn legacy_is_sql_keyword(token: &str) -> bool {
    // Reserved words that must never be treated as a table alias by the
    // legacy autofix. The table is sorted ascending so membership is a
    // binary search over the uppercased input.
    const KEYWORDS: &[&str] = &[
        "ALL", "ALTER", "AND", "ANY", "AS", "ASC", "BEGIN", "BETWEEN", "BOOLEAN", "BY", "CASE",
        "CAST", "CHECK", "COLUMN", "CONSTRAINT", "CREATE", "CROSS", "DEFAULT", "DELETE", "DESC",
        "DISTINCT", "DROP", "ELSE", "END", "EXCEPT", "EXISTS", "FALSE", "FETCH", "FOR", "FOREIGN",
        "FROM", "FULL", "GROUP", "HAVING", "IF", "IN", "INDEX", "INNER", "INSERT", "INT",
        "INTEGER", "INTERSECT", "INTO", "IS", "JOIN", "KEY", "LEFT", "LIKE", "LIMIT", "NOT",
        "NULL", "OFFSET", "ON", "OR", "ORDER", "OUTER", "OVER", "PARTITION", "PRIMARY",
        "REFERENCES", "RIGHT", "SELECT", "SET", "TABLE", "TEXT", "THEN", "TRUE", "UNION",
        "UNIQUE", "UPDATE", "USING", "VALUES", "VARCHAR", "VIEW", "WHEN", "WHERE", "WINDOW",
        "WITH",
    ];
    let upper = token.to_ascii_uppercase();
    KEYWORDS.binary_search(&upper.as_str()).is_ok()
}
1773
1774fn legacy_is_simple_identifier(value: &str) -> bool {
1775 let bytes = value.as_bytes();
1776 if bytes.is_empty() || !legacy_is_ascii_ident_start(bytes[0]) {
1777 return false;
1778 }
1779 bytes[1..]
1780 .iter()
1781 .copied()
1782 .all(legacy_is_ascii_ident_continue)
1783}
1784
fn legacy_is_ascii_ident_start(byte: u8) -> bool {
    // Identifiers may begin with an ASCII letter, `_`, or `#` (presumably
    // for dialects with `#`-prefixed temp tables — TODO confirm).
    matches!(byte, b'_' | b'#') || byte.is_ascii_alphabetic()
}
1788
fn legacy_is_ascii_ident_continue(byte: u8) -> bool {
    // After the first byte, identifiers continue with ASCII alphanumerics
    // or `_` (note: `#` is NOT valid past the first byte).
    byte == b'_' || byte.is_ascii_alphanumeric()
}
1792
1793#[cfg(test)]
1794mod tests {
1795 use super::*;
1796 use crate::linter::config::LintConfig;
1797 use crate::linter::rule::with_active_dialect;
1798 use crate::parser::{parse_sql, parse_sql_with_dialect};
1799 use crate::types::{Dialect, IssueAutofixApplicability};
1800
    /// Parse `sql` with the default dialect and run the rule over every
    /// statement, collecting all issues.
    fn check_sql(sql: &str) -> Vec<Issue> {
        let stmts = parse_sql(sql).unwrap();
        let rule = UnusedTableAlias::default();
        let ctx = LintContext {
            sql,
            statement_range: 0..sql.len(),
            statement_index: 0,
        };
        let mut issues = Vec::new();
        for stmt in &stmts {
            issues.extend(rule.check(stmt, &ctx));
        }
        issues
    }
1815
    /// Apply an issue's autofix edits to `sql` and return the rewritten
    /// text, or None when the issue carries no autofix. Edits are applied
    /// back-to-front so earlier byte spans stay valid.
    fn apply_issue_autofix(sql: &str, issue: &Issue) -> Option<String> {
        let autofix = issue.autofix.as_ref()?;
        let mut edits = autofix.edits.clone();
        edits.sort_by_key(|edit| (edit.span.start, edit.span.end));
        let mut rewritten = sql.to_string();
        for edit in edits.into_iter().rev() {
            rewritten.replace_range(edit.span.start..edit.span.end, &edit.replacement);
        }
        Some(rewritten)
    }
1826
    /// Like `check_sql`, but parses and lints under a specific dialect,
    /// with the dialect made active for the duration of the check.
    fn check_sql_in_dialect(sql: &str, dialect: Dialect) -> Vec<Issue> {
        let stmts = parse_sql_with_dialect(sql, dialect).unwrap();
        let rule = UnusedTableAlias::default();
        let ctx = LintContext {
            sql,
            statement_range: 0..sql.len(),
            statement_index: 0,
        };
        let mut issues = Vec::new();
        with_active_dialect(dialect, || {
            for stmt in &stmts {
                issues.extend(rule.check(stmt, &ctx));
            }
        });
        issues
    }
1843
    // Both aliases declared, only bare table names used as qualifiers.
    #[test]
    fn test_unused_alias_detected() {
        let issues = check_sql("SELECT * FROM users u JOIN orders o ON users.id = orders.user_id");
        assert_eq!(issues.len(), 2);
        assert_eq!(issues[0].code, "LINT_AL_005");
    }

    // The autofix strips both ` AS alias` tails and is marked safe.
    #[test]
    fn test_unused_alias_emits_safe_autofix_patch() {
        let sql = "SELECT users.name FROM users AS u JOIN orders AS o ON users.id = orders.user_id";
        let issues = check_sql(sql);
        assert_eq!(issues.len(), 2);
        let autofix = issues[0].autofix.as_ref().expect("autofix metadata");
        assert_eq!(autofix.applicability, IssueAutofixApplicability::Safe);
        let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
        assert_eq!(
            fixed,
            "SELECT users.name FROM users JOIN orders ON users.id = orders.user_id"
        );
    }

    // Generated-looking aliases (t1) are reported but never autofixed.
    #[test]
    fn test_generated_alias_does_not_emit_autofix() {
        let sql = "SELECT * FROM users AS t1";
        let issues = check_sql(sql);
        assert_eq!(issues.len(), 1);
        assert!(
            issues[0].autofix.is_none(),
            "legacy AL005 parity skips generated aliases like t1"
        );
    }

    // Aliases referenced in ON qualify as used.
    #[test]
    fn test_used_alias_ok() {
        let issues = check_sql("SELECT u.name FROM users u JOIN orders o ON u.id = o.user_id");
        assert!(issues.is_empty());
    }

    // A lone aliased table with no references is flagged.
    #[test]
    fn test_single_table_unused_alias_detected() {
        let issues = check_sql("SELECT * FROM users u");
        assert_eq!(issues.len(), 1);
        assert!(issues[0].message.contains("u"));
    }

    // A lone aliased table referenced in the projection is fine.
    #[test]
    fn test_single_table_alias_used_ok() {
        let issues = check_sql("SELECT u.id FROM users u");
        assert!(issues.is_empty());
    }
1895
    // Alias usage inside WHERE counts.
    #[test]
    fn test_alias_used_in_where() {
        let issues = check_sql(
            "SELECT u.name FROM users u JOIN orders o ON u.id = o.user_id WHERE u.active = true",
        );
        assert!(issues.is_empty());
    }

    // Alias usage inside GROUP BY counts.
    #[test]
    fn test_alias_used_in_group_by() {
        let issues = check_sql(
            "SELECT u.name, COUNT(*) FROM users u JOIN orders o ON u.id = o.user_id GROUP BY u.name",
        );
        assert!(issues.is_empty());
    }

    // Alias usage inside HAVING counts.
    #[test]
    fn test_alias_used_in_having() {
        let issues = check_sql(
            "SELECT u.name, COUNT(o.id) FROM users u JOIN orders o ON u.id = o.user_id \
             GROUP BY u.name HAVING COUNT(o.id) > 5",
        );
        assert!(issues.is_empty());
    }

    // A qualified wildcard (`u.*`) counts as alias usage.
    #[test]
    fn test_alias_used_in_qualified_wildcard() {
        let issues = check_sql("SELECT u.* FROM users u JOIN orders o ON u.id = o.user_id");
        assert!(issues.is_empty());
    }

    // `o` is still unused even though `u` appears in a wildcard.
    #[test]
    fn test_alias_unused_despite_qualified_wildcard() {
        let issues = check_sql("SELECT u.* FROM users u JOIN orders o ON u.id = orders.user_id");
        assert_eq!(issues.len(), 1);
        assert!(issues[0].message.contains("o"));
    }

    // Only the alias that is never referenced is reported.
    #[test]
    fn test_partial_alias_usage() {
        let issues = check_sql("SELECT u.name FROM users u JOIN orders o ON u.id = orders.user_id");
        assert_eq!(issues.len(), 1);
        assert!(issues[0].message.contains("o"));
    }
1945
    // Three tables, only `c` unused.
    #[test]
    fn test_three_tables_one_unused() {
        let issues = check_sql(
            "SELECT a.name, b.total \
             FROM users a \
             JOIN orders b ON a.id = b.user_id \
             JOIN products c ON b.product_id = products.id",
        );
        assert_eq!(issues.len(), 1);
        assert!(issues[0].message.contains("c"));
    }

    // Nothing to flag when no aliases are declared.
    #[test]
    fn test_no_aliases_ok() {
        let issues =
            check_sql("SELECT users.name FROM users JOIN orders ON users.id = orders.user_id");
        assert!(issues.is_empty());
    }

    // Self-join: both aliases are required and used.
    #[test]
    fn test_self_join_with_aliases() {
        let issues =
            check_sql("SELECT a.name, b.name FROM users a JOIN users b ON a.manager_id = b.id");
        assert!(issues.is_empty());
    }

    // Alias usage inside a CASE expression counts.
    #[test]
    fn test_alias_in_case_expression() {
        let issues = check_sql(
            "SELECT CASE WHEN u.active THEN 'yes' ELSE 'no' END \
             FROM users u JOIN orders o ON u.id = o.user_id",
        );
        assert!(issues.is_empty());
    }

    // Alias usage inside ORDER BY counts.
    #[test]
    fn test_alias_used_in_order_by() {
        let issues = check_sql(
            "SELECT u.name \
             FROM users u \
             JOIN orders o ON users.id = orders.user_id \
             ORDER BY o.created_at",
        );
        assert!(issues.is_empty());
    }

    // LEFT JOIN ON-clause references count as usage.
    #[test]
    fn test_left_join_alias_used_in_on_clause() {
        let issues = check_sql("SELECT u.name FROM users u LEFT JOIN orders o ON u.id = o.user_id");
        assert!(issues.is_empty());
    }

    // Usage only inside a correlated EXISTS subquery still counts.
    #[test]
    fn test_alias_used_only_in_correlated_exists_subquery() {
        let issues = check_sql(
            "SELECT 1 \
             FROM users u \
             JOIN orders o ON 1 = 1 \
             WHERE EXISTS (SELECT 1 WHERE u.id = o.user_id)",
        );
        assert!(issues.is_empty());
    }

    // Usage inside QUALIFY counts.
    #[test]
    fn test_alias_used_in_qualify_clause() {
        let issues = check_sql(
            "SELECT u.id \
             FROM users u \
             JOIN orders o ON users.id = orders.user_id \
             QUALIFY ROW_NUMBER() OVER (PARTITION BY o.user_id ORDER BY o.user_id) = 1",
        );
        assert!(issues.is_empty());
    }

    // Usage inside a named WINDOW definition counts.
    #[test]
    fn test_alias_used_in_named_window_clause() {
        let issues = check_sql(
            "SELECT SUM(u.id) OVER w \
             FROM users u \
             JOIN orders o ON users.id = orders.user_id \
             WINDOW w AS (PARTITION BY o.user_id)",
        );
        assert!(issues.is_empty());
    }
2031
    // NOTE(review): despite the name, this asserts the derived alias `o2`
    // produces NO issue — confirm whether the name or the expectation is
    // the intended behavior.
    #[test]
    fn test_unused_derived_alias_detected() {
        let issues = check_sql(
            "SELECT u.id \
             FROM users u \
             JOIN (SELECT id FROM orders) o2 ON u.id = u.id",
        );
        assert!(issues.is_empty());
    }

    // LATERAL subquery aliases are exempt from the rule.
    #[test]
    fn test_lateral_alias_is_ignored() {
        let issues = check_sql("SELECT u.id FROM users u JOIN LATERAL (SELECT 1) lx ON TRUE");
        assert!(issues.is_empty());
    }

    // Usage only inside a LATERAL subquery relation still counts for `u`.
    #[test]
    fn test_alias_used_only_in_lateral_subquery_relation() {
        let issues = check_sql(
            "SELECT 1 \
             FROM users u \
             JOIN LATERAL (SELECT u.id) lx ON TRUE",
        );
        assert!(issues.is_empty());
    }

    // Usage only inside an UNNEST join argument still counts for `u`.
    #[test]
    fn test_alias_used_only_in_unnest_join_relation() {
        let issues = check_sql(
            "SELECT 1 \
             FROM users u \
             LEFT JOIN UNNEST(u.tags) tag ON TRUE",
        );
        assert!(issues.is_empty());
    }
2067
2068 #[test]
2069 fn alias_case_check_case_sensitive_flags_case_mismatch() {
2070 let config = LintConfig {
2071 enabled: true,
2072 disabled_rules: vec![],
2073 rule_configs: std::collections::BTreeMap::from([(
2074 "aliasing.unused".to_string(),
2075 serde_json::json!({"alias_case_check": "case_sensitive"}),
2076 )]),
2077 };
2078 let rule = UnusedTableAlias::from_config(&config);
2079 let sql = "SELECT zoo.id, b.id FROM users AS \"Zoo\" JOIN books b ON zoo.id = b.user_id";
2080 let stmts = parse_sql(sql).expect("parse");
2081 let issues = rule.check(
2082 &stmts[0],
2083 &LintContext {
2084 sql,
2085 statement_range: 0..sql.len(),
2086 statement_index: 0,
2087 },
2088 );
2089 assert_eq!(issues.len(), 1);
2090 assert!(issues[0].message.contains("Zoo"));
2091 }
2092
2093 #[test]
2094 fn alias_case_check_case_insensitive_allows_case_mismatch() {
2095 let config = LintConfig {
2096 enabled: true,
2097 disabled_rules: vec![],
2098 rule_configs: std::collections::BTreeMap::from([(
2099 "LINT_AL_005".to_string(),
2100 serde_json::json!({"alias_case_check": "case_insensitive"}),
2101 )]),
2102 };
2103 let rule = UnusedTableAlias::from_config(&config);
2104 let sql = "SELECT zoo.id, b.id FROM users AS \"Zoo\" JOIN books b ON zoo.id = b.user_id";
2105 let stmts = parse_sql(sql).expect("parse");
2106 let issues = rule.check(
2107 &stmts[0],
2108 &LintContext {
2109 sql,
2110 statement_range: 0..sql.len(),
2111 statement_index: 0,
2112 },
2113 );
2114 assert!(issues.is_empty());
2115 }
2116
2117 #[test]
2118 fn alias_case_check_quoted_cs_naked_upper_allows_unquoted_upper_fold_for_quoted_alias() {
2119 let config = LintConfig {
2120 enabled: true,
2121 disabled_rules: vec![],
2122 rule_configs: std::collections::BTreeMap::from([(
2123 "aliasing.unused".to_string(),
2124 serde_json::json!({"alias_case_check": "quoted_cs_naked_upper"}),
2125 )]),
2126 };
2127 let rule = UnusedTableAlias::from_config(&config);
2128 let sql = "SELECT foo.id, b.id FROM users AS \"FOO\" JOIN books b ON foo.id = b.user_id";
2129 let stmts = parse_sql(sql).expect("parse");
2130 let issues = rule.check(
2131 &stmts[0],
2132 &LintContext {
2133 sql,
2134 statement_range: 0..sql.len(),
2135 statement_index: 0,
2136 },
2137 );
2138 assert!(issues.is_empty());
2139 }
2140
2141 #[test]
2142 fn alias_case_check_quoted_cs_naked_lower_allows_unquoted_lower_fold_for_quoted_alias() {
2143 let config = LintConfig {
2144 enabled: true,
2145 disabled_rules: vec![],
2146 rule_configs: std::collections::BTreeMap::from([(
2147 "aliasing.unused".to_string(),
2148 serde_json::json!({"alias_case_check": "quoted_cs_naked_lower"}),
2149 )]),
2150 };
2151 let rule = UnusedTableAlias::from_config(&config);
2152 let sql = "SELECT FOO.id, b.id FROM users AS \"foo\" JOIN books b ON FOO.id = b.user_id";
2153 let stmts = parse_sql(sql).expect("parse");
2154 let issues = rule.check(
2155 &stmts[0],
2156 &LintContext {
2157 sql,
2158 statement_range: 0..sql.len(),
2159 statement_index: 0,
2160 },
2161 );
2162 assert!(issues.is_empty());
2163 }
2164
    // Default (dialect) mode, per-dialect identifier case folding rules.

    #[test]
    fn dialect_mode_generic_allows_quoted_unquoted_fold_match() {
        let issues = check_sql("SELECT a.col1 FROM tab1 AS \"A\"");
        assert!(issues.is_empty());
    }

    #[test]
    fn dialect_mode_generic_allows_quoted_prefix_against_unquoted_alias() {
        let issues = check_sql("SELECT \"A\".col1 FROM tab1 AS a");
        assert!(issues.is_empty());
    }

    // Single-quoted aliases are case-preserving in the generic dialect.
    #[test]
    fn dialect_mode_generic_flags_single_quoted_alias_case_mismatch() {
        let issues = check_sql("SELECT a.col1 FROM tab1 AS 'a'");
        assert_eq!(issues.len(), 1);
        assert!(issues[0].message.contains("a"));
    }

    // Postgres folds unquoted identifiers to lowercase.
    #[test]
    fn dialect_mode_postgres_allows_lower_fold_for_quoted_alias() {
        let issues =
            check_sql_in_dialect("SELECT A.col_1 FROM table_a AS \"a\"", Dialect::Postgres);
        assert!(issues.is_empty());
    }

    // Snowflake folds unquoted identifiers to uppercase, so `a` != "a".
    #[test]
    fn dialect_mode_snowflake_flags_mixed_quoted_case_mismatch() {
        let issues =
            check_sql_in_dialect("SELECT a.col_1 FROM table_a AS \"a\"", Dialect::Snowflake);
        assert_eq!(issues.len(), 1);
        assert!(issues[0].message.contains("a"));
    }

    #[test]
    fn dialect_mode_bigquery_allows_backtick_quoted_alias_fold_match() {
        let issues = check_sql_in_dialect("SELECT a.col1 FROM tab1 AS `A`", Dialect::Bigquery);
        assert!(issues.is_empty());
    }

    // Redshift folds like Postgres (lowercase).
    #[test]
    fn dialect_mode_redshift_allows_lower_fold_for_quoted_alias() {
        let issues =
            check_sql_in_dialect("SELECT A.col_1 FROM table_a AS \"a\"", Dialect::Redshift);
        assert!(issues.is_empty());
    }

    #[test]
    fn dialect_mode_redshift_flags_mixed_quoted_case_mismatch() {
        let issues =
            check_sql_in_dialect("SELECT a.col_1 FROM table_a AS \"A\"", Dialect::Redshift);
        assert_eq!(issues.len(), 1);
        assert!(issues[0].message.contains("A"));
    }

    #[test]
    fn dialect_mode_mysql_allows_backtick_qualified_reference_against_unquoted_alias() {
        let issues = check_sql_in_dialect(
            "SELECT `nih`.`userID` FROM `flight_notification_item_history` AS nih",
            Dialect::Mysql,
        );
        assert!(issues.is_empty());
    }

    // DuckDB/Hive compare quoted identifiers case-insensitively.
    #[test]
    fn dialect_mode_duckdb_allows_case_insensitive_quoted_reference() {
        let issues = check_sql_in_dialect("SELECT \"a\".col_1 FROM table_a AS A", Dialect::Duckdb);
        assert!(issues.is_empty());
    }

    #[test]
    fn dialect_mode_hive_allows_case_insensitive_quoted_reference() {
        let issues = check_sql_in_dialect("SELECT `a`.col1 FROM tab1 AS A", Dialect::Hive);
        assert!(issues.is_empty());
    }
2240
    // An unused alias inside a derived subquery is still flagged.
    #[test]
    fn flags_inner_subquery_unused_alias() {
        let issues = check_sql("SELECT * FROM (SELECT * FROM my_tbl AS foo)");
        assert_eq!(issues.len(), 1);
        assert!(issues[0].message.contains("foo"));
    }

    // A derived-table alias itself need not be referenced.
    #[test]
    fn allows_unreferenced_subquery_alias() {
        let issues = check_sql("SELECT * FROM (SELECT 1 AS a) subquery");
        assert!(issues.is_empty());
    }

    // Postgres set-returning function alias used via bare column reference.
    #[test]
    fn allows_postgres_generate_series_alias() {
        let issues = check_sql_in_dialect(
            "SELECT date_trunc('day', dd)::timestamp FROM generate_series('2022-02-01'::timestamp, NOW()::timestamp, '1 day'::interval) dd",
            Dialect::Postgres,
        );
        assert!(issues.is_empty());
    }

    // Chained LATERAL FLATTENs: only the last alias `f` is unused.
    #[test]
    fn flags_unused_snowflake_lateral_flatten_alias() {
        let issues = check_sql_in_dialect(
            "SELECT a.test1, a.test2, b.test3 \
             FROM table1 AS a, \
             LATERAL flatten(input => some_field) AS b, \
             LATERAL flatten(input => b.value) AS c, \
             LATERAL flatten(input => c.value) AS d, \
             LATERAL flatten(input => d.value) AS e, \
             LATERAL flatten(input => e.value) AS f",
            Dialect::Snowflake,
        );
        assert_eq!(issues.len(), 1);
        assert!(issues[0].message.contains("f"));
    }

    // An unused alias inside a CTE within DELETE ... USING is flagged.
    #[test]
    fn flags_unused_alias_inside_snowflake_delete_using_cte() {
        let issues = check_sql_in_dialect(
            "DELETE FROM MYTABLE1 \
             USING ( \
             WITH MYCTE AS (SELECT COLUMN2 FROM MYTABLE3 AS MT3) \
             SELECT COLUMN3 FROM MYTABLE3 \
             ) X \
             WHERE COLUMN1 = X.COLUMN3",
            Dialect::Snowflake,
        );
        assert_eq!(issues.len(), 1);
        assert!(issues[0].message.contains("MT3"));
    }

    // BigQuery: passing the alias itself to TO_JSON_STRING counts as usage.
    #[test]
    fn allows_bigquery_to_json_string_table_alias_argument() {
        let issues = check_sql_in_dialect(
            "SELECT TO_JSON_STRING(t) FROM my_table AS t",
            Dialect::Bigquery,
        );
        assert!(issues.is_empty());
    }

    // ANSI: the same pattern is NOT treated as alias usage.
    #[test]
    fn flags_ansi_to_json_string_table_alias_argument() {
        let issues =
            check_sql_in_dialect("SELECT TO_JSON_STRING(t) FROM my_table AS t", Dialect::Ansi);
        assert_eq!(issues.len(), 1);
        assert!(issues[0].message.contains("t"));
    }

    // Redshift QUALIFY heuristic: `ss_`-prefixed identifiers after QUALIFY
    // (no WHERE before it) count as usage of alias `ss`, so only `s` flags.
    #[test]
    fn redshift_qualify_after_from_counts_alias_usage() {
        let issues = check_sql_in_dialect(
            "SELECT * \
             FROM store AS s \
             INNER JOIN store_sales AS ss \
             QUALIFY ROW_NUMBER() OVER (PARTITION BY ss.sold_date ORDER BY ss.sales_price DESC) <= 2",
            Dialect::Redshift,
        );
        assert_eq!(issues.len(), 1);
        assert!(issues[0].message.contains("s"));
    }
2323
2324 #[test]
2325 fn redshift_qualify_after_where_does_not_count_alias_usage() {
2326 let issues = check_sql_in_dialect(
2327 "SELECT * \
2328 FROM store AS s \
2329 INNER JOIN store_sales AS ss \
2330 WHERE col = 1 \
2331 QUALIFY ROW_NUMBER() OVER (PARTITION BY ss.sold_date ORDER BY ss.sales_price DESC) <= 2",
2332 Dialect::Redshift,
2333 );
2334 assert_eq!(issues.len(), 2);
2335 assert!(issues.iter().any(|issue| issue.message.contains("s")));
2336 assert!(issues.iter().any(|issue| issue.message.contains("ss")));
2337 }
2338
2339 #[test]
2340 fn redshift_qualify_unqualified_alias_prefixed_identifier_counts_alias_usage() {
2341 let issues = check_sql_in_dialect(
2342 "SELECT * \
2343 FROM #store_sales AS ss \
2344 QUALIFY row_number() OVER (PARTITION BY ss_sold_date ORDER BY ss_sales_price DESC) <= 2",
2345 Dialect::Redshift,
2346 );
2347 assert!(issues.is_empty());
2348 }
2349
2350 #[test]
2351 fn redshift_qualify_after_from_autofix_keeps_used_join_alias() {
2352 let sql = "SELECT *\n\
2353FROM #store as s\n\
2354INNER JOIN #store_sales AS ss\n\
2355QUALIFY row_number() OVER (PARTITION BY ss_sold_date ORDER BY ss_sales_price DESC) <= 2";
2356 let issues = check_sql_in_dialect(sql, Dialect::Redshift);
2357 assert_eq!(issues.len(), 1);
2358 let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
2359 assert_eq!(
2360 fixed,
2361 "SELECT *\n\
2362FROM #store\n\
2363INNER JOIN #store_sales AS ss\n\
2364QUALIFY row_number() OVER (PARTITION BY ss_sold_date ORDER BY ss_sales_price DESC) <= 2"
2365 );
2366 }
2367
2368 #[test]
2369 fn redshift_qualify_after_where_autofix_removes_both_unused_aliases() {
2370 let sql = "SELECT *\n\
2371FROM #store as s\n\
2372INNER JOIN #store_sales AS ss\n\
2373WHERE col = 1\n\
2374QUALIFY row_number() OVER (PARTITION BY ss_sold_date ORDER BY ss_sales_price DESC) <= 2";
2375 let issues = check_sql_in_dialect(sql, Dialect::Redshift);
2376 assert_eq!(issues.len(), 2);
2377 let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
2378 assert_eq!(
2379 fixed,
2380 "SELECT *\n\
2381FROM #store\n\
2382INNER JOIN #store_sales\n\
2383WHERE col = 1\n\
2384QUALIFY row_number() OVER (PARTITION BY ss_sold_date ORDER BY ss_sales_price DESC) <= 2"
2385 );
2386 }
2387
2388 #[test]
2389 fn sparksql_values_derived_alias_is_detected_and_autofixed() {
2390 let sql = "SELECT *\n\
2391FROM (\n\
2392 VALUES (1, 2), (3, 4)\n\
2393) AS t(c1, c2)";
2394 let issues = check_sql_in_dialect(sql, Dialect::Databricks);
2395 assert_eq!(issues.len(), 1);
2396 let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
2397 assert_eq!(
2398 fixed,
2399 "SELECT *\n\
2400FROM (\n\
2401 VALUES (1, 2), (3, 4)\n\
2402)"
2403 );
2404 }
2405
2406 #[test]
2407 fn allows_bigquery_implicit_array_table_reference() {
2408 let issues = check_sql_in_dialect(
2409 "WITH table_arr AS (SELECT [1,2,4,2] AS arr) \
2410 SELECT arr \
2411 FROM table_arr AS t, t.arr",
2412 Dialect::Bigquery,
2413 );
2414 assert!(issues.is_empty());
2415 }
2416
2417 #[test]
2418 fn allows_redshift_super_array_relation_reference() {
2419 let issues = check_sql_in_dialect(
2420 "SELECT my_column, my_array_value \
2421 FROM my_schema.my_table AS t, t.super_array AS my_array_value",
2422 Dialect::Redshift,
2423 );
2424 assert!(issues.is_empty());
2425 }
2426
2427 #[test]
2428 fn allows_repeat_referenced_table_aliases() {
2429 let issues = check_sql(
2430 "SELECT ROW_NUMBER() OVER(PARTITION BY a.object_id ORDER BY a.object_id) \
2431 FROM sys.objects a \
2432 CROSS JOIN sys.objects b \
2433 CROSS JOIN sys.objects c",
2434 );
2435 assert!(issues.is_empty());
2436 }
2437
2438 #[test]
2439 fn dialect_mode_databricks_allows_backtick_case_insensitive_reference() {
2440 let issues =
2441 check_sql_in_dialect("SELECT `a`.col_1 FROM table_a AS A", Dialect::Databricks);
2442 assert!(issues.is_empty());
2443 }
2444
2445 #[test]
2446 fn snowflake_json_access_counts_as_alias_usage() {
2447 let issues = check_sql_in_dialect(
2448 "SELECT r.rec:foo::string FROM foo.bar AS r",
2449 Dialect::Snowflake,
2450 );
2451 assert!(issues.is_empty());
2452 }
2453
2454 #[test]
2455 fn snowflake_lateral_flatten_unused_alias_detected_and_fixable() {
2456 let sql = "SELECT r.rec:foo::string, value:bar::string \
2457 FROM foo.bar AS r, LATERAL FLATTEN(input => rec:result) AS x";
2458 let issues = check_sql_in_dialect(sql, Dialect::Snowflake);
2459 assert_eq!(issues.len(), 1);
2460 assert!(issues[0].message.contains("x"));
2461 let autofix = issues[0].autofix.as_ref().expect("autofix metadata");
2462 assert_eq!(autofix.applicability, IssueAutofixApplicability::Safe);
2463 let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
2464 assert!(
2465 !fixed.contains("AS x"),
2466 "autofix should remove the unused LATERAL alias"
2467 );
2468 }
2469
2470 #[test]
2471 fn autofix_removes_double_quoted_alias_in_dialect_mode() {
2472 let sql = "SELECT a.col_1\nFROM table_a AS \"A\"\n";
2473 let issues = check_sql_in_dialect(sql, Dialect::Postgres);
2474 assert_eq!(issues.len(), 1);
2475 assert!(issues[0].message.contains("A"));
2476 let autofix = issues[0].autofix.as_ref().expect("autofix metadata");
2477 let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
2478 assert_eq!(fixed, "SELECT a.col_1\nFROM table_a\n");
2479 assert_eq!(autofix.applicability, IssueAutofixApplicability::Safe);
2480 }
2481
2482 #[test]
2483 fn autofix_removes_single_quoted_alias() {
2484 let sql = "SELECT a.col1\nFROM tab1 as 'a'\n";
2485 let issues = check_sql(sql);
2486 assert_eq!(issues.len(), 1);
2487 let autofix = issues[0].autofix.as_ref().expect("autofix metadata");
2488 let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
2489 assert_eq!(fixed, "SELECT a.col1\nFROM tab1\n");
2490 assert_eq!(autofix.applicability, IssueAutofixApplicability::Safe);
2491 }
2492}