1use crate::linter::config::LintConfig;
7use crate::linter::rule::{LintContext, LintRule};
8use crate::linter::rules::semantic_helpers::visit_selects_in_statement;
9use crate::types::{issue_codes, Dialect, Issue, IssueAutofixApplicability, IssuePatchEdit};
10use sqlparser::ast::{SelectItem, Statement};
11use sqlparser::keywords::Keyword;
12use sqlparser::tokenizer::{Location, Span, Token, TokenWithSpan, Tokenizer};
13
/// How LT09 treats a SELECT clause whose only target is a wildcard (`*` / `tbl.*`).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum WildcardPolicy {
    /// A lone wildcard counts as a single target and may stay on the SELECT line (default).
    Single,
    /// A lone wildcard is laid out like a multi-target clause: it must sit on its own line.
    Multiple,
}
19
20impl WildcardPolicy {
21 fn from_config(config: &LintConfig) -> Self {
22 match config
23 .rule_option_str(issue_codes::LINT_LT_009, "wildcard_policy")
24 .unwrap_or("single")
25 .to_ascii_lowercase()
26 .as_str()
27 {
28 "multiple" | "multi" | "allow_multiple" => Self::Multiple,
29 _ => Self::Single,
30 }
31 }
32}
33
/// Lint rule LT09: select targets should each be on a new line unless the
/// clause has exactly one target.
pub struct LayoutSelectTargets {
    // Governs whether a lone wildcard target may stay on the SELECT line.
    wildcard_policy: WildcardPolicy,
}
37
impl LayoutSelectTargets {
    /// Builds the rule from lint configuration, reading the LT09
    /// `wildcard_policy` rule option (defaults to `Single`).
    pub fn from_config(config: &LintConfig) -> Self {
        Self {
            wildcard_policy: WildcardPolicy::from_config(config),
        }
    }
}
45
impl Default for LayoutSelectTargets {
    /// The default rule uses the `Single` wildcard policy: a lone `*` target
    /// may stay on the SELECT line.
    fn default() -> Self {
        Self {
            wildcard_policy: WildcardPolicy::Single,
        }
    }
}
53
54impl LintRule for LayoutSelectTargets {
55 fn code(&self) -> &'static str {
56 issue_codes::LINT_LT_009
57 }
58
59 fn name(&self) -> &'static str {
60 "Layout select targets"
61 }
62
63 fn description(&self) -> &'static str {
64 "Select targets should be on a new line unless there is only one select target."
65 }
66
67 fn check(&self, statement: &Statement, ctx: &LintContext) -> Vec<Issue> {
68 lt09_violation_spans(statement, ctx, self.wildcard_policy)
69 .into_iter()
70 .map(|((start, end), fix_span)| {
71 let mut issue = Issue::info(
72 issue_codes::LINT_LT_009,
73 "Select targets should be on a new line unless there is only one target.",
74 )
75 .with_statement(ctx.statement_index)
76 .with_span(ctx.span_from_statement_offset(start, end));
77 if let Some(fix_edits) = fix_span {
78 let edits: Vec<IssuePatchEdit> = fix_edits
79 .into_iter()
80 .map(|(fix_start, fix_end, replacement)| {
81 IssuePatchEdit::new(
82 ctx.span_from_statement_offset(fix_start, fix_end),
83 replacement,
84 )
85 })
86 .collect();
87 issue = issue.with_autofix_edits(IssueAutofixApplicability::Safe, edits);
88 }
89 issue
90 })
91 .collect()
92 }
93}
94
/// Per-SELECT facts gathered from the parsed AST.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
struct AstSelectSpec {
    // Number of items in the SELECT's projection list.
    target_count: usize,
    // True when any projection item is `*` or a qualified `tbl.*` wildcard.
    has_wildcard: bool,
}
100
/// Token-level layout of one SELECT clause within the statement's token stream.
#[derive(Clone, Debug)]
struct SelectClauseLayout {
    // Index of the SELECT keyword token.
    select_idx: usize,
    // Index of this clause's FROM keyword token, when one terminates the projection.
    from_idx: Option<usize>,
    // Half-open token index ranges, one per comma-separated select target.
    target_ranges: Vec<(usize, usize)>,
}
107
// Byte-offset span (start, end) within the statement's SQL text.
type Lt09Span = (usize, usize);
// Autofix edits as (start, end, replacement) byte-offset patches.
type Lt09AutofixEdits = Vec<(usize, usize, String)>;
// A reported violation: the span to highlight plus optional safe autofix edits.
type Lt09Violation = (Lt09Span, Option<Lt09AutofixEdits>);
111
/// Scans one statement for LT09 violations.
///
/// The statement is tokenized (preferring the cached document token stream)
/// and each token-level SELECT clause is paired positionally with the AST's
/// SELECT specs. For every violating clause this returns the byte span of the
/// SELECT keyword plus optional safe autofix edits.
fn lt09_violation_spans(
    statement: &Statement,
    ctx: &LintContext,
    wildcard_policy: WildcardPolicy,
) -> Vec<Lt09Violation> {
    let sql = ctx.statement_sql();
    // Prefer statement-relative tokens projected from the document stream;
    // fall back to tokenizing the statement text directly.
    let tokens = tokenized_for_context(ctx).or_else(|| tokenized(sql, ctx.dialect()));
    let Some(tokens) = tokens else {
        return Vec::new();
    };

    let ast_specs = collect_ast_select_specs(statement);
    let layouts = collect_select_clause_layouts(&tokens);
    let mut spans = Vec::new();

    for (idx, layout) in layouts.iter().enumerate() {
        if layout.target_ranges.is_empty() {
            continue;
        }

        let token_target_count = layout.target_ranges.len();
        let token_single_wildcard =
            token_target_count == 1 && target_range_is_wildcard(&tokens, layout.target_ranges[0]);

        // Trust the AST spec only when it agrees with the token scan on the
        // target count; the positional pairing between token-level layouts
        // and AST specs can otherwise drift.
        let mut effective_target_count = token_target_count;
        let mut has_wildcard = token_single_wildcard;
        if let Some(spec) = ast_specs.get(idx) {
            if spec.target_count == token_target_count {
                effective_target_count = spec.target_count;
                has_wildcard = spec.has_wildcard;
            } else if token_target_count == 1 {
                has_wildcard = spec.has_wildcard || token_single_wildcard;
            }
        }

        // Under `Single` policy a lone wildcard still counts as one target;
        // under `Multiple` it is checked like a multi-target clause.
        let is_single_target = effective_target_count == 1
            && (!has_wildcard || matches!(wildcard_policy, WildcardPolicy::Single));

        let violation = if is_single_target {
            single_target_layout_violation(layout, &tokens)
        } else {
            multiple_target_layout_violation(layout, &tokens)
        };

        if !violation {
            continue;
        }

        // Report the issue on the SELECT keyword token itself.
        let token = &tokens[layout.select_idx];
        let Some((start, end)) = token_with_span_offsets(sql, token) else {
            continue;
        };
        let fix_span = if is_single_target {
            safe_single_target_collapse_span(sql, &tokens, layout)
        } else {
            // Prefer a full reflow; fall back to just breaking before FROM.
            safe_multi_target_reflow_span(sql, &tokens, layout)
                .or_else(|| safe_from_newline_fix_span(sql, &tokens, layout))
        };

        spans.push(((start, end), fix_span));
    }

    spans
}
176
177fn tokenized(sql: &str, dialect: Dialect) -> Option<Vec<TokenWithSpan>> {
178 let dialect = dialect.to_sqlparser_dialect();
179 let mut tokenizer = Tokenizer::new(dialect.as_ref(), sql);
180 tokenizer.tokenize_with_location().ok()
181}
182
/// Projects the cached document-wide token stream onto the current statement.
///
/// Tokens outside the statement's byte range are dropped and the remaining
/// spans are rebased so line/column become 1-based relative to the statement
/// start. Returns `None` when no tokens land inside the statement, letting
/// the caller fall back to re-tokenizing the statement text.
fn tokenized_for_context(ctx: &LintContext) -> Option<Vec<TokenWithSpan>> {
    let (statement_start_line, statement_start_column) =
        offset_to_line_col(ctx.sql, ctx.statement_range.start)?;

    ctx.with_document_tokens(|tokens| {
        if tokens.is_empty() {
            return None;
        }

        let mut out = Vec::new();
        for token in tokens {
            // Resolve the token's byte offsets within the full document text.
            let Some((start, end)) = token_with_span_offsets(ctx.sql, token) else {
                continue;
            };
            // Keep only tokens fully contained in the statement's range.
            if start < ctx.statement_range.start || end > ctx.statement_range.end {
                continue;
            }

            // Rebase both span endpoints to statement-relative coordinates.
            let Some(start_loc) = relative_location(
                token.span.start,
                statement_start_line,
                statement_start_column,
            ) else {
                continue;
            };
            let Some(end_loc) =
                relative_location(token.span.end, statement_start_line, statement_start_column)
            else {
                continue;
            };

            out.push(TokenWithSpan::new(
                token.token.clone(),
                Span::new(start_loc, end_loc),
            ));
        }

        if out.is_empty() {
            None
        } else {
            Some(out)
        }
    })
}
227
228fn collect_ast_select_specs(statement: &Statement) -> Vec<AstSelectSpec> {
229 let mut specs = Vec::new();
230 visit_selects_in_statement(statement, &mut |select| {
231 let has_wildcard = select.projection.iter().any(|item| {
232 matches!(
233 item,
234 SelectItem::Wildcard(_) | SelectItem::QualifiedWildcard(_, _)
235 )
236 });
237 specs.push(AstSelectSpec {
238 target_count: select.projection.len(),
239 has_wildcard,
240 });
241 });
242 specs
243}
244
/// Scans the token stream and builds a `SelectClauseLayout` for every SELECT
/// keyword found, tracking parenthesis depth so subqueries are analyzed at
/// their own nesting level.
fn collect_select_clause_layouts(tokens: &[TokenWithSpan]) -> Vec<SelectClauseLayout> {
    let mut depth = 0usize;
    let mut layouts = Vec::new();

    for (idx, token) in tokens.iter().enumerate() {
        if is_select_keyword(&token.token) {
            // Locate where the projection list ends (FROM, another clause
            // keyword, a closing paren at this depth, semicolon, or EOF).
            let (clause_end, from_idx) = find_select_clause_end(tokens, idx, depth);
            if let Some(first_target_idx) = find_first_target_idx(tokens, idx + 1, clause_end) {
                let target_ranges =
                    split_target_ranges(tokens, first_target_idx, clause_end, depth);
                layouts.push(SelectClauseLayout {
                    select_idx: idx,
                    from_idx,
                    target_ranges,
                });
            } else {
                // SELECT with no detectable targets; recorded anyway so the
                // layout indices stay aligned with the AST select specs.
                layouts.push(SelectClauseLayout {
                    select_idx: idx,
                    from_idx,
                    target_ranges: Vec::new(),
                });
            }
        }

        // Update depth *after* handling the token so SELECT is evaluated at
        // the depth it appears in.
        match token.token {
            Token::LParen => depth += 1,
            Token::RParen => depth = depth.saturating_sub(1),
            _ => {}
        }
    }

    layouts
}
278
279fn is_select_keyword(token: &Token) -> bool {
280 matches!(token, Token::Word(word) if word.keyword == Keyword::SELECT)
281}
282
283fn is_select_modifier_keyword(keyword: Keyword) -> bool {
284 matches!(keyword, Keyword::DISTINCT | Keyword::ALL)
285}
286
287fn is_select_clause_boundary_keyword(keyword: Keyword) -> bool {
288 matches!(
289 keyword,
290 Keyword::WHERE
291 | Keyword::GROUP
292 | Keyword::HAVING
293 | Keyword::QUALIFY
294 | Keyword::ORDER
295 | Keyword::LIMIT
296 | Keyword::FETCH
297 | Keyword::UNION
298 | Keyword::EXCEPT
299 | Keyword::INTERSECT
300 | Keyword::WINDOW
301 | Keyword::INTO
302 | Keyword::PREWHERE
303 | Keyword::CLUSTER
304 | Keyword::DISTRIBUTE
305 | Keyword::SORT
306 | Keyword::CONNECT
307 )
308}
309
/// Finds where the projection list of the SELECT at `select_idx` ends.
///
/// Returns `(end_idx, from_idx)`: `end_idx` is the index of the terminating
/// token (FROM, a clause-boundary keyword, a closing paren at the SELECT's
/// depth, a semicolon, or `tokens.len()` when none is found), and `from_idx`
/// is `Some` only when the terminator is the FROM keyword itself.
fn find_select_clause_end(
    tokens: &[TokenWithSpan],
    select_idx: usize,
    select_depth: usize,
) -> (usize, Option<usize>) {
    let mut depth = select_depth;
    for (idx, token) in tokens.iter().enumerate().skip(select_idx + 1) {
        match &token.token {
            Token::LParen => depth += 1,
            Token::RParen => {
                // A closing paren at the SELECT's own depth ends the
                // enclosing subquery, and with it this projection list.
                if depth == select_depth {
                    return (idx, None);
                }
                depth = depth.saturating_sub(1);
            }
            Token::SemiColon if depth == select_depth => return (idx, None),
            Token::Word(word) if depth == select_depth => {
                if word.keyword == Keyword::FROM {
                    return (idx, Some(idx));
                }
                if is_select_clause_boundary_keyword(word.keyword) {
                    return (idx, None);
                }
            }
            _ => {}
        }
    }

    (tokens.len(), None)
}
340
/// Tokens that carry no layout-relevant code. In sqlparser's tokenizer the
/// `Whitespace` variant also covers comments.
fn is_ignorable_layout_token(token: &Token) -> bool {
    matches!(token, Token::Whitespace(_))
}
344
/// Finds the first token of the projection list within `tokens[start..end)`,
/// skipping whitespace, the `DISTINCT`/`ALL` modifiers, and a PostgreSQL
/// `DISTINCT ON (...)` group. Returns `None` when no target token exists.
fn find_first_target_idx(tokens: &[TokenWithSpan], start: usize, end: usize) -> Option<usize> {
    let mut i = start;
    while i < end {
        let token = &tokens[i];
        match &token.token {
            t if is_ignorable_layout_token(t) => {}
            Token::Word(word) if is_select_modifier_keyword(word.keyword) => {
                if word.keyword == Keyword::DISTINCT {
                    // DISTINCT may be followed by `ON (...)`; jump to the
                    // group's closing paren so it isn't taken for a target.
                    if let Some(on_idx) = skip_distinct_on_clause(tokens, i + 1, end) {
                        i = on_idx;
                    }
                }
            }
            _ => return Some(i),
        }
        i += 1;
    }
    None
}
365
366fn skip_distinct_on_clause(tokens: &[TokenWithSpan], start: usize, end: usize) -> Option<usize> {
369 let mut i = start;
370 while i < end {
372 if is_ignorable_layout_token(&tokens[i].token) {
373 i += 1;
374 continue;
375 }
376 break;
377 }
378 if i >= end {
379 return None;
380 }
381 let Token::Word(word) = &tokens[i].token else {
382 return None;
383 };
384 if word.keyword != Keyword::ON {
385 return None;
386 }
387 i += 1;
388 while i < end {
390 if is_ignorable_layout_token(&tokens[i].token) {
391 i += 1;
392 continue;
393 }
394 break;
395 }
396 if i >= end || !matches!(tokens[i].token, Token::LParen) {
397 return None;
398 }
399 let mut depth = 1usize;
401 i += 1;
402 while i < end && depth > 0 {
403 match tokens[i].token {
404 Token::LParen => depth += 1,
405 Token::RParen => depth -= 1,
406 _ => {}
407 }
408 if depth > 0 {
409 i += 1;
410 }
411 }
412 if depth == 0 {
413 Some(i)
414 } else {
415 None
416 }
417}
418
/// Splits `tokens[start..end)` into one half-open token range per select
/// target, breaking only on commas at the SELECT's own parenthesis depth so
/// commas inside function calls or subqueries do not split a target. Each
/// range is trimmed of surrounding whitespace tokens; empty ranges are dropped.
fn split_target_ranges(
    tokens: &[TokenWithSpan],
    start: usize,
    end: usize,
    select_depth: usize,
) -> Vec<(usize, usize)> {
    let mut depth = select_depth;
    let mut ranges = Vec::new();
    let mut range_start = start;

    for (idx, token) in tokens.iter().enumerate().take(end).skip(start) {
        match token.token {
            Token::LParen => depth += 1,
            Token::RParen => depth = depth.saturating_sub(1),
            Token::Comma if depth == select_depth => {
                if let Some(trimmed) = trim_target_range(tokens, range_start, idx) {
                    ranges.push(trimmed);
                }
                range_start = idx + 1;
            }
            _ => {}
        }
    }

    // Final target: everything after the last top-level comma.
    if let Some(trimmed) = trim_target_range(tokens, range_start, end) {
        ranges.push(trimmed);
    }

    ranges
}
449
450fn trim_target_range(
451 tokens: &[TokenWithSpan],
452 mut start: usize,
453 mut end: usize,
454) -> Option<(usize, usize)> {
455 while start < end && is_ignorable_layout_token(&tokens[start].token) {
456 start += 1;
457 }
458
459 while start < end && is_ignorable_layout_token(&tokens[end - 1].token) {
460 end -= 1;
461 }
462
463 (start < end).then_some((start, end))
464}
465
466fn target_range_is_wildcard(tokens: &[TokenWithSpan], range: (usize, usize)) -> bool {
467 let (start, end) = range;
468 let code_tokens: Vec<&Token> = tokens[start..end]
469 .iter()
470 .map(|token| &token.token)
471 .filter(|token| !is_ignorable_layout_token(token))
472 .collect();
473
474 if !matches!(code_tokens.last(), Some(Token::Mul)) {
475 return false;
476 }
477
478 if code_tokens.len() == 1 {
479 return true;
480 }
481
482 code_tokens[..code_tokens.len() - 1]
483 .iter()
484 .enumerate()
485 .all(|(idx, token)| {
486 if idx % 2 == 0 {
487 matches!(token, Token::Word(_))
488 } else {
489 matches!(token, Token::Period)
490 }
491 })
492}
493
494fn last_code_line_before(tokens: &[TokenWithSpan], start: usize, end: usize) -> Option<u64> {
495 let mut line = None;
496 for token in tokens.iter().take(end).skip(start) {
497 if is_ignorable_layout_token(&token.token) || matches!(token.token, Token::Comma) {
498 continue;
499 }
500 line = Some(token.span.end.line);
501 }
502 line
503}
504
505fn single_target_layout_violation(layout: &SelectClauseLayout, tokens: &[TokenWithSpan]) -> bool {
506 let Some((target_start, target_end)) = layout.target_ranges.first().copied() else {
507 return false;
508 };
509
510 let select_line = tokens[layout.select_idx].span.start.line;
511 let target_start_line = tokens[target_start].span.start.line;
512 if target_start_line <= select_line {
513 return false;
514 }
515
516 let target_end_line = tokens[target_end - 1].span.end.line;
517 target_end_line == target_start_line
518}
519
/// Layout check for clauses with several targets (or a lone wildcard under
/// the `Multiple` policy): each target must start on its own line, and FROM
/// must not share a line with the last target.
fn multiple_target_layout_violation(layout: &SelectClauseLayout, tokens: &[TokenWithSpan]) -> bool {
    for (idx, (target_start, _target_end)) in layout.target_ranges.iter().enumerate() {
        let target_line = tokens[*target_start].span.start.line;
        // Violation when any preceding code token (SELECT itself or an
        // earlier target) ends on the line this target starts on.
        if last_code_line_before(tokens, layout.select_idx, *target_start)
            .is_some_and(|prev_line| prev_line == target_line)
        {
            return true;
        }

        // Violation when FROM sits on the same line as the last target.
        if idx + 1 == layout.target_ranges.len()
            && layout
                .from_idx
                .is_some_and(|from_idx| tokens[from_idx].span.start.line == target_line)
        {
            return true;
        }
    }

    false
}
540
/// Builds autofix edits that pull a lone select target up onto the SELECT
/// line (e.g. `SELECT\n    a` -> `SELECT a`), preserving any comments that
/// sit between SELECT and the target ("gap" comments) or after the target on
/// its line ("trailing" comments). Returns `None` when no safe rewrite could
/// be determined for the comment arrangement at hand.
fn safe_single_target_collapse_span(
    sql: &str,
    tokens: &[TokenWithSpan],
    layout: &SelectClauseLayout,
) -> Option<Lt09AutofixEdits> {
    let (target_start_idx, target_end_idx) = layout.target_ranges.first().copied()?;

    // Last code token before the target: SELECT itself or a modifier
    // (DISTINCT/ALL); the "gap" is the text between it and the target.
    let last_pre_target_idx = (layout.select_idx..target_start_idx)
        .rev()
        .find(|&idx| !is_ignorable_layout_token(&tokens[idx].token))?;

    let (_, gap_start) = token_with_span_offsets(sql, &tokens[last_pre_target_idx])?;
    let (gap_end, _) = token_with_span_offsets(sql, &tokens[target_start_idx])?;
    if gap_start > gap_end || gap_end > sql.len() {
        return None;
    }

    let gap = &sql[gap_start..gap_end];
    let (_, target_text_end) = token_with_span_offsets(sql, &tokens[target_end_idx - 1])?;
    let target_line = tokens[target_start_idx].span.start.line;

    let has_gap_comments = (last_pre_target_idx + 1..target_start_idx)
        .any(|idx| comment_token_text(&tokens[idx]).is_some());

    let has_trailing_comments = tokens
        .iter()
        .skip(target_end_idx)
        .take_while(|t| t.span.start.line == target_line)
        .any(|t| comment_token_text(t).is_some());

    let gap_is_whitespace_only = gap.chars().all(char::is_whitespace) && gap.contains('\n');

    // Simple case: no comments anywhere — collapse the gap to one space.
    if !has_gap_comments && !has_trailing_comments && gap_is_whitespace_only {
        return Some(vec![(gap_start, gap_end, " ".to_string())]);
    }

    let target_text = sql[gap_end..target_text_end].to_string();
    let target_indent = detect_indent(sql, gap_end);

    // Indices of comment tokens in the gap and trailing on the target line.
    let gap_comment_indices: Vec<usize> = tokens
        .iter()
        .enumerate()
        .take(target_start_idx)
        .skip(last_pre_target_idx + 1)
        .filter_map(|(idx, token)| comment_token_text(token).map(|_| idx))
        .collect();
    let mut trailing_comment_indices: Vec<usize> = Vec::new();
    for (offset, t) in tokens.iter().enumerate().skip(target_end_idx) {
        if t.span.start.line != target_line {
            break;
        }
        if comment_token_text(t).is_some() {
            trailing_comment_indices.push(offset);
        }
    }

    // Is there code after the target (FROM or any later-line code token)?
    // That decides whether gap comments can be re-homed on their own line.
    let has_subsequent_content = layout.from_idx.is_some()
        || tokens.iter().skip(target_end_idx).any(|t| {
            t.span.start.line > target_line
                && !is_ignorable_layout_token(&t.token)
                && comment_token_text(t).is_none()
        });

    // The two-edit rewrite deletes from the newline before the target; it
    // would overlap the first edit if the gap comment starts after that
    // newline (i.e. comment and target share a line).
    let target_line_nl = sql[..gap_end].rfind('\n');
    let first_gap_comment_start = gap_comment_indices
        .first()
        .and_then(|&idx| token_with_span_offsets(sql, &tokens[idx]).map(|(s, _)| s));
    let two_edit_would_overlap = target_line_nl
        .zip(first_gap_comment_start)
        .is_some_and(|(nl, cs)| cs > nl);

    let mut edits = Vec::new();

    if !gap_comment_indices.is_empty()
        && trailing_comment_indices.is_empty()
        && !two_edit_would_overlap
        && has_subsequent_content
    {
        // Gap comments only: move the target up before the first comment,
        // then delete the original target line.
        let first_comment_idx = gap_comment_indices[0];
        let (first_comment_start, _) = token_with_span_offsets(sql, &tokens[first_comment_idx])?;
        edits.push((
            gap_start,
            first_comment_start,
            format!(" {target_text}\n{target_indent}"),
        ));
        let nl = target_line_nl?;
        edits.push((nl, target_text_end, String::new()));
    } else if !gap_comment_indices.is_empty()
        && trailing_comment_indices.is_empty()
        && (two_edit_would_overlap || !has_subsequent_content)
    {
        // Gap comments sharing the target's line (or no code follows):
        // splice the target before the comments and delete its original copy.
        let first_comment_idx = gap_comment_indices[0];
        let last_comment_idx = *gap_comment_indices.last().unwrap();
        let (first_comment_start, _) = token_with_span_offsets(sql, &tokens[first_comment_idx])?;
        let (_, last_comment_end) = token_with_span_offsets(sql, &tokens[last_comment_idx])?;

        if has_subsequent_content {
            edits.push((
                gap_start,
                first_comment_start,
                format!(" {target_text}\n{target_indent}"),
            ));
            edits.push((last_comment_end, target_text_end, String::new()));
        } else {
            // Nothing follows: keep everything on the SELECT line.
            edits.push((gap_start, first_comment_start, format!(" {target_text} ")));
            edits.push((last_comment_end, target_text_end, String::new()));
        }
    } else if gap_comment_indices.is_empty() && !trailing_comment_indices.is_empty() {
        // Only trailing comments: collapse the gap and break the line right
        // after the target so the comments land on their own indented line.
        edits.push((gap_start, gap_end, " ".to_string()));

        if target_text_end > 0 {
            // Re-emit the last character of the target followed by a newline;
            // using a 1-char anchor keeps this edit's span non-empty.
            let anchor = target_text_end - 1;
            let anchor_char = &sql[anchor..target_text_end];
            edits.push((
                anchor,
                target_text_end,
                format!("{anchor_char}\n{target_indent}"),
            ));
        } else {
            return None;
        }
    } else if !gap_comment_indices.is_empty() && !trailing_comment_indices.is_empty() {
        // Both gap and trailing comments: move the target up, then join the
        // gap comments with the trailing ones on an indented comment line.
        let first_comment_idx = gap_comment_indices[0];
        let (first_comment_start, _) = token_with_span_offsets(sql, &tokens[first_comment_idx])?;

        edits.push((
            gap_start,
            first_comment_start,
            format!(" {target_text}\n{target_indent}"),
        ));

        let last_gap_comment_idx = *gap_comment_indices.last().unwrap();
        let (_, last_gap_comment_end) =
            token_with_span_offsets(sql, &tokens[last_gap_comment_idx])?;
        let first_trailing_idx = trailing_comment_indices[0];
        let (first_trailing_start, _) = token_with_span_offsets(sql, &tokens[first_trailing_idx])?;

        edits.push((
            last_gap_comment_end,
            first_trailing_start,
            target_indent.to_string(),
        ));
    } else {
        return None;
    }

    Some(edits)
}
727
/// Minimal autofix for the case where the ONLY violation is FROM sharing a
/// line with the last target: replaces the whitespace before FROM with a
/// newline. Returns `None` when the gap contains anything other than
/// single-line whitespace (e.g. comments or an existing line break).
fn safe_from_newline_fix_span(
    sql: &str,
    tokens: &[TokenWithSpan],
    layout: &SelectClauseLayout,
) -> Option<Lt09AutofixEdits> {
    let from_idx = layout.from_idx?;
    if !only_from_shares_last_target_line_violation(layout, tokens) {
        return None;
    }

    let (_, last_target_end_idx) = *layout.target_ranges.last()?;
    if last_target_end_idx == 0 {
        return None;
    }
    let last_token_idx = last_target_end_idx - 1;

    // The gap is the text between the last target token and FROM.
    let (_, gap_start) = token_with_span_offsets(sql, &tokens[last_token_idx])?;
    let (gap_end, _) = token_with_span_offsets(sql, &tokens[from_idx])?;
    if gap_start > gap_end || gap_end > sql.len() {
        return None;
    }

    let gap = &sql[gap_start..gap_end];
    if gap.chars().all(char::is_whitespace) && !gap.contains('\n') && !gap.contains('\r') {
        Some(vec![(gap_start, gap_end, "\n".to_string())])
    } else {
        None
    }
}
757
/// Autofix for multi-target clauses: rewrites everything between the last
/// pre-target token (SELECT or its modifier) and FROM so each target sits on
/// its own line, indented four spaces past the SELECT line's indentation.
/// Bails out with `None` when comments appear anywhere in that region, since
/// reflowing could detach them from the code they describe, or when the
/// clause has no FROM / fewer than two targets.
fn safe_multi_target_reflow_span(
    sql: &str,
    tokens: &[TokenWithSpan],
    layout: &SelectClauseLayout,
) -> Option<Lt09AutofixEdits> {
    if layout.target_ranges.len() < 2 {
        return None;
    }

    let from_idx = layout.from_idx?;

    let first_target_idx = layout
        .target_ranges
        .first()
        .map(|(start_idx, _)| *start_idx)?;
    let last_pre_target_idx = (layout.select_idx..first_target_idx)
        .rev()
        .find(|&idx| !is_ignorable_layout_token(&tokens[idx].token))?;

    // Any comment between SELECT(+modifiers) and FROM makes the reflow unsafe.
    if (last_pre_target_idx + 1..from_idx).any(|idx| comment_token_text(&tokens[idx]).is_some()) {
        return None;
    }

    // Replace the whole region from after SELECT up to FROM.
    let (_, replace_start) = token_with_span_offsets(sql, &tokens[last_pre_target_idx])?;
    let (replace_end, _) = token_with_span_offsets(sql, &tokens[from_idx])?;
    if replace_start > replace_end || replace_end > sql.len() {
        return None;
    }

    // Targets are indented one level (4 spaces) past the SELECT line.
    let (select_start, _) = token_with_span_offsets(sql, &tokens[layout.select_idx])?;
    let base_indent = detect_indent(sql, select_start);
    let target_indent = format!("{base_indent}    ");

    // Rebuild the projection: one target per line, comma-separated, with
    // FROM pushed onto its own line at the base indent.
    let mut replacement = String::from('\n');
    for (idx, (target_start_idx, target_end_idx)) in layout.target_ranges.iter().enumerate() {
        let (target_start, _) = token_with_span_offsets(sql, &tokens[*target_start_idx])?;
        let (_, target_end) = token_with_span_offsets(sql, &tokens[target_end_idx - 1])?;
        if target_start > target_end || target_end > sql.len() {
            return None;
        }

        replacement.push_str(&target_indent);
        replacement.push_str(&sql[target_start..target_end]);
        if idx + 1 < layout.target_ranges.len() {
            replacement.push(',');
        }
        replacement.push('\n');
    }
    replacement.push_str(&base_indent);

    Some(vec![(replace_start, replace_end, replacement)])
}
810
/// True when the ONLY layout problem is FROM sharing a line with the last
/// target — i.e. every target already starts on its own line. Used to select
/// the minimal newline-before-FROM autofix.
fn only_from_shares_last_target_line_violation(
    layout: &SelectClauseLayout,
    tokens: &[TokenWithSpan],
) -> bool {
    let Some(from_idx) = layout.from_idx else {
        return false;
    };
    let Some((last_start_idx, _)) = layout.target_ranges.last().copied() else {
        return false;
    };

    // FROM must actually share the last target's line.
    let last_target_line = tokens[last_start_idx].span.start.line;
    if tokens[from_idx].span.start.line != last_target_line {
        return false;
    }

    // And no target may share a line with preceding code (SELECT or an
    // earlier target) — otherwise a larger fix is needed.
    for (target_start, _) in &layout.target_ranges {
        let target_line = tokens[*target_start].span.start.line;
        if last_code_line_before(tokens, layout.select_idx, *target_start)
            .is_some_and(|prev_line| prev_line == target_line)
        {
            return false;
        }
    }

    true
}
838
839fn comment_token_text(token: &TokenWithSpan) -> Option<String> {
842 use sqlparser::tokenizer::Whitespace;
843 match &token.token {
844 Token::Whitespace(Whitespace::SingleLineComment { prefix, comment }) => {
845 let text = format!("{prefix}{comment}");
846 Some(
847 text.trim_end_matches('\n')
848 .trim_end_matches('\r')
849 .to_string(),
850 )
851 }
852 Token::Whitespace(Whitespace::MultiLineComment(content)) => Some(format!("/*{content}*/")),
853 _ => None,
854 }
855}
856
/// Returns the leading whitespace of the line containing byte `offset`
/// (everything from the line start up to the first non-whitespace character).
fn detect_indent(sql: &str, offset: usize) -> String {
    let line_start = match sql[..offset].rfind('\n') {
        Some(pos) => pos + 1,
        None => 0,
    };

    let mut indent = String::new();
    for ch in sql[line_start..].chars() {
        if !ch.is_whitespace() || ch == '\n' {
            break;
        }
        indent.push(ch);
    }
    indent
}
867
/// Converts a 1-based (line, column) position into a byte offset in `sql`,
/// counting columns in characters. The position one past the final character
/// maps to `sql.len()`. Returns `None` for zero line/column or positions
/// beyond the end of the text.
fn line_col_to_offset(sql: &str, line: usize, column: usize) -> Option<usize> {
    if line == 0 || column == 0 {
        return None;
    }

    let (mut cur_line, mut cur_col) = (1usize, 1usize);
    for (offset, ch) in sql.char_indices() {
        if (cur_line, cur_col) == (line, column) {
            return Some(offset);
        }
        if ch == '\n' {
            cur_line += 1;
            cur_col = 1;
        } else {
            cur_col += 1;
        }
    }

    // The requested position may be exactly one past the last character.
    ((cur_line, cur_col) == (line, column)).then_some(sql.len())
}
895
896fn token_with_span_offsets(sql: &str, token: &TokenWithSpan) -> Option<(usize, usize)> {
897 let start = line_col_to_offset(
898 sql,
899 token.span.start.line as usize,
900 token.span.start.column as usize,
901 )?;
902 let end = line_col_to_offset(
903 sql,
904 token.span.end.line as usize,
905 token.span.end.column as usize,
906 )?;
907 Some((start, end))
908}
909
/// Converts a byte `offset` into a 1-based (line, column) pair, counting
/// columns in characters. `offset == sql.len()` yields the position one past
/// the final character. Returns `None` when the offset is out of range or
/// does not land on a character boundary.
fn offset_to_line_col(sql: &str, offset: usize) -> Option<(usize, usize)> {
    if offset > sql.len() {
        return None;
    }

    let mut line = 1usize;
    let mut column = 1usize;
    for (index, ch) in sql.char_indices() {
        if index == offset {
            return Some((line, column));
        }
        if ch == '\n' {
            line += 1;
            column = 1;
        } else {
            column += 1;
        }
    }

    // The loop consumed every character; only the end-of-text position is
    // valid here (a mid-character offset was never matched above).
    (offset == sql.len()).then_some((line, column))
}
944
945fn relative_location(
946 location: Location,
947 statement_start_line: usize,
948 statement_start_column: usize,
949) -> Option<Location> {
950 let line = location.line as usize;
951 let column = location.column as usize;
952 if line < statement_start_line {
953 return None;
954 }
955
956 if line == statement_start_line {
957 if column < statement_start_column {
958 return None;
959 }
960 return Some(Location::new(
961 1,
962 (column - statement_start_column + 1) as u64,
963 ));
964 }
965
966 Some(Location::new(
967 (line - statement_start_line + 1) as u64,
968 column as u64,
969 ))
970}
971
#[cfg(test)]
mod tests {
    //! LT09 rule tests: violation detection for single/multiple target
    //! layouts, wildcard-policy handling, and autofix application.
    use super::*;
    use crate::linter::config::LintConfig;
    use crate::parser::parse_sql;
    use crate::types::IssueAutofixApplicability;

    // Parses `sql` and runs `rule` over every statement, collecting issues.
    fn run_with_rule(sql: &str, rule: &LayoutSelectTargets) -> Vec<Issue> {
        let statements = parse_sql(sql).expect("parse");
        statements
            .iter()
            .enumerate()
            .flat_map(|(index, statement)| {
                rule.check(
                    statement,
                    &LintContext {
                        sql,
                        statement_range: 0..sql.len(),
                        statement_index: index,
                    },
                )
            })
            .collect()
    }

    // Runs the default rule (wildcard policy `Single`).
    fn run(sql: &str) -> Vec<Issue> {
        run_with_rule(sql, &LayoutSelectTargets::default())
    }

    // Runs the rule with `wildcard_policy` injected via rule configuration.
    fn run_with_wildcard_policy(sql: &str, policy: &str) -> Vec<Issue> {
        let config = LintConfig {
            enabled: true,
            disabled_rules: vec![],
            rule_configs: std::collections::BTreeMap::from([(
                "layout.select_targets".to_string(),
                serde_json::json!({"wildcard_policy": policy}),
            )]),
        };
        let rule = LayoutSelectTargets::from_config(&config);
        run_with_rule(sql, &rule)
    }

    // Applies an issue's autofix edits to `sql`, right-to-left so earlier
    // spans stay valid, and returns the patched text.
    fn apply_issue_autofix(sql: &str, issue: &Issue) -> Option<String> {
        let autofix = issue.autofix.as_ref()?;
        let mut out = sql.to_string();
        let mut edits = autofix.edits.clone();
        edits.sort_by_key(|edit| (edit.span.start, edit.span.end));
        for edit in edits.into_iter().rev() {
            out.replace_range(edit.span.start..edit.span.end, &edit.replacement);
        }
        Some(out)
    }

    #[test]
    fn flags_multiple_targets_on_same_select_line() {
        assert!(!run("SELECT a,b,c,d,e FROM t").is_empty());
    }

    #[test]
    fn does_not_flag_single_target() {
        assert!(run("SELECT a FROM t").is_empty());
    }

    #[test]
    fn flags_each_select_line_with_multiple_targets() {
        let issues = run("SELECT a, b FROM t UNION ALL SELECT c, d FROM t");
        assert_eq!(
            issues
                .iter()
                .filter(|issue| issue.code == issue_codes::LINT_LT_009)
                .count(),
            2,
        );
    }

    #[test]
    fn does_not_flag_select_word_inside_single_quoted_string() {
        assert!(run("SELECT 'SELECT a, b' AS txt").is_empty());
    }

    #[test]
    fn multiple_wildcard_policy_flags_single_wildcard_target() {
        let issues = run_with_wildcard_policy("SELECT * FROM t", "multiple");
        assert_eq!(issues.len(), 1);
        assert_eq!(issues[0].code, issue_codes::LINT_LT_009);
    }

    #[test]
    fn wildcard_policy_alias_allow_multiple_is_supported() {
        let issues = run_with_wildcard_policy("SELECT * FROM t", "allow_multiple");
        assert_eq!(issues.len(), 1);
    }

    #[test]
    fn multiple_wildcard_policy_does_not_treat_multiplication_as_wildcard() {
        let issues = run_with_wildcard_policy("SELECT a * b FROM t", "multiple");
        assert!(issues.is_empty());
    }

    #[test]
    fn flags_single_target_on_new_line_after_select() {
        let sql = "SELECT\n    a\nFROM x";
        assert!(!run(sql).is_empty());
    }

    #[test]
    fn flags_single_target_when_select_followed_by_comment_line() {
        let sql = "SELECT -- some comment\na";
        assert!(!run(sql).is_empty());
    }

    #[test]
    fn does_not_flag_single_multiline_target() {
        let sql = "SELECT\n    SUM(\n        1 + 2\n    ) AS col\nFROM t";
        assert!(run(sql).is_empty());
    }

    #[test]
    fn flags_last_multi_target_sharing_line_with_from() {
        let sql = "select\n    a,\n    b,\n    c from x";
        let issues = run(sql);
        assert!(!issues.is_empty());
        let autofix = issues[0].autofix.as_ref().expect("autofix metadata");
        assert_eq!(autofix.applicability, IssueAutofixApplicability::Safe);
        let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
        assert_eq!(fixed, "select\n    a,\n    b,\n    c\nfrom x");
    }

    #[test]
    fn dense_multi_target_layout_violation_autofixes_to_one_target_per_line() {
        let sql = "SELECT a,b,c,d,e FROM t";
        let issues = run(sql);
        assert!(!issues.is_empty());
        let autofix = issues[0].autofix.as_ref().expect("autofix metadata");
        assert_eq!(autofix.applicability, IssueAutofixApplicability::Safe);
        let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
        assert_eq!(
            fixed,
            "SELECT\n    a,\n    b,\n    c,\n    d,\n    e\nFROM t"
        );
    }

    #[test]
    fn wrapped_multi_target_layout_autofixes_when_from_is_on_next_line() {
        let sql = "SELECT a, b, c,\n    d\nFROM t";
        let issues = run(sql);
        assert!(!issues.is_empty());
        let autofix = issues[0].autofix.as_ref().expect("autofix metadata");
        assert_eq!(autofix.applicability, IssueAutofixApplicability::Safe);
        let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
        assert_eq!(fixed, "SELECT\n    a,\n    b,\n    c,\n    d\nFROM t");
    }

    #[test]
    fn flags_in_cte_single_target_newline_case() {
        let sql = "WITH cte1 AS (\n    SELECT\n        c1 AS c\n    FROM t\n)\nSELECT 1 FROM cte1";
        assert!(!run(sql).is_empty());
    }

    #[test]
    fn flags_in_create_view_single_target_newline_case() {
        let sql = "CREATE VIEW a AS\nSELECT\n    c\nFROM table1";
        assert!(!run(sql).is_empty());
    }

    #[test]
    fn multiple_wildcard_policy_flags_star_with_from_on_same_line() {
        let sql = "select\n    * from x";
        assert!(!run_with_wildcard_policy(sql, "multiple").is_empty());
    }

    #[test]
    fn multiple_wildcard_policy_allows_star_on_own_line() {
        let sql = "select\n    *\nfrom x";
        assert!(run_with_wildcard_policy(sql, "multiple").is_empty());
    }

    #[test]
    fn single_target_autofix_collapses_to_select_line() {
        let sql = "SELECT\n    a\nFROM x";
        let issues = run(sql);
        assert!(!issues.is_empty());
        let autofix = issues[0].autofix.as_ref().expect("autofix metadata");
        assert_eq!(autofix.applicability, IssueAutofixApplicability::Safe);
        let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
        assert_eq!(fixed, "SELECT a\nFROM x");
    }

    #[test]
    fn single_target_autofix_with_distinct() {
        let sql = "SELECT DISTINCT\n    a\nFROM x";
        let issues = run(sql);
        assert!(!issues.is_empty());
        let autofix = issues[0].autofix.as_ref().expect("autofix metadata");
        assert_eq!(autofix.applicability, IssueAutofixApplicability::Safe);
        let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
        assert_eq!(fixed, "SELECT DISTINCT a\nFROM x");
    }

    #[test]
    fn allows_leading_comma_layout_for_multiple_targets() {
        let sql = "select\n    a\n    , b\n    , c";
        assert!(run(sql).is_empty());
    }

    #[test]
    fn single_target_with_comment_before_collapses() {
        let sql = "SELECT\n    -- This is the user's ID.\n    user_id\nFROM\n    safe_user";
        let issues = run(sql);
        assert!(!issues.is_empty());
        let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
        assert_eq!(
            fixed,
            "SELECT user_id\n    -- This is the user's ID.\nFROM\n    safe_user"
        );
    }

    #[test]
    fn single_target_with_block_comment_before_collapses_inline() {
        let sql = "SELECT\n    /* test */ 10000000";
        let issues = run(sql);
        assert!(!issues.is_empty());
        let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
        assert_eq!(fixed, "SELECT 10000000 /* test */");
    }

    #[test]
    fn single_target_with_trailing_inline_comment_collapses() {
        let sql = "SELECT\n    1-- this is a comment\nFROM\n    my_table";
        let issues = run(sql);
        assert!(!issues.is_empty());
        let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
        assert_eq!(fixed, "SELECT 1\n    -- this is a comment\nFROM\n    my_table");
    }

    #[test]
    fn single_target_with_block_comment_before_on_same_line_collapses() {
        let sql = "SELECT\n    /* comment before */ 1\nFROM\n    my_table";
        let issues = run(sql);
        assert!(!issues.is_empty());
        let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
        assert_eq!(fixed, "SELECT 1\n    /* comment before */\nFROM\n    my_table");
    }

    #[test]
    fn does_not_flag_distinct_on_with_targets_on_own_lines() {
        let sql = "SELECT DISTINCT ON (a.id)\n    a.id,\n    a.name\nFROM a";
        assert!(run(sql).is_empty());
    }

    #[test]
    fn does_not_flag_distinct_on_single_target_inline() {
        let sql = "SELECT DISTINCT ON (a.id) a.name FROM a";
        assert!(run(sql).is_empty());
    }

    #[test]
    fn single_target_with_multiple_mixed_comments_collapses() {
        let sql = "SELECT\n    -- previous comment\n    1 -- this is a comment\nFROM\n    my_table";
        let issues = run(sql);
        assert!(!issues.is_empty());
        let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
        assert_eq!(
            fixed,
            "SELECT 1\n    -- previous comment\n    -- this is a comment\nFROM\n    my_table"
        );
    }
}