1use std::collections::HashSet;
7
8use crate::linter::config::LintConfig;
9use crate::linter::rule::{LintContext, LintRule};
10use crate::types::{issue_codes, Dialect, Issue, IssueAutofixApplicability, IssuePatchEdit, Span};
11use regex::Regex;
12use sqlparser::ast::Statement;
13use sqlparser::tokenizer::{Token, TokenWithSpan, Tokenizer, Whitespace};
14
15use super::capitalisation_policy_helpers::{
16 apply_camel_transform, apply_pascal_transform, apply_snake_transform,
17 ignored_words_from_config, ignored_words_regex_from_config, token_is_ignored,
18 tokens_violate_policy, CapitalisationPolicy,
19};
20
/// Lint rule LINT_CP_003: enforces a single capitalisation style for SQL
/// function names (e.g. `COUNT(x)` vs `count(x)`).
pub struct CapitalisationFunctions {
    // Target style; `Consistent` means "infer the style from observed usage".
    policy: CapitalisationPolicy,
    // Function names exempted from the check (exact-match list from config).
    ignore_words: HashSet<String>,
    // Optional pattern; names matching it are also exempt.
    ignore_words_regex: Option<Regex>,
    // When true, skip the rendered-token pass on templated documents.
    ignore_templated_areas: bool,
}
27
28impl CapitalisationFunctions {
29 pub fn from_config(config: &LintConfig) -> Self {
30 Self {
31 policy: CapitalisationPolicy::from_rule_config(
32 config,
33 issue_codes::LINT_CP_003,
34 "extended_capitalisation_policy",
35 ),
36 ignore_words: ignored_words_from_config(config, issue_codes::LINT_CP_003),
37 ignore_words_regex: ignored_words_regex_from_config(config, issue_codes::LINT_CP_003),
38 ignore_templated_areas: config
39 .core_option_bool("ignore_templated_areas")
40 .unwrap_or(false),
41 }
42 }
43}
44
45impl Default for CapitalisationFunctions {
46 fn default() -> Self {
47 Self {
48 policy: CapitalisationPolicy::Consistent,
49 ignore_words: HashSet::new(),
50 ignore_words_regex: None,
51 ignore_templated_areas: false,
52 }
53 }
54}
55
impl LintRule for CapitalisationFunctions {
    fn code(&self) -> &'static str {
        issue_codes::LINT_CP_003
    }

    fn name(&self) -> &'static str {
        "Function capitalisation"
    }

    fn description(&self) -> &'static str {
        "Inconsistent capitalisation of function names."
    }

    /// Flags function names whose capitalisation violates the configured
    /// policy, attaching safe autofix edits where a rewrite can be computed.
    fn check(&self, _statement: &Statement, ctx: &LintContext) -> Vec<Issue> {
        // Candidates come from re-tokenizing the statement source text.
        let functions = function_candidates_for_context(
            ctx,
            &self.ignore_words,
            self.ignore_words_regex.as_ref(),
        );
        if functions.is_empty() {
            return Vec::new();
        }

        // Policy evaluation normally runs on the source-token values...
        let mut function_tokens = functions
            .iter()
            .map(|candidate| candidate.value.clone())
            .collect::<Vec<_>>();
        // ...but for templated documents (unless configured otherwise) prefer
        // the rendered document tokens, which reflect post-expansion SQL.
        // An empty rendered set falls back to the source tokens.
        if ctx.is_templated() && !self.ignore_templated_areas {
            if let Some(rendered_tokens) = rendered_function_values_for_context(
                ctx,
                &self.ignore_words,
                self.ignore_words_regex.as_ref(),
            ) {
                if !rendered_tokens.is_empty() {
                    function_tokens = rendered_tokens;
                }
            }
        }
        if !tokens_violate_policy(&function_tokens, self.policy) {
            return Vec::new();
        }

        // `Consistent` has no fixed target case; derive one from the values seen.
        let resolved_policy = if self.policy == CapitalisationPolicy::Consistent {
            resolve_consistent_policy_from_values(&function_tokens)
        } else {
            self.policy
        };

        let autofix_edits = function_autofix_edits(ctx, &functions, resolved_policy);

        // No applicable rewrites: report a single span-less informational issue.
        if autofix_edits.is_empty() {
            return vec![Issue::info(
                issue_codes::LINT_CP_003,
                "Function names use inconsistent capitalisation.",
            )
            .with_statement(ctx.statement_index)];
        }

        // One issue per edit, so each finding carries its own span and patch.
        autofix_edits
            .into_iter()
            .map(|edit| {
                let span = Span::new(edit.span.start, edit.span.end);
                Issue::info(
                    issue_codes::LINT_CP_003,
                    "Function names use inconsistent capitalisation.",
                )
                .with_statement(ctx.statement_index)
                .with_span(span)
                .with_autofix_edits(IssueAutofixApplicability::Safe, vec![edit])
            })
            .collect()
    }
}
131
/// A function-name token located in the statement source.
#[derive(Clone, Debug)]
struct FunctionCandidate {
    // Token text exactly as written in the SQL.
    value: String,
    // Byte offsets of the token within the statement SQL (start..end).
    start: usize,
    end: usize,
}
138
139fn function_candidates_for_context(
140 ctx: &LintContext,
141 ignore_words: &HashSet<String>,
142 ignore_words_regex: Option<&Regex>,
143) -> Vec<FunctionCandidate> {
144 let sql = ctx.statement_sql();
145 let Some(tokens) = tokenized(sql, ctx.dialect()) else {
146 return Vec::new();
147 };
148
149 let mut candidates = function_candidates(sql, &tokens, ignore_words, ignore_words_regex);
150 candidates.sort_by_key(|candidate| (candidate.start, candidate.end));
151 candidates
152}
153
154fn function_candidates(
155 sql: &str,
156 tokens: &[TokenWithSpan],
157 ignore_words: &HashSet<String>,
158 ignore_words_regex: Option<&Regex>,
159) -> Vec<FunctionCandidate> {
160 let mut out = Vec::new();
161
162 for (index, token) in tokens.iter().enumerate() {
163 let Token::Word(word) = &token.token else {
164 continue;
165 };
166
167 if token_is_ignored(word.value.as_str(), ignore_words, ignore_words_regex) {
168 continue;
169 }
170
171 if index > 0 && matches!(tokens[index - 1].token, Token::Period) {
174 continue;
175 }
176
177 if is_data_type_keyword(word.value.as_str()) {
181 continue;
182 }
183
184 if is_non_function_sql_keyword(word.value.as_str()) {
187 continue;
188 }
189
190 let next_index = next_non_trivia_index(tokens, index + 1);
191 let is_regular_function_call = next_index
192 .map(|idx| matches!(tokens[idx].token, Token::LParen))
193 .unwrap_or(false);
194 let is_bare_function = is_bare_function_keyword(word.value.as_str());
195 if !is_regular_function_call && !is_bare_function {
196 continue;
197 }
198
199 let Some((start, end)) = token_offsets(sql, token) else {
200 continue;
201 };
202
203 out.push(FunctionCandidate {
204 value: word.value.clone(),
205 start,
206 end,
207 });
208 }
209
210 out
211}
212
213fn function_autofix_edits(
214 ctx: &LintContext,
215 functions: &[FunctionCandidate],
216 resolved_policy: CapitalisationPolicy,
217) -> Vec<IssuePatchEdit> {
218 let mut ordered_functions = functions.to_vec();
219 ordered_functions.sort_by_key(|candidate| (candidate.start, candidate.end));
220
221 let mut edits = Vec::new();
222
223 for candidate in &ordered_functions {
224 let Some(replacement) =
225 function_case_replacement(candidate.value.as_str(), resolved_policy)
226 else {
227 continue;
228 };
229 if replacement == candidate.value {
230 continue;
231 }
232
233 edits.push(IssuePatchEdit::new(
234 ctx.span_from_statement_offset(candidate.start, candidate.end),
235 replacement,
236 ));
237 }
238
239 edits.sort_by_key(|edit| (edit.span.start, edit.span.end));
240 edits.dedup_by(|left, right| {
241 left.span.start == right.span.start
242 && left.span.end == right.span.end
243 && left.replacement == right.replacement
244 });
245 edits
246}
247
248fn function_case_replacement(value: &str, policy: CapitalisationPolicy) -> Option<String> {
249 match policy {
250 CapitalisationPolicy::Consistent => {
251 Some(value.to_ascii_lowercase())
253 }
254 CapitalisationPolicy::Lower => Some(value.to_ascii_lowercase()),
255 CapitalisationPolicy::Upper => Some(value.to_ascii_uppercase()),
256 CapitalisationPolicy::Capitalise => Some(capitalise_ascii_token(value)),
257 CapitalisationPolicy::Pascal => Some(apply_pascal_transform(value)),
258 CapitalisationPolicy::Camel => Some(apply_camel_transform(value)),
259 CapitalisationPolicy::Snake => Some(apply_snake_transform(value)),
260 }
261}
262
263fn resolve_consistent_policy_from_values(values: &[String]) -> CapitalisationPolicy {
264 const UPPER: u8 = 0b001;
265 const LOWER: u8 = 0b010;
266 const CAPITALISE: u8 = 0b100;
267
268 let mut refuted: u8 = 0;
269 let mut latest_possible = CapitalisationPolicy::Upper; for v in values {
272 let v = v.as_str();
273
274 let first_is_lower = v
275 .chars()
276 .find(|c| c.is_ascii_alphabetic())
277 .is_some_and(|c| c.is_ascii_lowercase());
278
279 if first_is_lower {
280 refuted |= UPPER | CAPITALISE;
281 if v != v.to_ascii_lowercase() {
282 refuted |= LOWER;
283 }
284 } else {
285 refuted |= LOWER;
286 if v != v.to_ascii_uppercase() {
287 refuted |= UPPER;
288 }
289 if v != capitalise_ascii_token(v) {
290 refuted |= CAPITALISE;
291 }
292 }
293
294 let possible = (UPPER | LOWER | CAPITALISE) & !refuted;
295 if possible == 0 {
296 return latest_possible;
297 }
298
299 if possible & UPPER != 0 {
300 latest_possible = CapitalisationPolicy::Upper;
301 } else if possible & LOWER != 0 {
302 latest_possible = CapitalisationPolicy::Lower;
303 } else {
304 latest_possible = CapitalisationPolicy::Capitalise;
305 }
306 }
307
308 latest_possible
309}
310
311fn rendered_function_values_for_context(
312 ctx: &LintContext,
313 ignore_words: &HashSet<String>,
314 ignore_words_regex: Option<&Regex>,
315) -> Option<Vec<String>> {
316 ctx.with_document_tokens(|tokens| {
317 if tokens.is_empty() {
318 return None;
319 }
320 Some(function_token_values(
321 tokens,
322 ignore_words,
323 ignore_words_regex,
324 ))
325 })
326}
327
328fn function_token_values(
329 tokens: &[TokenWithSpan],
330 ignore_words: &HashSet<String>,
331 ignore_words_regex: Option<&Regex>,
332) -> Vec<String> {
333 let mut out = Vec::new();
334
335 for (index, token) in tokens.iter().enumerate() {
336 let Token::Word(word) = &token.token else {
337 continue;
338 };
339
340 if token_is_ignored(word.value.as_str(), ignore_words, ignore_words_regex) {
341 continue;
342 }
343
344 if index > 0 && matches!(tokens[index - 1].token, Token::Period) {
345 continue;
346 }
347
348 if is_data_type_keyword(word.value.as_str()) {
349 continue;
350 }
351
352 let next_index = next_non_trivia_index(tokens, index + 1);
353 let is_regular_function_call = next_index
354 .map(|idx| matches!(tokens[idx].token, Token::LParen))
355 .unwrap_or(false);
356 let is_bare_function = is_bare_function_keyword(word.value.as_str());
357 if !is_regular_function_call && !is_bare_function {
358 continue;
359 }
360
361 out.push((
362 token.span.start.line,
363 token.span.start.column,
364 word.value.clone(),
365 ));
366 }
367
368 out.sort_by_key(|(line, column, _)| (*line, *column));
369 out.into_iter().map(|(_, _, value)| value).collect()
370}
371
/// Recases `value` so its first ASCII letter is uppercase and every later
/// ASCII letter is lowercase; non-alphabetic characters pass through unchanged
/// (e.g. `current_timestamp` -> `Current_timestamp`).
fn capitalise_ascii_token(value: &str) -> String {
    let mut seen_alpha = false;
    value
        .chars()
        .map(|ch| {
            if !ch.is_ascii_alphabetic() {
                return ch;
            }
            let mapped = if seen_alpha {
                ch.to_ascii_lowercase()
            } else {
                ch.to_ascii_uppercase()
            };
            seen_alpha = true;
            mapped
        })
        .collect()
}
392
393fn tokenized(sql: &str, dialect: Dialect) -> Option<Vec<TokenWithSpan>> {
394 let dialect = dialect.to_sqlparser_dialect();
395 let mut tokenizer = Tokenizer::new(dialect.as_ref(), sql);
396 tokenizer.tokenize_with_location().ok()
397}
398
399fn next_non_trivia_index(tokens: &[TokenWithSpan], mut index: usize) -> Option<usize> {
400 while index < tokens.len() {
401 if !is_trivia_token(&tokens[index].token) {
402 return Some(index);
403 }
404 index += 1;
405 }
406 None
407}
408
409fn is_trivia_token(token: &Token) -> bool {
410 matches!(
411 token,
412 Token::Whitespace(
413 Whitespace::Space
414 | Whitespace::Newline
415 | Whitespace::Tab
416 | Whitespace::SingleLineComment { .. }
417 | Whitespace::MultiLineComment(_)
418 )
419 )
420}
421
422fn token_offsets(sql: &str, token: &TokenWithSpan) -> Option<(usize, usize)> {
423 let start = line_col_to_offset(
424 sql,
425 token.span.start.line as usize,
426 token.span.start.column as usize,
427 )?;
428 let end = line_col_to_offset(
429 sql,
430 token.span.end.line as usize,
431 token.span.end.column as usize,
432 )?;
433 Some((start, end))
434}
435
/// Converts a 1-based (line, column) position into a byte offset in `sql`.
/// Columns count characters, not bytes. The one-past-the-end position maps to
/// `sql.len()`; positions beyond the text (or any zero coordinate) yield `None`.
fn line_col_to_offset(sql: &str, line: usize, column: usize) -> Option<usize> {
    // Coordinates are 1-based; zero is never valid.
    if line == 0 || column == 0 {
        return None;
    }

    let (mut cur_line, mut cur_col) = (1usize, 1usize);

    for (offset, ch) in sql.char_indices() {
        if (cur_line, cur_col) == (line, column) {
            return Some(offset);
        }
        match ch {
            '\n' => {
                cur_line += 1;
                cur_col = 1;
            }
            _ => cur_col += 1,
        }
    }

    // The position just past the final character is still addressable.
    ((cur_line, cur_col) == (line, column)).then_some(sql.len())
}
463
/// True for SQL niladic functions that are callable without parentheses.
/// Comparison is case-insensitive.
fn is_bare_function_keyword(value: &str) -> bool {
    const BARE_FUNCTIONS: &[&str] = &[
        "CURRENT_TIMESTAMP",
        "CURRENT_DATE",
        "CURRENT_TIME",
        "LOCALTIME",
        "LOCALTIMESTAMP",
    ];
    BARE_FUNCTIONS.contains(&value.to_ascii_uppercase().as_str())
}
470
/// True for clause and operator keywords that can legally precede `(` without
/// being a function call (e.g. `VALUES (...)`, `IN (...)`, `ON (...)`).
/// Comparison is case-insensitive.
fn is_non_function_sql_keyword(value: &str) -> bool {
    const KEYWORDS: &[&str] = &[
        "IN", "NOT", "EXISTS", "BETWEEN", "LIKE", "ILIKE",
        "AND", "OR", "IS",
        "AS", "ON", "USING", "OVER", "FILTER", "WITHIN",
        "VALUES", "SET", "INTO", "FROM", "WHERE", "HAVING",
        "SELECT", "TABLE", "JOIN",
        "CONFLICT",
        "UNION", "INTERSECT", "EXCEPT", "WITH", "RECURSIVE",
        "WHEN", "THEN", "ELSE", "END", "CASE",
        "GROUP", "ORDER", "PARTITION", "LIMIT", "OFFSET", "FETCH",
        "NULL", "TRUE", "FALSE", "DISTINCT", "LATERAL",
    ];
    KEYWORDS.contains(&value.to_ascii_uppercase().as_str())
}
496
/// True for data-type names (e.g. in `CAST(x AS INT)`), which may precede `(`
/// for a length/precision argument without being function calls.
/// Comparison is case-insensitive.
fn is_data_type_keyword(value: &str) -> bool {
    const DATA_TYPES: &[&str] = &[
        "INT", "INTEGER", "BIGINT", "SMALLINT", "TINYINT",
        "VARCHAR", "CHAR", "TEXT",
        "BOOLEAN", "BOOL",
        "STRING", "INT64", "FLOAT64", "BYTES",
        "TIME", "TIMESTAMP", "INTERVAL",
        "NUMERIC", "DECIMAL", "FLOAT", "DOUBLE",
        "STRUCT", "ARRAY", "MAP", "ENUM",
    ];
    DATA_TYPES.contains(&value.to_ascii_uppercase().as_str())
}
529
#[cfg(test)]
mod tests {
    use super::*;
    use crate::linter::config::LintConfig;
    use crate::linter::rule::{with_active_document_tokens, with_active_is_templated};
    use crate::parser::parse_sql;
    use crate::types::IssueAutofixApplicability;

    // Parses `sql` and runs the default (consistent-policy) rule over every statement.
    fn run(sql: &str) -> Vec<Issue> {
        let statements = parse_sql(sql).expect("parse");
        let rule = CapitalisationFunctions::default();
        statements
            .iter()
            .enumerate()
            .flat_map(|(index, statement)| {
                rule.check(
                    statement,
                    &LintContext {
                        sql,
                        statement_range: 0..sql.len(),
                        statement_index: index,
                    },
                )
            })
            .collect()
    }

    // Applies one issue's autofix edits to `sql`, back-to-front so earlier spans stay valid.
    fn apply_issue_autofix(sql: &str, issue: &Issue) -> Option<String> {
        let autofix = issue.autofix.as_ref()?;
        let mut out = sql.to_string();
        let mut edits = autofix.edits.clone();
        edits.sort_by_key(|edit| (edit.span.start, edit.span.end));
        for edit in edits.into_iter().rev() {
            out.replace_range(edit.span.start..edit.span.end, &edit.replacement);
        }
        Some(out)
    }

    // Applies every issue's autofix edits at once (also back-to-front).
    fn apply_all_autofixes(sql: &str, issues: &[Issue]) -> String {
        let mut edits: Vec<_> = issues
            .iter()
            .filter_map(|i| i.autofix.as_ref())
            .flat_map(|a| a.edits.clone())
            .collect();
        edits.sort_by_key(|edit| (edit.span.start, edit.span.end));
        let mut out = sql.to_string();
        for edit in edits.into_iter().rev() {
            out.replace_range(edit.span.start..edit.span.end, &edit.replacement);
        }
        out
    }

    // Mixed COUNT/count in one statement violates the consistent policy.
    #[test]
    fn flags_mixed_function_case() {
        let issues = run("SELECT COUNT(*), count(x) FROM t");
        assert_eq!(issues.len(), 1);
        assert_eq!(issues[0].code, issue_codes::LINT_CP_003);
    }

    // The emitted autofix is marked Safe and rewrites toward the dominant case.
    #[test]
    fn emits_safe_autofix_for_mixed_function_case() {
        let sql = "SELECT COUNT(*), count(x) FROM t";
        let issues = run(sql);
        assert_eq!(issues.len(), 1);
        let autofix = issues[0].autofix.as_ref().expect("autofix metadata");
        assert_eq!(autofix.applicability, IssueAutofixApplicability::Safe);
        let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
        assert_eq!(fixed, "SELECT COUNT(*), COUNT(x) FROM t");
    }

    // Uniformly lowercase function names satisfy the consistent policy.
    #[test]
    fn does_not_flag_consistent_function_case() {
        assert!(run("SELECT lower(x), upper(y) FROM t").is_empty());
    }

    // `CONFLICT (id)` must not count as a function; only now()/NOW() do.
    #[test]
    fn on_conflict_clause_keyword_is_not_treated_as_function_name() {
        let sql = "INSERT INTO t (id) VALUES (1) ON CONFLICT (id) DO UPDATE SET updated_at = now(), touched_at = NOW()";
        let issues = run(sql);
        assert_eq!(issues.len(), 1);
        assert_eq!(issues[0].code, issue_codes::LINT_CP_003);
    }

    // DATE is not in the data-type skip list, so date()/DATE() calls are checked.
    #[test]
    fn date_function_calls_are_tracked() {
        let issues = run("SELECT date(ts), DATE(ts) FROM t");
        assert_eq!(issues.len(), 1);
        assert_eq!(issues[0].code, issue_codes::LINT_CP_003);
    }

    // Function-like text inside string literals and comments is never flagged.
    #[test]
    fn does_not_flag_function_like_text_in_strings_or_comments() {
        let sql = "SELECT 'COUNT(x) count(y)' AS txt -- COUNT(x)\nFROM t";
        assert!(run(sql).is_empty());
    }

    // Explicit `lower` policy flags an uppercase function name.
    #[test]
    fn lower_policy_flags_uppercase_function_name() {
        let config = LintConfig {
            enabled: true,
            disabled_rules: vec![],
            rule_configs: std::collections::BTreeMap::from([(
                "LINT_CP_003".to_string(),
                serde_json::json!({"extended_capitalisation_policy": "lower"}),
            )]),
        };
        let rule = CapitalisationFunctions::from_config(&config);
        let sql = "SELECT COUNT(x) FROM t";
        let statements = parse_sql(sql).expect("parse");
        let issues = rule.check(
            &statements[0],
            &LintContext {
                sql,
                statement_range: 0..sql.len(),
                statement_index: 0,
            },
        );
        assert_eq!(issues.len(), 1);
    }

    // Explicit `upper` policy produces an uppercasing autofix.
    #[test]
    fn upper_policy_emits_uppercase_autofix() {
        let config = LintConfig {
            enabled: true,
            disabled_rules: vec![],
            rule_configs: std::collections::BTreeMap::from([(
                "LINT_CP_003".to_string(),
                serde_json::json!({"extended_capitalisation_policy": "upper"}),
            )]),
        };
        let rule = CapitalisationFunctions::from_config(&config);
        let sql = "SELECT count(x) FROM t";
        let statements = parse_sql(sql).expect("parse");
        let issues = rule.check(
            &statements[0],
            &LintContext {
                sql,
                statement_range: 0..sql.len(),
                statement_index: 0,
            },
        );
        assert_eq!(issues.len(), 1);
        let fixed = apply_issue_autofix(sql, &issues[0]).expect("apply autofix");
        assert_eq!(fixed, "SELECT COUNT(x) FROM t");
    }

    // `camel` policy: one issue per violating name, each with its own edit.
    #[test]
    fn camel_policy_emits_autofix() {
        let config = LintConfig {
            enabled: true,
            disabled_rules: vec![],
            rule_configs: std::collections::BTreeMap::from([(
                "LINT_CP_003".to_string(),
                serde_json::json!({"extended_capitalisation_policy": "camel"}),
            )]),
        };
        let rule = CapitalisationFunctions::from_config(&config);
        let sql = "SELECT COUNT(x), SUM(y) FROM t";
        let statements = parse_sql(sql).expect("parse");
        let issues = rule.check(
            &statements[0],
            &LintContext {
                sql,
                statement_range: 0..sql.len(),
                statement_index: 0,
            },
        );
        assert_eq!(issues.len(), 2);
        let fixed = apply_all_autofixes(sql, &issues);
        assert_eq!(fixed, "SELECT cOUNT(x), sUM(y) FROM t");
    }

    // `pascal` policy rewrites both call and bare-function names.
    #[test]
    fn pascal_policy_emits_autofix() {
        let config = LintConfig {
            enabled: true,
            disabled_rules: vec![],
            rule_configs: std::collections::BTreeMap::from([(
                "LINT_CP_003".to_string(),
                serde_json::json!({"extended_capitalisation_policy": "pascal"}),
            )]),
        };
        let rule = CapitalisationFunctions::from_config(&config);
        let sql = "SELECT current_timestamp, min(a) FROM t";
        let statements = parse_sql(sql).expect("parse");
        let issues = rule.check(
            &statements[0],
            &LintContext {
                sql,
                statement_range: 0..sql.len(),
                statement_index: 0,
            },
        );
        assert_eq!(issues.len(), 2);
        let fixed = apply_all_autofixes(sql, &issues);
        assert_eq!(fixed, "SELECT Current_Timestamp, Min(a) FROM t");
    }

    // `snake` policy lowercases Pascal_Case names.
    #[test]
    fn snake_policy_emits_autofix() {
        let config = LintConfig {
            enabled: true,
            disabled_rules: vec![],
            rule_configs: std::collections::BTreeMap::from([(
                "LINT_CP_003".to_string(),
                serde_json::json!({"extended_capitalisation_policy": "snake"}),
            )]),
        };
        let rule = CapitalisationFunctions::from_config(&config);
        let sql = "SELECT Current_Timestamp, Min(a) FROM t";
        let statements = parse_sql(sql).expect("parse");
        let issues = rule.check(
            &statements[0],
            &LintContext {
                sql,
                statement_range: 0..sql.len(),
                statement_index: 0,
            },
        );
        assert_eq!(issues.len(), 2);
        let fixed = apply_all_autofixes(sql, &issues);
        assert_eq!(fixed, "SELECT current_timestamp, min(a) FROM t");
    }

    // A matching ignore_words_regex exempts both casings, so nothing is flagged.
    #[test]
    fn ignore_words_regex_excludes_functions_from_check() {
        let config = LintConfig {
            enabled: true,
            disabled_rules: vec![],
            rule_configs: std::collections::BTreeMap::from([(
                "LINT_CP_003".to_string(),
                serde_json::json!({"ignore_words_regex": "^count$"}),
            )]),
        };
        let rule = CapitalisationFunctions::from_config(&config);
        let sql = "SELECT COUNT(*), count(x) FROM t";
        let statements = parse_sql(sql).expect("parse");
        let issues = rule.check(
            &statements[0],
            &LintContext {
                sql,
                statement_range: 0..sql.len(),
                statement_index: 0,
            },
        );
        assert!(issues.is_empty());
    }

    // Parenthesis-less functions like CURRENT_TIMESTAMP are checked too.
    #[test]
    fn bare_function_keywords_are_tracked() {
        let issues = run("SELECT CURRENT_TIMESTAMP, current_timestamp FROM t");
        assert_eq!(issues.len(), 1);
        assert_eq!(issues[0].code, issue_codes::LINT_CP_003);
    }

    // ANY(...) is treated as a function (sqlfluff parity), so mixed case is fixed.
    #[test]
    fn quantified_any_keyword_tracks_capitalisation_for_parity() {
        let sql = "SELECT count(*), col = ANY(arr) FROM t";
        let issues = run(sql);
        assert_eq!(issues.len(), 1);
        let fixed = apply_all_autofixes(sql, &issues);
        assert_eq!(fixed, "SELECT count(*), col = any(arr) FROM t");
    }

    // function_autofix_edits sorts candidates itself, so an unsorted input
    // still yields a single, correctly-positioned edit.
    #[test]
    fn consistent_policy_autofix_uses_source_order_even_when_candidates_are_unsorted() {
        let sql = "SELECT greatest(x), GREATEST(y) FROM t";
        let ctx = LintContext {
            sql,
            statement_range: 0..sql.len(),
            statement_index: 0,
        };

        let upper_start = sql.find("GREATEST").expect("uppercase function position");
        let lower_start = sql.find("greatest").expect("lowercase function position");
        let unsorted = vec![
            FunctionCandidate {
                value: "GREATEST".to_string(),
                start: upper_start,
                end: upper_start + "GREATEST".len(),
            },
            FunctionCandidate {
                value: "greatest".to_string(),
                start: lower_start,
                end: lower_start + "greatest".len(),
            },
        ];

        let edits = function_autofix_edits(&ctx, &unsorted, CapitalisationPolicy::Consistent);
        assert_eq!(edits.len(), 1);
        assert_eq!(edits[0].span.start, upper_start);
        assert_eq!(edits[0].span.end, upper_start + "GREATEST".len());
        assert_eq!(edits[0].replacement, "greatest");
    }

    // With templating active and ignore_templated_areas=false, the rendered
    // tokens drive policy resolution while edits still map to the source text.
    #[test]
    fn templated_policy_tokens_drive_source_mapped_autofix_when_not_ignored() {
        let source_sql = "SELECT\n {{ \"greatest(a, b)\" }},\n GREATEST(i, j)\n";
        let rendered_sql = "SELECT\n greatest(a, b),\n GREATEST(i, j)\n";
        let rendered_tokens = tokenized(rendered_sql, Dialect::Ansi).expect("rendered tokens");
        let statements = parse_sql("SELECT 1").expect("synthetic parse");
        let rule = CapitalisationFunctions::from_config(&LintConfig {
            enabled: true,
            disabled_rules: vec![],
            rule_configs: std::collections::BTreeMap::from([(
                "core".to_string(),
                serde_json::json!({"ignore_templated_areas": false}),
            )]),
        });

        let issues = with_active_is_templated(true, || {
            with_active_document_tokens(&rendered_tokens, || {
                rule.check(
                    &statements[0],
                    &LintContext {
                        sql: source_sql,
                        statement_range: 0..source_sql.len(),
                        statement_index: 0,
                    },
                )
            })
        });

        assert_eq!(issues.len(), 1);
        let autofix = issues[0]
            .autofix
            .as_ref()
            .expect("expected autofix metadata");
        assert!(
            autofix
                .edits
                .iter()
                .any(
                    |edit| &source_sql[edit.span.start..edit.span.end] == "GREATEST"
                        && edit.replacement == "greatest"
                ),
            "expected source-mapped GREATEST fix, got edits={:?}",
            autofix.edits
        );
    }
}