// flowscope_core/linter/rules/cv_002.rs
//! LINT_CV_002: prefer COALESCE over IFNULL/NVL.
//!
//! SQLFluff CV02 parity: detect IFNULL/NVL function usage and recommend
//! COALESCE for portability and consistency.

6use crate::linter::rule::{LintContext, LintRule};
7use crate::linter::visit;
8use crate::types::{issue_codes, Dialect, Issue, IssueAutofixApplicability, IssuePatchEdit};
9use sqlparser::ast::{Expr, Statement};
10use sqlparser::tokenizer::{Token, TokenWithSpan, Tokenizer, Whitespace};
11
/// Lint rule `LINT_CV_002`: flags `IFNULL`/`NVL` calls and suggests `COALESCE`.
pub struct CoalesceConvention;
13
impl LintRule for CoalesceConvention {
    /// Stable issue code for this rule.
    fn code(&self) -> &'static str {
        issue_codes::LINT_CV_002
    }

    /// Short human-readable rule name.
    fn name(&self) -> &'static str {
        "COALESCE convention"
    }

    /// One-line description shown to users.
    fn description(&self) -> &'static str {
        "Use 'COALESCE' instead of 'IFNULL' or 'NVL'."
    }

    /// Walks every expression in `stmt` and emits an info issue for each
    /// `IFNULL(...)` / `NVL(...)` call, attaching a span and a safe autofix
    /// (rename to `COALESCE`) when a matching token span could be located.
    fn check(&self, stmt: &Statement, ctx: &LintContext) -> Vec<Issue> {
        // Token-level spans of IFNULL/NVL function names, used to position
        // issues and autofix edits precisely in the source text.
        //
        // NOTE(review): `tokenized_for_context` scans `ctx.sql` (the whole
        // document) while the fallback scans `ctx.statement_sql()`; the
        // resulting offsets are both fed to `span_from_statement_offset`
        // below. Confirm that both paths yield statement-relative offsets
        // (or that single-statement documents make them equivalent).
        let function_name_spans =
            tokenized_for_context(ctx).or_else(|| tokenized(ctx.statement_sql(), ctx.dialect()));
        let function_name_spans = function_name_spans
            .as_deref()
            .map(collect_coalesce_function_name_spans)
            .unwrap_or_default();
        // Pairs the nth flagged AST function with the nth flagged token span.
        // NOTE(review): this assumes `visit_expressions` encounters IFNULL/NVL
        // calls in the same order they appear in the token stream — verify
        // for nested/reordered expressions.
        let mut span_index = 0usize;
        let mut issues = Vec::new();

        visit::visit_expressions(stmt, &mut |expr| {
            let Expr::Function(function) = expr else {
                return;
            };

            // Case-insensitive match on the function name.
            let function_name = function.name.to_string();
            let function_name_upper = function_name.to_ascii_uppercase();

            if function_name_upper != "IFNULL" && function_name_upper != "NVL" {
                return;
            }

            let mut issue = Issue::info(
                issue_codes::LINT_CV_002,
                format!("Use 'COALESCE' instead of '{}'.", function_name_upper),
            )
            .with_statement(ctx.statement_index);
            // Only attach a span/autofix when token scanning produced a
            // matching span; otherwise the issue is reported without one.
            if let Some((start, end)) = function_name_spans.get(span_index).copied() {
                let span = ctx.span_from_statement_offset(start, end);
                issue = issue.with_span(span).with_autofix_edits(
                    IssueAutofixApplicability::Safe,
                    vec![IssuePatchEdit::new(span, "COALESCE")],
                );
            }
            // Advance even when no span matched, to keep pairing aligned.
            span_index = span_index.saturating_add(1);
            issues.push(issue);
        });

        issues
    }
}
68
69fn collect_coalesce_function_name_spans(tokens: &[LocatedToken]) -> Vec<(usize, usize)> {
70    let mut spans = Vec::new();
71    let mut i = 0usize;
72    while i < tokens.len() {
73        let Token::Word(word) = &tokens[i].token else {
74            i += 1;
75            continue;
76        };
77        if !word.value.eq_ignore_ascii_case("IFNULL") && !word.value.eq_ignore_ascii_case("NVL") {
78            i += 1;
79            continue;
80        }
81
82        let mut j = i + 1;
83        skip_trivia_tokens(tokens, &mut j);
84        if j >= tokens.len() || !matches!(tokens[j].token, Token::LParen) {
85            i += 1;
86            continue;
87        }
88
89        spans.push((tokens[i].start, tokens[i].end));
90        i = j + 1;
91    }
92
93    spans
94}
95
/// A lexer token paired with its byte offsets in the SQL text.
#[derive(Debug, Clone)]
struct LocatedToken {
    // Raw token as produced by the sqlparser tokenizer.
    token: Token,
    // Byte offset where the token starts.
    start: usize,
    // Byte offset where the token ends — presumably exclusive, as it is
    // derived from sqlparser's span end location; confirm against usage.
    end: usize,
}
102
103fn tokenized_for_context(ctx: &LintContext) -> Option<Vec<LocatedToken>> {
104    let tokens = ctx.with_document_tokens(|tokens| {
105        if tokens.is_empty() {
106            return None;
107        }
108
109        Some(
110            tokens
111                .iter()
112                .filter_map(|token| {
113                    token_with_span_offsets(ctx.sql, token).map(|(start, end)| LocatedToken {
114                        token: token.token.clone(),
115                        start,
116                        end,
117                    })
118                })
119                .collect::<Vec<_>>(),
120        )
121    });
122
123    if let Some(tokens) = tokens {
124        return Some(tokens);
125    }
126
127    tokenized(ctx.sql, ctx.dialect())
128}
129
130fn tokenized(sql: &str, dialect: Dialect) -> Option<Vec<LocatedToken>> {
131    let dialect = dialect.to_sqlparser_dialect();
132    let mut tokenizer = Tokenizer::new(dialect.as_ref(), sql);
133    let tokens = tokenizer.tokenize_with_location().ok()?;
134
135    let mut out = Vec::with_capacity(tokens.len());
136    for token in tokens {
137        let Some((start, end)) = token_with_span_offsets(sql, &token) else {
138            continue;
139        };
140        out.push(LocatedToken {
141            token: token.token,
142            start,
143            end,
144        });
145    }
146
147    Some(out)
148}
149
150fn token_with_span_offsets(sql: &str, token: &TokenWithSpan) -> Option<(usize, usize)> {
151    let start = line_col_to_offset(
152        sql,
153        token.span.start.line as usize,
154        token.span.start.column as usize,
155    )?;
156    let end = line_col_to_offset(
157        sql,
158        token.span.end.line as usize,
159        token.span.end.column as usize,
160    )?;
161    Some((start, end))
162}
163
/// Converts a 1-based (line, column) position into a byte offset in `sql`.
///
/// Columns count characters, matching sqlparser's location semantics.
/// Returns `None` for zero coordinates or positions beyond the text; a
/// position exactly one past the final character maps to `sql.len()`.
fn line_col_to_offset(sql: &str, line: usize, column: usize) -> Option<usize> {
    // Locations are 1-based; zero is never a valid coordinate.
    if line == 0 || column == 0 {
        return None;
    }

    let (mut cur_line, mut cur_col) = (1usize, 1usize);

    for (offset, ch) in sql.char_indices() {
        if (cur_line, cur_col) == (line, column) {
            return Some(offset);
        }

        if ch == '\n' {
            cur_line += 1;
            cur_col = 1;
        } else {
            cur_col += 1;
        }
    }

    // The target may be the position just past the last character.
    ((cur_line, cur_col) == (line, column)).then_some(sql.len())
}
191
192fn skip_trivia_tokens(tokens: &[LocatedToken], index: &mut usize) {
193    while *index < tokens.len() {
194        if !is_trivia_token(&tokens[*index].token) {
195            break;
196        }
197        *index += 1;
198    }
199}
200
201fn is_trivia_token(token: &Token) -> bool {
202    matches!(
203        token,
204        Token::Whitespace(Whitespace::Space | Whitespace::Tab | Whitespace::Newline)
205            | Token::Whitespace(Whitespace::SingleLineComment { .. })
206            | Token::Whitespace(Whitespace::MultiLineComment(_))
207    )
208}
209
#[cfg(test)]
mod tests {
    use super::*;
    use crate::parser::parse_sql;

    /// Parses `sql`, runs the rule over every statement, and collects all
    /// issues raised.
    fn run(sql: &str) -> Vec<Issue> {
        let statements = parse_sql(sql).expect("parse");
        let rule = CoalesceConvention;
        let mut collected = Vec::new();
        for (index, statement) in statements.iter().enumerate() {
            let ctx = LintContext {
                sql,
                statement_range: 0..sql.len(),
                statement_index: index,
            };
            collected.extend(rule.check(statement, &ctx));
        }
        collected
    }

    /// Asserts that `sql` yields exactly one CV_002 issue carrying a safe
    /// single-edit autofix that rewrites the call to COALESCE.
    fn assert_flagged_with_safe_fix(sql: &str) {
        let issues = run(sql);
        assert_eq!(issues.len(), 1);
        assert_eq!(issues[0].code, issue_codes::LINT_CV_002);
        let fix = issues[0].autofix.as_ref().expect("autofix metadata");
        assert_eq!(fix.applicability, IssueAutofixApplicability::Safe);
        assert_eq!(fix.edits.len(), 1);
        assert_eq!(fix.edits[0].replacement, "COALESCE");
    }

    // --- Edge cases adopted from sqlfluff CV02 ---

    #[test]
    fn passes_coalesce() {
        assert!(run("SELECT coalesce(foo, 0) AS bar FROM baz").is_empty());
    }

    #[test]
    fn fails_ifnull() {
        assert_flagged_with_safe_fix("SELECT ifnull(foo, 0) AS bar FROM baz");
    }

    #[test]
    fn fails_nvl() {
        assert_flagged_with_safe_fix("SELECT nvl(foo, 0) AS bar FROM baz");
    }

    #[test]
    fn does_not_flag_case_when_null_pattern_anymore() {
        let issues = run("SELECT CASE WHEN x IS NULL THEN 'default' ELSE x END FROM t");
        assert!(issues.is_empty());
    }

    #[test]
    fn flags_nested_ifnull_calls() {
        let issues = run("SELECT SUM(IFNULL(amount, 0)) AS total FROM orders");
        assert_eq!(issues.len(), 1);
    }
}