sqrust_rules/ambiguous/
date_arithmetic.rs1use sqrust_core::{Diagnostic, FileContext, Rule};
2
3pub struct DateArithmetic;
4
5impl Rule for DateArithmetic {
6 fn name(&self) -> &'static str {
7 "Ambiguous/DateArithmetic"
8 }
9
10 fn check(&self, ctx: &FileContext) -> Vec<Diagnostic> {
11 if !ctx.parse_errors.is_empty() {
12 return Vec::new();
13 }
14
15 let source = &ctx.source;
16 find_date_arithmetic_violations(source, ctx)
17 }
18}
19
/// Identifier prefixes that mark a column as date/time-like; a prefix matches
/// as the whole token or when followed by `_` (e.g. `date_of_birth`).
const DATE_PREFIXES: &[&str] = &[
    "date", "time", "timestamp", "ts", "created", "updated", "modified",
];

/// Identifier suffixes that mark a column as date/time-like; a suffix matches
/// only when preceded by `_` (e.g. `created_at`, `expires_on`).
const DATE_SUFFIXES: &[&str] = &[
    "date", "time", "timestamp", "ts", "at", "on", "created", "updated", "modified",
];

/// Case-insensitively decides whether `token` names a date/time-like column.
///
/// Matches when the lowercased token equals a prefix, starts with
/// `<prefix>_`, or ends with `_<suffix>`. Returns `false` for the empty
/// string.
///
/// Allocation-free apart from the one lowercase copy: the previous
/// implementation built a fresh `String` via `format!` for every prefix and
/// suffix on every call, and this predicate runs for each `+`/`-` operator
/// the scanner encounters.
fn is_date_like_identifier(token: &str) -> bool {
    let lower = token.to_ascii_lowercase();

    // Whole-token equality or `<prefix>_…`.
    for prefix in DATE_PREFIXES {
        if let Some(rest) = lower.strip_prefix(prefix) {
            if rest.is_empty() || rest.starts_with('_') {
                return true;
            }
        }
    }

    // `…_<suffix>` — the underscore is required, so a bare suffix like
    // `at` alone does not match here (mirrors the original semantics).
    for suffix in DATE_SUFFIXES {
        if let Some(rest) = lower.strip_suffix(suffix) {
            if rest.ends_with('_') {
                return true;
            }
        }
    }

    false
}
60
/// True when `token` consists of one or more ASCII digits (no sign, no
/// decimal point).
fn is_integer_token(token: &str) -> bool {
    if token.is_empty() {
        return false;
    }
    token.chars().all(|c| c.is_ascii_digit())
}
65
/// True when `b` may appear inside an identifier token: `[A-Za-z0-9_]`.
fn is_ident_char(b: u8) -> bool {
    matches!(b, b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'_')
}
70
71fn find_date_arithmetic_violations(source: &str, _ctx: &FileContext) -> Vec<Diagnostic> {
75 let mut diags = Vec::new();
76 let bytes = source.as_bytes();
77 let len = bytes.len();
78 let mut i = 0;
79
80 while i < len {
81 if bytes[i] == b'\'' {
83 i += 1;
84 while i < len && bytes[i] != b'\'' {
85 if bytes[i] == b'\\' {
86 i += 1;
87 }
88 i += 1;
89 }
90 i += 1; continue;
92 }
93
94 if i + 1 < len && bytes[i] == b'-' && bytes[i + 1] == b'-' {
96 while i < len && bytes[i] != b'\n' {
97 i += 1;
98 }
99 continue;
100 }
101
102 if i + 1 < len && bytes[i] == b'/' && bytes[i + 1] == b'*' {
104 i += 2;
105 while i + 1 < len && !(bytes[i] == b'*' && bytes[i + 1] == b'/') {
106 i += 1;
107 }
108 i += 2; continue;
110 }
111
112 if bytes[i] == b'+' || bytes[i] == b'-' {
114 let op_pos = i;
115
116 let left_end = scan_back_skip_whitespace(bytes, op_pos);
118 let left_token = extract_token_backwards(bytes, left_end);
119
120 let right_start = scan_forward_skip_whitespace(bytes, op_pos + 1);
122 let right_token = extract_token_forwards(bytes, right_start);
123
124 let should_flag = (!left_token.is_empty()
126 && !right_token.is_empty()
127 && is_date_like_identifier(&left_token)
128 && is_integer_token(&right_token))
129 || (!left_token.is_empty()
130 && !right_token.is_empty()
131 && is_integer_token(&left_token)
132 && is_date_like_identifier(&right_token));
133
134 if should_flag {
135 let (line, col) = offset_to_line_col(source, op_pos);
136 diags.push(Diagnostic {
137 rule: "Ambiguous/DateArithmetic",
138 message: "Date arithmetic with integer offset is database-specific \
139 — use INTERVAL '1' DAY or dialect-specific functions for portability"
140 .to_string(),
141 line,
142 col,
143 });
144 }
145 }
146
147 i += 1;
148 }
149
150 diags
151}
152
/// Steps backwards from `pos` (exclusive) over spaces, tabs, CR and LF and
/// returns the index of the first non-whitespace byte found, or 0 when the
/// scan reaches the start of the buffer.
///
/// The returned index may still sit on whitespace when byte 0 is
/// whitespace; callers tolerate this because `extract_token_backwards`
/// yields an empty token for a non-identifier byte.
fn scan_back_skip_whitespace(bytes: &[u8], pos: usize) -> usize {
    let mut idx = pos;
    while idx > 0 {
        idx -= 1;
        if !matches!(bytes[idx], b' ' | b'\t' | b'\n' | b'\r') {
            break;
        }
    }
    idx
}
165
/// Steps forwards from `pos` over spaces, tabs, CR and LF and returns the
/// index of the first non-whitespace byte, or `pos`/end-of-buffer when
/// there is nothing left to skip.
fn scan_forward_skip_whitespace(bytes: &[u8], pos: usize) -> usize {
    let mut idx = pos;
    loop {
        match bytes.get(idx) {
            Some(&b) if matches!(b, b' ' | b'\t' | b'\n' | b'\r') => idx += 1,
            _ => return idx,
        }
    }
}
176
177fn extract_token_backwards(bytes: &[u8], end_pos: usize) -> String {
180 if end_pos >= bytes.len() || !is_ident_char(bytes[end_pos]) {
181 return String::new();
182 }
183 let mut start = end_pos;
184 while start > 0 && is_ident_char(bytes[start - 1]) {
185 start -= 1;
186 }
187 String::from_utf8_lossy(&bytes[start..=end_pos]).into_owned()
188}
189
190fn extract_token_forwards(bytes: &[u8], start_pos: usize) -> String {
192 if start_pos >= bytes.len() || !is_ident_char(bytes[start_pos]) {
193 return String::new();
194 }
195 let mut end = start_pos;
196 while end < bytes.len() && is_ident_char(bytes[end]) {
197 end += 1;
198 }
199 String::from_utf8_lossy(&bytes[start_pos..end]).into_owned()
200}
201
/// Converts a byte `offset` into a 1-based `(line, column)` pair.
///
/// The column is counted in characters, not bytes: the previous version
/// added the raw byte distance from the last newline, which overcounted
/// columns on lines containing multi-byte UTF-8 text while the line count
/// was char-based — an inconsistency this fixes.
///
/// `offset` must lie on a char boundary of `source`; callers pass the
/// position of an ASCII `+`/`-` operator, which always is.
fn offset_to_line_col(source: &str, offset: usize) -> (usize, usize) {
    let before = &source[..offset];
    let line = before.matches('\n').count() + 1;
    // Start of the current line: just past the last newline, or 0.
    let line_start = before.rfind('\n').map_or(0, |p| p + 1);
    let col = before[line_start..].chars().count() + 1;
    (line, col)
}