1use std::collections::HashMap;
7use std::fs;
8use std::path::{Path, PathBuf};
9
10use serde::{Deserialize, Serialize};
11
12use super::{Finding, FindingSeverity};
13
/// A single verifiable requirement ("claim") parsed out of a spec markdown file.
///
/// Claims are recognized from `###` headers of the form `ID: Title` (see
/// `parse_claim_header`) and start life as `ClaimStatus::Pending`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SpecClaim {
    /// Claim identifier of the form `PREFIX-NNN` (1–4 uppercase ASCII letters,
    /// a dash, then 1–4 ASCII digits), e.g. `AB-12`.
    pub id: String,
    /// Human-readable title: the text after the `:` in the claim header.
    pub title: String,
    /// 1-based line number of the claim header within the spec file.
    pub line: usize,
    /// Titles of the enclosing `##` section (and `###` subsection), outermost first.
    pub section_path: Vec<String>,
    /// Source locations whose text mentions the claim id (filled by `find_implementations`).
    pub implementations: Vec<CodeLocation>,
    /// Free-form finding identifiers attached to this claim.
    pub findings: Vec<String>,
    /// Current verification status; `Pending` immediately after parsing.
    pub status: ClaimStatus,
}
32
/// A file/line reference into the project's source code where a claim id appears.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CodeLocation {
    /// Path to the source file containing the match.
    pub file: PathBuf,
    /// 1-based line number of the matching line.
    pub line: usize,
    /// Trimmed snippet of the matching line (truncated to 60 characters).
    pub context: String,
}
40
/// Verification outcome for a `SpecClaim`, as assigned by
/// `ParsedSpec::update_with_findings`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum ClaimStatus {
    /// No findings were mapped to the claim.
    Verified,
    /// Findings exist, but none are `Critical` or `High` severity.
    Warning,
    /// At least one `Critical` or `High` severity finding was mapped to the claim.
    Failed,
    /// Not yet checked — the initial state after parsing.
    Pending,
}
53
54impl std::fmt::Display for ClaimStatus {
55 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
56 match self {
57 Self::Verified => write!(f, "✓ Verified"),
58 Self::Warning => write!(f, "⚠️ Warning"),
59 Self::Failed => write!(f, "✗ Failed"),
60 Self::Pending => write!(f, "○ Pending"),
61 }
62 }
63}
64
/// A spec markdown file together with the claims parsed out of it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ParsedSpec {
    /// Path the spec was read from; also the target for `write_updated`.
    pub path: PathBuf,
    /// Claims extracted from `### ID: Title` headers, in document order.
    pub claims: Vec<SpecClaim>,
    /// Raw file contents exactly as read at parse time.
    pub original_content: String,
}
75
76impl ParsedSpec {
77 pub fn parse(spec_path: &Path) -> Result<Self, String> {
79 let content = fs::read_to_string(spec_path)
80 .map_err(|e| format!("Failed to read spec file: {}", e))?;
81
82 let claims = parse_claims(&content);
83
84 Ok(Self { path: spec_path.to_path_buf(), claims, original_content: content })
85 }
86
87 pub fn claims_for_section(&self, section: &str) -> Vec<&SpecClaim> {
89 self.claims
90 .iter()
91 .filter(|c| {
92 c.section_path.iter().any(|s| s.contains(section))
93 || c.id.contains(section)
94 || c.title.contains(section)
95 })
96 .collect()
97 }
98
99 pub fn update_with_findings(
101 &mut self,
102 findings: &[(String, Vec<Finding>)], ) -> Result<String, String> {
104 let mut updated = remove_existing_status_blocks(&self.original_content);
106
107 for (claim_id, claim_findings) in findings {
108 if let Some(claim) = self.claims.iter_mut().find(|c| c.id == *claim_id) {
110 claim.status = if claim_findings.is_empty() {
112 ClaimStatus::Verified
113 } else if claim_findings.iter().any(|f| {
114 matches!(f.severity, FindingSeverity::Critical | FindingSeverity::High)
115 }) {
116 ClaimStatus::Failed
117 } else {
118 ClaimStatus::Warning
119 };
120
121 let status_block = generate_status_block(claim, claim_findings);
123
124 if let Some(insert_pos) = find_claim_end(&updated, &claim.id) {
127 updated.insert_str(insert_pos, &status_block);
128 }
129 }
130 }
131
132 Ok(updated)
133 }
134
135 pub fn write_updated(&self, updated_content: &str) -> Result<(), String> {
137 let backup_path = self.path.with_extension("md.bak");
139 fs::copy(&self.path, &backup_path)
140 .map_err(|e| format!("Failed to create backup: {}", e))?;
141
142 fs::write(&self.path, updated_content)
144 .map_err(|e| format!("Failed to write spec: {}", e))?;
145
146 Ok(())
147 }
148}
149
150fn parse_claims(content: &str) -> Vec<SpecClaim> {
152 let mut claims = Vec::new();
153 let mut current_sections: Vec<String> = Vec::new();
154
155 for (line_num, line) in content.lines().enumerate() {
156 let line_num = line_num + 1;
157 let trimmed = line.trim();
158
159 if let Some(section) = trimmed.strip_prefix("## ") {
161 current_sections.clear();
162 current_sections.push(section.to_string());
163 } else if let Some(subsection) = trimmed.strip_prefix("### ") {
164 current_sections.truncate(1);
166 current_sections.push(subsection.to_string());
167 }
168
169 if trimmed.starts_with("### ") {
171 if let Some((id, title)) = parse_claim_header(trimmed) {
172 claims.push(SpecClaim {
173 id,
174 title,
175 line: line_num,
176 section_path: current_sections.clone(),
177 implementations: Vec::new(),
178 findings: Vec::new(),
179 status: ClaimStatus::Pending,
180 });
181 }
182 }
183 }
184
185 claims
186}
187
/// Parses a markdown header of the form `### PREFIX-NNN: Title` into an
/// `(id, title)` pair.
///
/// Accepts ids with 1–4 uppercase ASCII letters, a dash, and 1–4 ASCII
/// digits; returns `None` for headers that do not match this shape.
fn parse_claim_header(header: &str) -> Option<(String, String)> {
    let text = header.trim_start_matches('#').trim();

    // Split `ID: Title` at the first colon.
    let colon = text.find(':')?;
    let (id_part, rest) = text.split_at(colon);
    let title = rest[1..].trim();

    // Split `PREFIX-NNN` at the first dash.
    let dash = id_part.find('-')?;
    let (prefix, dashed_suffix) = id_part.split_at(dash);
    let suffix = &dashed_suffix[1..];

    let prefix_ok =
        (1..=4).contains(&prefix.len()) && prefix.chars().all(|c| c.is_ascii_uppercase());
    let suffix_ok =
        (1..=4).contains(&suffix.len()) && suffix.chars().all(|c| c.is_ascii_digit());

    (prefix_ok && suffix_ok).then(|| (id_part.to_string(), title.to_string()))
}
217
218fn generate_status_block(claim: &SpecClaim, findings: &[Finding]) -> String {
220 let mut block = String::new();
221 block.push_str("\n\n<!-- bug-hunter-status -->\n");
222 block.push_str(&format!("**Bug Hunter Status:** {}\n", claim.status));
223
224 if !claim.implementations.is_empty() {
225 block.push_str("**Implementations:**\n");
226 for loc in &claim.implementations {
227 block.push_str(&format!("- `{}:{}` - {}\n", loc.file.display(), loc.line, loc.context));
228 }
229 }
230
231 if findings.is_empty() {
232 block.push_str("**Findings:** None ✓\n");
233 } else {
234 block.push_str(&format!("**Findings:** {} issue(s)\n", findings.len()));
235 for finding in findings.iter().take(5) {
236 block.push_str(&format!(
237 "- [{}]({}) - {}\n",
238 finding.id,
239 finding.location(),
240 finding.title
241 ));
242 }
243 if findings.len() > 5 {
244 block.push_str(&format!("- ... and {} more\n", findings.len() - 5));
245 }
246 }
247
248 block.push_str("<!-- /bug-hunter-status -->\n");
249 block
250}
251
/// Returns `content` with every `<!-- bug-hunter-status --> ... <!-- /bug-hunter-status -->`
/// block (marker lines included) removed, so stale status blocks never stack up.
///
/// Output is newline-terminated per surviving line, which also normalizes a
/// missing final newline in the input.
fn remove_existing_status_blocks(content: &str) -> String {
    let mut kept = String::with_capacity(content.len());
    let mut skipping = false;

    for line in content.lines() {
        if line.contains("<!-- bug-hunter-status -->") {
            skipping = true;
        } else if line.contains("<!-- /bug-hunter-status -->") {
            skipping = false;
        } else if !skipping {
            kept.push_str(line);
            kept.push('\n');
        }
    }

    kept
}
274
/// Returns the byte offset just past the `### <claim_id>: ...` header line —
/// the position at which a status block for that claim can be inserted.
///
/// Matching is strict: the line must be a `### ` header (not `####` or prose
/// that merely mentions `###`), and the id must be the whole header or be
/// delimited by `:`, so `AB-1` no longer false-matches a `### AB-12: ...`
/// line the way bare substring checks would.
///
/// The returned offset is clamped to `content.len()` so callers can safely
/// `insert_str` even when the content lacks a trailing newline.
fn find_claim_end(content: &str, claim_id: &str) -> Option<usize> {
    let mut offset = 0;

    for line in content.lines() {
        // `lines()` strips the '\n'; account for it when advancing the offset.
        offset += line.len() + 1;

        let header = match line.trim().strip_prefix("### ") {
            Some(rest) => rest,
            None => continue, // not a claim header line
        };

        // Exact-id match: the id must end the header or be followed by ':'.
        let is_match = header
            .strip_prefix(claim_id)
            .map_or(false, |rest| rest.is_empty() || rest.trim_start().starts_with(':'));
        if is_match {
            return Some(offset.min(content.len()));
        }
    }

    None
}
289
290pub fn find_implementations(claim: &SpecClaim, project_path: &Path) -> Vec<CodeLocation> {
292 let mut locations = Vec::new();
293
294 let pattern = &claim.id;
296
297 if let Ok(entries) = glob::glob(&format!("{}/**/*.rs", project_path.display())) {
298 for entry in entries.flatten() {
299 if let Ok(content) = fs::read_to_string(&entry) {
300 for (line_num, line) in content.lines().enumerate() {
301 if line.contains(pattern) {
302 let context = line.trim().chars().take(60).collect::<String>();
304 locations.push(CodeLocation {
305 file: entry.clone(),
306 line: line_num + 1,
307 context,
308 });
309 }
310 }
311 }
312 }
313 }
314
315 locations
316}
317
318pub fn map_findings_to_claims(
320 claims: &[SpecClaim],
321 findings: &[Finding],
322 project_path: &Path,
323) -> HashMap<String, Vec<Finding>> {
324 let mut mapping: HashMap<String, Vec<Finding>> = HashMap::new();
325
326 for claim in claims {
328 mapping.insert(claim.id.clone(), Vec::new());
329 }
330
331 for finding in findings {
333 for claim in claims {
335 let implementations = find_implementations(claim, project_path);
336 for impl_loc in &implementations {
337 if finding.file == impl_loc.file {
339 let distance = (finding.line as i64 - impl_loc.line as i64).unsigned_abs();
340 if distance < 50 {
341 mapping.entry(claim.id.clone()).or_default().push(finding.clone());
343 break;
344 }
345 }
346 }
347 }
348 }
349
350 mapping
351}
352
// Unit tests live in the sibling file `spec_tests.rs` to keep this module
// focused; they are compiled only for test builds.
#[cfg(test)]
#[path = "spec_tests.rs"]
mod tests;