mod claims;
mod tasks;
mod verify;
use std::collections::{HashMap, HashSet};
use std::fs;
use std::path::Path;
pub use claims::{Claim, ClaimConfidence, ClaimType};
pub use tasks::{AuditTask, AuditTaskStatus};
pub use verify::VerifyResult;
use regex::Regex;
use crate::{component, git, module, Result};
/// Serde `skip_serializing_if` helper: true when a counter field is zero,
/// so zero-valued feature counters are omitted from JSON output.
fn is_zero(v: &usize) -> bool {
    v == &0
}
/// A document flagged for review because source files it references have
/// changed since the baseline.
#[derive(Debug, Clone, serde::Serialize)]
pub struct PriorityDoc {
    /// Doc path, relative to the docs directory.
    pub doc: String,
    /// Human-readable explanation of why this doc was flagged.
    pub reason: String,
    /// The changed source files that this doc's claims reference.
    pub changed_files_referenced: Vec<String>,
    /// Number of code-example claims found in this doc.
    pub code_examples: usize,
    /// Suggested remediation text (see `build_doc_action`).
    pub action: String,
}
/// A feature declaration found in source code whose name does not appear
/// anywhere in the scanned documentation or README files.
#[derive(Debug, Clone, serde::Serialize)]
pub struct UndocumentedFeature {
    /// Feature name (capture group 1 of the matching pattern).
    pub name: String,
    /// Source file containing the declaration, relative to the source root.
    pub source_file: String,
    /// 1-based line number of the match within the source file.
    pub line: usize,
    /// The regex pattern (as configured) that matched this feature.
    pub pattern: String,
}
/// A documentation claim that failed verification against the source tree
/// (e.g. a referenced file no longer exists).
#[derive(Debug, Clone, serde::Serialize)]
pub struct BrokenReference {
    /// Doc path, relative to the docs directory.
    pub doc: String,
    /// 1-based line number of the claim inside the doc.
    pub line: usize,
    /// The claim text as extracted from the doc.
    pub claim: String,
    /// Confidence that the claim is a real reference (vs. incidental text).
    pub confidence: ClaimConfidence,
    /// Up to three numbered doc lines surrounding the claim, for context;
    /// omitted from JSON when unavailable.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub doc_context: Option<Vec<String>>,
    /// Suggested remediation text.
    pub action: String,
}
/// Roll-up counters for an audit run. Feature counters are omitted from JSON
/// when zero (feature detection is skipped when no patterns are configured).
#[derive(Debug, Clone, serde::Serialize)]
pub struct AlignmentSummary {
    /// Number of markdown docs scanned.
    pub docs_scanned: usize,
    /// Number of docs flagged as priorities (referenced changed source).
    pub priority_docs: usize,
    /// Number of individual broken references across all docs.
    pub broken_references: usize,
    /// Docs with neither priority flags nor broken references.
    pub unchanged_docs: usize,
    #[serde(skip_serializing_if = "is_zero")]
    pub total_features: usize,
    #[serde(skip_serializing_if = "is_zero")]
    pub documented_features: usize,
    #[serde(skip_serializing_if = "is_zero")]
    pub undocumented_features: usize,
}
/// Full result of a documentation audit, as produced by [`audit_path`] or
/// [`audit_component`].
#[derive(Debug, Clone, serde::Serialize)]
pub struct AuditResult {
    /// Component id, or the directory name for path-based audits.
    pub component_id: String,
    /// Git ref the changed-file diff was computed against; `None` for
    /// path-based audits (uncommitted changes only).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub baseline_ref: Option<String>,
    pub summary: AlignmentSummary,
    /// Sorted, deduplicated list of changed files.
    pub changed_files: Vec<String>,
    pub priority_docs: Vec<PriorityDoc>,
    pub broken_references: Vec<BrokenReference>,
    pub undocumented_features: Vec<UndocumentedFeature>,
}
/// Audit an arbitrary directory without a registered component.
///
/// Scans markdown docs under `docs_dir_override` (default `"docs"`) inside
/// `path`, extracts claims from each doc, verifies them against the source
/// tree, and flags docs that reference files with uncommitted git changes.
/// No module ignore/feature patterns are available for a bare path, so
/// feature detection always reports zero here.
///
/// # Errors
/// Returns a validation error when `path` is not a directory.
pub fn audit_path(path: &str, docs_dir_override: Option<&str>) -> Result<AuditResult> {
    let source_path = Path::new(path);
    if !source_path.is_dir() {
        return Err(crate::Error::validation_invalid_argument(
            "path",
            format!("'{}' is not a directory", path),
            Some(path.to_string()),
            None,
        ));
    }
    // The directory name doubles as the component id in the report.
    let label = source_path
        .file_name()
        .and_then(|n| n.to_str())
        .unwrap_or("unknown")
        .to_string();
    let docs_dir = docs_dir_override.unwrap_or("docs");
    let docs_dirs = vec![docs_dir.to_string()];
    let docs_path = source_path.join(docs_dir);
    // No changelog exclusion: a bare path has no component configuration.
    let doc_files = find_doc_files(&docs_path, None);
    let docs_scanned = doc_files.len();
    let mut all_claims = Vec::new();
    let mut doc_contents: HashMap<String, String> = HashMap::new();
    for doc_file in &doc_files {
        let doc_path = docs_path.join(doc_file);
        if let Ok(content) = fs::read_to_string(&doc_path) {
            // Empty ignore-pattern list: no module manifests to consult.
            let claims = claims::extract_claims(&content, doc_file, &[]);
            all_claims.extend(claims);
            doc_contents.insert(doc_file.clone(), content);
        }
    }
    let mut tasks = Vec::new();
    for claim in all_claims {
        let result = verify::verify_claim(&claim, source_path, &docs_path, None);
        let task = tasks::build_task(claim, result);
        tasks.push(task);
    }
    // Only uncommitted changes are considered; there is no baseline ref for
    // a raw path, and git errors degrade to "no changes".
    let changed_files = git::get_uncommitted_changes(path)
        .map(|u| {
            let mut files = Vec::new();
            files.extend(u.staged);
            files.extend(u.unstaged);
            files.sort();
            files.dedup();
            files
        })
        .unwrap_or_default();
    let priority_docs = build_priority_docs(&tasks, &changed_files);
    let broken_references = extract_broken_references(&tasks, &doc_contents);
    // Empty pattern list => detect_features short-circuits to zero counts.
    let feature_result = detect_features(&[], source_path, &docs_dirs, None);
    // A doc is "unchanged" if it is neither a priority nor holds broken refs.
    let docs_with_issues: HashSet<_> = priority_docs
        .iter()
        .map(|p| &p.doc)
        .chain(broken_references.iter().map(|b| &b.doc))
        .collect();
    let unchanged_docs = docs_scanned.saturating_sub(docs_with_issues.len());
    Ok(AuditResult {
        component_id: label,
        baseline_ref: None,
        summary: AlignmentSummary {
            docs_scanned,
            priority_docs: priority_docs.len(),
            broken_references: broken_references.len(),
            unchanged_docs,
            total_features: feature_result.total,
            documented_features: feature_result.documented,
            undocumented_features: feature_result.undocumented.len(),
        },
        changed_files,
        priority_docs,
        broken_references,
        undocumented_features: feature_result.undocumented,
    })
}
/// Audit a registered component.
///
/// Like [`audit_path`], but configuration-aware: docs directories, changelog
/// exclusion, and per-module ignore/feature patterns come from the component
/// and its module manifests. Claims are extracted from docs in the first
/// docs directory only; feature detection scans all configured docs dirs.
///
/// # Errors
/// Propagates failures from `component::load`.
pub fn audit_component(component_id: &str, docs_dir_override: Option<&str>) -> Result<AuditResult> {
    let comp = component::load(component_id)?;
    let source_path = Path::new(&comp.local_path);
    // Precedence: explicit override > configured docs_dirs > single docs_dir > "docs".
    let docs_dirs: Vec<String> = if let Some(override_dir) = docs_dir_override {
        vec![override_dir.to_string()]
    } else if !comp.docs_dirs.is_empty() {
        comp.docs_dirs.clone()
    } else {
        vec![comp.docs_dir.as_deref().unwrap_or("docs").to_string()]
    };
    let docs_path = source_path.join(&docs_dirs[0]);
    // Changelogs are always excluded; CHANGELOG.md is the default target.
    let changelog_exclude = comp.changelog_target.as_deref().or(Some("CHANGELOG.md"));
    let ignore_patterns = collect_module_ignore_patterns(&comp);
    let feature_patterns = collect_module_feature_patterns(&comp);
    let doc_files = find_doc_files(&docs_path, changelog_exclude);
    let docs_scanned = doc_files.len();
    let mut all_claims = Vec::new();
    let mut doc_contents: HashMap<String, String> = HashMap::new();
    for doc_file in &doc_files {
        let doc_path = docs_path.join(doc_file);
        if let Ok(content) = fs::read_to_string(&doc_path) {
            let claims = claims::extract_claims(&content, doc_file, &ignore_patterns);
            all_claims.extend(claims);
            doc_contents.insert(doc_file.clone(), content);
        }
    }
    let mut tasks = Vec::new();
    for claim in all_claims {
        // Component id enables component-scoped verification rules.
        let result = verify::verify_claim(&claim, source_path, &docs_path, Some(component_id));
        let task = tasks::build_task(claim, result);
        tasks.push(task);
    }
    let (changed_files, baseline_ref) = get_changed_files(component_id);
    let priority_docs = build_priority_docs(&tasks, &changed_files);
    let broken_references = extract_broken_references(&tasks, &doc_contents);
    let feature_result = detect_features(
        &feature_patterns,
        source_path,
        &docs_dirs,
        changelog_exclude,
    );
    // A doc is "unchanged" if it is neither a priority nor holds broken refs.
    let docs_with_issues: HashSet<_> = priority_docs
        .iter()
        .map(|p| &p.doc)
        .chain(broken_references.iter().map(|b| &b.doc))
        .collect();
    let unchanged_docs = docs_scanned.saturating_sub(docs_with_issues.len());
    Ok(AuditResult {
        component_id: component_id.to_string(),
        baseline_ref,
        summary: AlignmentSummary {
            docs_scanned,
            priority_docs: priority_docs.len(),
            broken_references: broken_references.len(),
            unchanged_docs,
            total_features: feature_result.total,
            documented_features: feature_result.documented,
            undocumented_features: feature_result.undocumented.len(),
        },
        changed_files,
        priority_docs,
        broken_references,
        undocumented_features: feature_result.undocumented,
    })
}
/// Recursively collect `.md` files under `docs_path`, returned as sorted
/// paths relative to `docs_path` (forward-slash separated).
///
/// Hidden entries (leading `.`) are skipped everywhere. When
/// `exclude_changelog` is given, any file whose name matches its basename
/// case-insensitively is omitted, at any depth. A missing `docs_path`
/// yields an empty list.
fn find_doc_files(docs_path: &Path, exclude_changelog: Option<&str>) -> Vec<String> {
    if !docs_path.exists() {
        return Vec::new();
    }
    // Lower-cased basename of the changelog target, for case-insensitive compare.
    let changelog_lower = exclude_changelog
        .and_then(|p| Path::new(p).file_name())
        .and_then(|n| n.to_str())
        .map(|s| s.to_lowercase());
    let mut found: Vec<String> = Vec::new();
    // Iterative depth-first walk: (directory, relative prefix) worklist.
    let mut pending: Vec<(PathBuf, String)> = vec![(docs_path.to_path_buf(), String::new())];
    while let Some((dir, prefix)) = pending.pop() {
        let entries = match fs::read_dir(&dir) {
            Ok(e) => e,
            Err(_) => continue,
        };
        for entry in entries.flatten() {
            let path = entry.path();
            let name = entry.file_name().to_string_lossy().to_string();
            if name.starts_with('.') {
                continue;
            }
            let relative = if prefix.is_empty() {
                name.clone()
            } else {
                format!("{}/{}", prefix, name)
            };
            if path.is_dir() {
                pending.push((path, relative));
            } else if path.is_file() && name.ends_with(".md") {
                if let Some(changelog) = &changelog_lower {
                    if name.to_lowercase() == *changelog {
                        continue;
                    }
                }
                found.push(relative);
            }
        }
    }
    // Sorting makes output independent of traversal order.
    found.sort();
    found
}
/// Collect the changed files for a component: staged + unstaged uncommitted
/// changes plus paths parsed from the committed diff (when one is available),
/// deduplicated and sorted, along with the baseline ref the diff targets.
///
/// Git failures are swallowed: an unreadable repo simply yields no changes.
fn get_changed_files(component_id: &str) -> (Vec<String>, Option<String>) {
    let changes = match git::changes(Some(component_id), None, true) {
        Ok(c) => c,
        Err(_) => return (vec![], None),
    };
    let mut files: Vec<String> = Vec::new();
    files.extend(changes.uncommitted.staged.iter().cloned());
    files.extend(changes.uncommitted.unstaged.iter().cloned());
    if let Some(ref diff) = changes.diff {
        files.extend(parse_diff_file_paths(diff));
    }
    files.sort();
    files.dedup();
    (files, changes.baseline_ref)
}
/// Extract the new-side file paths from unified-diff output by reading the
/// `diff --git a/<old> b/<new>` header lines.
fn parse_diff_file_paths(diff: &str) -> Vec<String> {
    let mut paths = Vec::new();
    for line in diff.lines() {
        if !line.starts_with("diff --git ") {
            continue;
        }
        // Everything after the first " b/" is the post-change path.
        if let Some(new_side) = line.split(" b/").nth(1) {
            paths.push(new_side.to_string());
        }
    }
    paths
}
/// Build the priority-doc list: each doc whose claims reference at least one
/// changed file becomes a `PriorityDoc`, sorted by how many changed files it
/// references (descending).
fn build_priority_docs(tasks: &[AuditTask], changed_files: &[String]) -> Vec<PriorityDoc> {
    // Group audit tasks by the doc they were extracted from.
    let mut by_doc: HashMap<String, Vec<&AuditTask>> = HashMap::new();
    for task in tasks {
        by_doc.entry(task.doc.clone()).or_default().push(task);
    }
    let mut result: Vec<PriorityDoc> = by_doc
        .into_iter()
        .filter_map(|(doc, doc_tasks)| {
            // Changed files mentioned by any of this doc's claims.
            let referenced: Vec<String> = changed_files
                .iter()
                .filter(|f| doc_tasks.iter().any(|t| references_file(&t.claim_value, f)))
                .cloned()
                .collect();
            if referenced.is_empty() {
                return None;
            }
            let code_examples = doc_tasks
                .iter()
                .filter(|t| matches!(t.claim_type, ClaimType::CodeExample))
                .count();
            Some(PriorityDoc {
                reason: format!(
                    "{} referenced source file(s) changed since baseline",
                    referenced.len()
                ),
                action: build_doc_action(&referenced, code_examples),
                changed_files_referenced: referenced,
                code_examples,
                doc,
            })
        })
        .collect();
    // Most-impacted docs first.
    result.sort_by_key(|d| std::cmp::Reverse(d.changed_files_referenced.len()));
    result
}
/// Heuristic: does `claim_value` (text extracted from a doc) refer to
/// `changed_file`? Matches on exact path, directory prefix (claims ending
/// in `/`), or the file's basename appearing in the claim text.
fn references_file(claim_value: &str, changed_file: &str) -> bool {
    let claim = claim_value.trim_start_matches('/');
    let file = changed_file.trim_start_matches('/');
    // Exact path match (ignoring a leading slash on either side).
    if claim == file {
        return true;
    }
    // Directory reference: "inc/Engine/" covers everything beneath it.
    if claim_value.ends_with('/') && file.starts_with(claim) {
        return true;
    }
    // Loose match on the file stem; names shorter than 4 chars are too
    // ambiguous and would produce false positives.
    Path::new(changed_file)
        .file_stem()
        .and_then(|stem| stem.to_str())
        .map(|name| name.len() >= 4 && claim_value.contains(name))
        .unwrap_or(false)
}
/// Compose the remediation message for a priority doc. The wording changes
/// depending on whether the doc contains code examples (stronger signal of
/// staleness) and whether one or several source files changed.
fn build_doc_action(changed_files: &[String], code_examples: usize) -> String {
    let files_desc = match changed_files {
        // Single file: name it directly.
        [only] => only.clone(),
        // Zero or several: summarize with a count.
        _ => format!("{} files", changed_files.len()),
    };
    if code_examples == 0 {
        format!(
            "Source changed ({}). Review documentation for accuracy against current implementation.",
            files_desc
        )
    } else {
        format!(
            "Documentation may be stale: {} code example(s) reference changed source ({}). Update docs to match current implementation.",
            code_examples, files_desc
        )
    }
}
/// Convert every task with `Broken` status into a `BrokenReference`,
/// attaching surrounding doc lines for context and a fallback action when
/// the verifier supplied none.
fn extract_broken_references(
    tasks: &[AuditTask],
    doc_contents: &HashMap<String, String>,
) -> Vec<BrokenReference> {
    let mut broken = Vec::new();
    for task in tasks {
        if !matches!(task.status, AuditTaskStatus::Broken) {
            continue;
        }
        let action = match &task.action {
            Some(a) => a.clone(),
            None => "Stale reference. Update or remove from documentation.".to_string(),
        };
        broken.push(BrokenReference {
            doc: task.doc.clone(),
            line: task.line,
            claim: task.claim.clone(),
            confidence: task.confidence.clone(),
            doc_context: extract_doc_context(doc_contents, &task.doc, task.line),
            action,
        });
    }
    broken
}
/// Return up to three numbered lines of `doc_file` around 1-based `line`
/// (one before, the line itself, one after), formatted as "N: text".
/// `None` when the doc is unknown or the line is out of range.
fn extract_doc_context(
    doc_contents: &HashMap<String, String>,
    doc_file: &str,
    line: usize,
) -> Option<Vec<String>> {
    let content = doc_contents.get(doc_file)?;
    let all_lines: Vec<&str> = content.lines().collect();
    // Reject line 0 (lines are 1-based) and anything past the end.
    if !(1..=all_lines.len()).contains(&line) {
        return None;
    }
    let idx = line - 1;
    let first = idx.saturating_sub(1);
    let last = (idx + 2).min(all_lines.len());
    let snippet: Vec<String> = all_lines[first..last]
        .iter()
        .enumerate()
        .map(|(offset, text)| format!("{}: {}", first + offset + 1, text))
        .collect();
    (!snippet.is_empty()).then(|| snippet)
}
fn collect_module_feature_patterns(comp: &component::Component) -> Vec<String> {
let mut patterns = Vec::new();
if let Some(ref modules) = comp.modules {
for module_id in modules.keys() {
if let Ok(manifest) = module::load_module(module_id) {
patterns.extend(manifest.audit_feature_patterns().to_vec());
}
}
}
patterns
}
/// Aggregated outcome of `detect_features`.
struct FeatureDetectionResult {
    // Count of distinct feature names found in source.
    total: usize,
    // Features whose name appears somewhere in docs/README content.
    documented: usize,
    // Features with no documentation mention.
    undocumented: Vec<UndocumentedFeature>,
}
/// Scan source files for feature declarations matched by `feature_patterns`
/// and report which of them are mentioned in the documentation.
///
/// Each pattern must capture the feature name in group 1. A feature counts
/// as "documented" when its name appears verbatim anywhere in the combined
/// text of all docs (from every dir in `docs_dirs`) plus any top-level
/// README file. Names are deduplicated across files and patterns: only the
/// first occurrence of a name is counted or reported.
fn detect_features(
    feature_patterns: &[String],
    source_path: &Path,
    docs_dirs: &[String],
    changelog_exclude: Option<&str>,
) -> FeatureDetectionResult {
    let empty = FeatureDetectionResult {
        total: 0,
        documented: 0,
        undocumented: Vec::new(),
    };
    if feature_patterns.is_empty() {
        return empty;
    }
    // Invalid regexes are silently dropped; the source text of each pattern
    // is retained for reporting alongside matches.
    let compiled: Vec<(Regex, String)> = feature_patterns
        .iter()
        .filter_map(|p| Regex::new(p).ok().map(|r| (r, p.clone())))
        .collect();
    if compiled.is_empty() {
        return empty;
    }
    // Build one haystack out of every doc in every docs dir...
    let mut all_doc_parts: Vec<String> = Vec::new();
    for docs_dir in docs_dirs {
        let docs_path = source_path.join(docs_dir);
        let doc_files = find_doc_files(&docs_path, changelog_exclude);
        for f in &doc_files {
            if let Ok(content) = fs::read_to_string(docs_path.join(f)) {
                all_doc_parts.push(content);
            }
        }
    }
    // ...plus any top-level README variant.
    for readme in &["README.md", "readme.md", "README.txt", "readme.txt"] {
        let readme_path = source_path.join(readme);
        if readme_path.exists() {
            if let Ok(content) = fs::read_to_string(&readme_path) {
                all_doc_parts.push(content);
            }
        }
    }
    let all_doc_content = all_doc_parts.join("\n");
    let source_files = find_source_files(source_path);
    let mut undocumented = Vec::new();
    let mut seen_names: HashSet<String> = HashSet::new();
    let mut documented_count: usize = 0;
    for file in &source_files {
        let file_path = source_path.join(file);
        let content = match fs::read_to_string(&file_path) {
            Ok(c) => c,
            Err(_) => continue,
        };
        // line_offsets[i] is the byte offset where line i+1 starts.
        let line_offsets: Vec<usize> = std::iter::once(0)
            .chain(content.match_indices('\n').map(|(i, _)| i + 1))
            .collect();
        for (regex, pattern) in &compiled {
            for caps in regex.captures_iter(&content) {
                if let Some(name_match) = caps.get(1) {
                    let name = name_match.as_str().to_string();
                    if seen_names.contains(&name) {
                        continue;
                    }
                    seen_names.insert(name.clone());
                    if all_doc_content.contains(&name) {
                        documented_count += 1;
                    } else {
                        let byte_pos = name_match.start();
                        // Number of line starts at or before the match's byte
                        // position equals its 1-based line number.
                        let line_num = line_offsets.partition_point(|&offset| offset <= byte_pos);
                        undocumented.push(UndocumentedFeature {
                            name,
                            source_file: file.clone(),
                            line: line_num,
                            pattern: pattern.clone(),
                        });
                    }
                }
            }
        }
    }
    FeatureDetectionResult {
        total: seen_names.len(),
        documented: documented_count,
        undocumented,
    }
}
/// Directory names never scanned for source files: third-party dependency
/// trees, VCS metadata, and build output.
const SKIP_DIRS: &[&str] = &[
    "vendor",
    "node_modules",
    ".git",
    "target",
    "build",
    "dist",
    "__pycache__",
    ".svn",
];
/// List all non-markdown source files under `source_path`, as sorted paths
/// relative to `source_path`. Hidden entries and `SKIP_DIRS` are excluded.
fn find_source_files(source_path: &Path) -> Vec<String> {
    let mut found = Vec::new();
    collect_source_files(source_path, source_path, &mut found);
    // Deterministic order regardless of directory iteration order.
    found.sort();
    found
}
/// Recursive helper for `find_source_files`: push every non-markdown,
/// non-hidden file under `dir` (relative to `base`) onto `files`.
fn collect_source_files(base: &Path, dir: &Path, files: &mut Vec<String>) {
    let entries = match fs::read_dir(dir) {
        Ok(e) => e,
        Err(_) => return,
    };
    for entry in entries.flatten() {
        let path = entry.path();
        let name = entry.file_name().to_string_lossy().to_string();
        // Hidden entries are never scanned.
        if name.starts_with('.') {
            continue;
        }
        if path.is_dir() {
            // Recurse, except into well-known dependency/build directories.
            if !SKIP_DIRS.contains(&name.as_str()) {
                collect_source_files(base, &path, files);
            }
            continue;
        }
        // Markdown is documentation, not source.
        if !path.is_file() || name.ends_with(".md") {
            continue;
        }
        if let Ok(relative) = path.strip_prefix(base) {
            files.push(relative.to_string_lossy().to_string());
        }
    }
}
fn collect_module_ignore_patterns(comp: &component::Component) -> Vec<String> {
let mut patterns = Vec::new();
if let Some(ref modules) = comp.modules {
for module_id in modules.keys() {
if let Ok(manifest) = module::load_module(module_id) {
patterns.extend(manifest.audit_ignore_claim_patterns().to_vec());
}
}
}
patterns
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_diff_file_paths_basic() {
let diff = r#"diff --git a/src/main.rs b/src/main.rs
index abc123..def456 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,3 +1,4 @@
+// New comment
fn main() {}
diff --git a/src/lib.rs b/src/lib.rs
index 111222..333444 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1 @@
-old
+new
"#;
let files = parse_diff_file_paths(diff);
assert_eq!(files, vec!["src/main.rs", "src/lib.rs"]);
}
#[test]
fn test_parse_diff_file_paths_empty() {
let diff = "";
let files = parse_diff_file_paths(diff);
assert!(files.is_empty());
}
#[test]
fn test_parse_diff_file_paths_no_diff_lines() {
let diff = "Some random text\nwithout diff headers";
let files = parse_diff_file_paths(diff);
assert!(files.is_empty());
}
#[test]
fn test_references_file_exact_match() {
assert!(references_file("src/main.rs", "src/main.rs"));
assert!(references_file("/src/main.rs", "src/main.rs"));
assert!(references_file("src/main.rs", "/src/main.rs"));
}
#[test]
fn test_references_file_directory_contains() {
assert!(references_file("inc/Engine/", "inc/Engine/AI/Tools.php"));
assert!(references_file("/inc/Engine/", "inc/Engine/AI/Tools.php"));
}
#[test]
fn test_references_file_directory_no_match() {
assert!(!references_file("inc/Engine/", "inc/Other/file.php"));
assert!(!references_file("inc/Engine", "inc/Engine/AI/Tools.php"));
}
#[test]
fn test_references_file_basename_match() {
assert!(references_file(
"ToolExecutor::run()",
"inc/Engine/ToolExecutor.php"
));
assert!(references_file("new BaseTool()", "src/tools/BaseTool.rs"));
}
#[test]
fn test_references_file_basename_short_name_no_match() {
assert!(!references_file("use AI;", "src/AI.php"));
}
#[test]
fn test_references_file_no_match() {
assert!(!references_file("totally/different.rs", "src/main.rs"));
assert!(!references_file("random text", "src/file.rs"));
}
#[test]
fn test_build_doc_action_single_file() {
let action = build_doc_action(&["src/main.rs".to_string()], 0);
assert!(action.contains("src/main.rs"));
assert!(action.contains("Source changed"));
}
#[test]
fn test_build_doc_action_multiple_files() {
let action = build_doc_action(&["src/main.rs".to_string(), "src/lib.rs".to_string()], 0);
assert!(action.contains("2 files"));
}
#[test]
fn test_build_doc_action_with_code_examples() {
let action = build_doc_action(&["src/main.rs".to_string()], 3);
assert!(action.contains("3 code example(s)"));
assert!(action.contains("stale"));
}
#[test]
fn test_extract_broken_references() {
let tasks = vec![
AuditTask {
doc: "api/index.md".to_string(),
line: 10,
claim: "file path `src/old.rs`".to_string(),
claim_type: ClaimType::FilePath,
claim_value: "src/old.rs".to_string(),
confidence: ClaimConfidence::Real,
status: AuditTaskStatus::Broken,
action: Some("File no longer exists".to_string()),
},
AuditTask {
doc: "api/index.md".to_string(),
line: 20,
claim: "file path `src/main.rs`".to_string(),
claim_type: ClaimType::FilePath,
claim_value: "src/main.rs".to_string(),
confidence: ClaimConfidence::Real,
status: AuditTaskStatus::Verified,
action: None,
},
];
let doc_contents = HashMap::new(); let broken = extract_broken_references(&tasks, &doc_contents);
assert_eq!(broken.len(), 1);
assert_eq!(broken[0].doc, "api/index.md");
assert_eq!(broken[0].line, 10);
assert_eq!(broken[0].action, "File no longer exists");
assert!(broken[0].doc_context.is_none()); }
#[test]
fn test_build_priority_docs_filters_by_changed_files() {
let tasks = vec![
AuditTask {
doc: "api/tools.md".to_string(),
line: 10,
claim: "file path `inc/ToolExecutor.php`".to_string(),
claim_type: ClaimType::FilePath,
claim_value: "inc/ToolExecutor.php".to_string(),
confidence: ClaimConfidence::Real,
status: AuditTaskStatus::Verified,
action: None,
},
AuditTask {
doc: "api/other.md".to_string(),
line: 5,
claim: "file path `inc/Unrelated.php`".to_string(),
claim_type: ClaimType::FilePath,
claim_value: "inc/Unrelated.php".to_string(),
confidence: ClaimConfidence::Real,
status: AuditTaskStatus::Verified,
action: None,
},
];
let changed_files = vec!["inc/ToolExecutor.php".to_string()];
let priority = build_priority_docs(&tasks, &changed_files);
assert_eq!(priority.len(), 1);
assert_eq!(priority[0].doc, "api/tools.md");
assert_eq!(
priority[0].changed_files_referenced,
vec!["inc/ToolExecutor.php"]
);
}
#[test]
fn test_build_priority_docs_sorts_by_impact() {
let tasks = vec![
AuditTask {
doc: "doc_one.md".to_string(),
line: 1,
claim_type: ClaimType::FilePath,
claim: "".to_string(),
claim_value: "file1.rs".to_string(),
confidence: ClaimConfidence::Real,
status: AuditTaskStatus::Verified,
action: None,
},
AuditTask {
doc: "doc_two.md".to_string(),
line: 1,
claim_type: ClaimType::FilePath,
claim: "".to_string(),
claim_value: "file1.rs".to_string(),
confidence: ClaimConfidence::Real,
status: AuditTaskStatus::Verified,
action: None,
},
AuditTask {
doc: "doc_two.md".to_string(),
line: 2,
claim_type: ClaimType::FilePath,
claim: "".to_string(),
claim_value: "file2.rs".to_string(),
confidence: ClaimConfidence::Real,
status: AuditTaskStatus::Verified,
action: None,
},
];
let changed_files = vec!["file1.rs".to_string(), "file2.rs".to_string()];
let priority = build_priority_docs(&tasks, &changed_files);
assert_eq!(priority.len(), 2);
assert_eq!(priority[0].doc, "doc_two.md");
assert_eq!(priority[0].changed_files_referenced.len(), 2);
assert_eq!(priority[1].doc, "doc_one.md");
assert_eq!(priority[1].changed_files_referenced.len(), 1);
}
#[test]
fn test_detect_features_finds_missing() {
let dir = tempfile::tempdir().unwrap();
let source_path = dir.path();
let docs_path = source_path.join("docs");
fs::create_dir_all(&docs_path).unwrap();
fs::write(
source_path.join("plugin.js"),
"registerStepType('coolStep', handler);\nregisterStepType('docStep', handler);\n",
)
.unwrap();
fs::write(
docs_path.join("guide.md"),
"Use the docStep to do things.\n",
)
.unwrap();
let patterns = vec![r#"registerStepType\(\s*'(\w+)'"#.to_string()];
let result = detect_features(&patterns, source_path, &["docs".to_string()], None);
assert_eq!(result.total, 2);
assert_eq!(result.documented, 1);
assert_eq!(result.undocumented.len(), 1);
assert_eq!(result.undocumented[0].name, "coolStep");
assert_eq!(result.undocumented[0].source_file, "plugin.js");
assert_eq!(result.undocumented[0].line, 1);
}
#[test]
fn test_detect_features_empty_when_no_patterns() {
let dir = tempfile::tempdir().unwrap();
let result = detect_features(&[], dir.path(), &["docs".to_string()], None);
assert_eq!(result.total, 0);
assert!(result.undocumented.is_empty());
}
#[test]
fn test_detect_features_skips_md_files() {
let dir = tempfile::tempdir().unwrap();
let source_path = dir.path();
let docs_path = source_path.join("docs");
fs::create_dir_all(&docs_path).unwrap();
fs::write(
source_path.join("notes.md"),
"registerStepType('hidden', h);\n",
)
.unwrap();
let patterns = vec![r#"registerStepType\(\s*'(\w+)'"#.to_string()];
let result = detect_features(&patterns, source_path, &["docs".to_string()], None);
assert_eq!(result.total, 0);
assert!(result.undocumented.is_empty());
}
#[test]
fn test_detect_features_all_documented() {
let dir = tempfile::tempdir().unwrap();
let source_path = dir.path();
let docs_path = source_path.join("docs");
fs::create_dir_all(&docs_path).unwrap();
fs::write(
source_path.join("plugin.js"),
"registerStepType('myStep', handler);\n",
)
.unwrap();
fs::write(
docs_path.join("guide.md"),
"The myStep feature does things.\n",
)
.unwrap();
let patterns = vec![r#"registerStepType\(\s*'(\w+)'"#.to_string()];
let result = detect_features(&patterns, source_path, &["docs".to_string()], None);
assert_eq!(result.total, 1);
assert_eq!(result.documented, 1);
assert!(result.undocumented.is_empty());
}
// Features declared in nested directories must still be found; the pattern
// captures the REST namespace from the first register_rest_route() argument.
// NOTE: the original pattern literal was corrupted (`vec['"]"#...`); it is
// reconstructed here from the surviving raw-string tail and the expected
// capture "myplugin/v1".
#[test]
fn test_detect_features_scans_subdirectories() {
    let dir = tempfile::tempdir().unwrap();
    let source_path = dir.path();
    let docs_path = source_path.join("docs");
    let inc_path = source_path.join("inc").join("Api");
    fs::create_dir_all(&docs_path).unwrap();
    fs::create_dir_all(&inc_path).unwrap();
    fs::write(
        inc_path.join("Routes.php"),
        "register_rest_route('myplugin/v1', '/items', []);\n",
    )
    .unwrap();
    let patterns = vec![r#"register_rest_route\(\s*['"]([^'"]+)['"]"#.to_string()];
    let result = detect_features(&patterns, source_path, &["docs".to_string()], None);
    assert_eq!(result.undocumented.len(), 1);
    assert_eq!(result.undocumented[0].name, "myplugin/v1");
    assert!(result.undocumented[0].source_file.contains("Routes.php"));
}
// Files under vendor/ must be ignored by feature detection entirely.
// NOTE: the original pattern literal was corrupted (`vec['"]"#...`); it is
// reconstructed to match the sibling subdirectory test's pattern.
#[test]
fn test_detect_features_skips_vendor() {
    let dir = tempfile::tempdir().unwrap();
    let source_path = dir.path();
    let docs_path = source_path.join("docs");
    let vendor_path = source_path.join("vendor").join("lib");
    fs::create_dir_all(&docs_path).unwrap();
    fs::create_dir_all(&vendor_path).unwrap();
    fs::write(
        vendor_path.join("plugin.php"),
        "register_rest_route('vendor/v1', '/stuff', []);\n",
    )
    .unwrap();
    let patterns = vec![r#"register_rest_route\(\s*['"]([^'"]+)['"]"#.to_string()];
    let result = detect_features(&patterns, source_path, &["docs".to_string()], None);
    assert_eq!(result.total, 0);
    assert!(result.undocumented.is_empty());
}
#[test]
fn test_detect_features_deduplicates() {
let dir = tempfile::tempdir().unwrap();
let source_path = dir.path();
let docs_path = source_path.join("docs");
fs::create_dir_all(&docs_path).unwrap();
fs::write(
source_path.join("a.js"),
"registerStepType('myStep', handler);\n",
)
.unwrap();
fs::write(
source_path.join("b.js"),
"registerStepType('myStep', handler);\n",
)
.unwrap();
let patterns = vec![r#"registerStepType\(\s*'(\w+)'"#.to_string()];
let result = detect_features(&patterns, source_path, &["docs".to_string()], None);
assert_eq!(result.total, 1); assert_eq!(result.undocumented.len(), 1);
}
#[test]
fn test_detect_features_reads_readme() {
let dir = tempfile::tempdir().unwrap();
let source_path = dir.path();
let docs_path = source_path.join("docs");
fs::create_dir_all(&docs_path).unwrap();
fs::write(
source_path.join("plugin.js"),
"registerStepType('readmeStep', handler);\nregisterStepType('hiddenStep', handler);\n",
)
.unwrap();
fs::write(
source_path.join("README.md"),
"This plugin provides readmeStep for automation.\n",
)
.unwrap();
let patterns = vec![r#"registerStepType\(\s*'(\w+)'"#.to_string()];
let result = detect_features(&patterns, source_path, &["docs".to_string()], None);
assert_eq!(result.total, 2);
assert_eq!(result.documented, 1);
assert_eq!(result.undocumented.len(), 1);
assert_eq!(result.undocumented[0].name, "hiddenStep");
}
#[test]
fn test_detect_features_multiple_dirs() {
let dir = tempfile::tempdir().unwrap();
let source_path = dir.path();
let docs_path = source_path.join("docs");
let wiki_path = source_path.join("wiki");
fs::create_dir_all(&docs_path).unwrap();
fs::create_dir_all(&wiki_path).unwrap();
fs::write(
source_path.join("plugin.js"),
"registerStepType('wikiStep', handler);\nregisterStepType('orphanStep', handler);\n",
)
.unwrap();
fs::write(
wiki_path.join("features.md"),
"The wikiStep handles wiki operations.\n",
)
.unwrap();
let patterns = vec![r#"registerStepType\(\s*'(\w+)'"#.to_string()];
let result = detect_features(&patterns, source_path, &["docs".to_string()], None);
assert_eq!(result.total, 2);
assert_eq!(result.undocumented.len(), 2);
let result = detect_features(
&patterns,
source_path,
&["docs".to_string(), "wiki".to_string()],
None,
);
assert_eq!(result.total, 2);
assert_eq!(result.documented, 1);
assert_eq!(result.undocumented.len(), 1);
assert_eq!(result.undocumented[0].name, "orphanStep");
}
#[test]
fn test_extract_doc_context_with_content() {
let mut doc_contents = HashMap::new();
doc_contents.insert(
"api/tools.md".to_string(),
"# Tools\n\nSee `src/old.rs` for details.\n\nMore content here.\n".to_string(),
);
let context = extract_doc_context(&doc_contents, "api/tools.md", 3);
assert!(context.is_some());
let lines = context.unwrap();
assert_eq!(lines.len(), 3); assert!(lines[0].starts_with("2:"));
assert!(lines[1].contains("src/old.rs"));
assert!(lines[2].starts_with("4:"));
}
#[test]
fn test_extract_doc_context_first_line() {
let mut doc_contents = HashMap::new();
doc_contents.insert("test.md".to_string(), "# Title\nSecond line\n".to_string());
let context = extract_doc_context(&doc_contents, "test.md", 1);
assert!(context.is_some());
let lines = context.unwrap();
assert_eq!(lines.len(), 2); assert!(lines[0].starts_with("1:"));
}
#[test]
fn test_extract_doc_context_missing_doc() {
let doc_contents = HashMap::new();
let context = extract_doc_context(&doc_contents, "nonexistent.md", 1);
assert!(context.is_none());
}
#[test]
fn test_broken_reference_includes_context() {
let tasks = vec![AuditTask {
doc: "api/tools.md".to_string(),
line: 2,
claim: "file path `src/old.rs`".to_string(),
claim_type: ClaimType::FilePath,
claim_value: "src/old.rs".to_string(),
confidence: ClaimConfidence::Real,
status: AuditTaskStatus::Broken,
action: Some("File no longer exists".to_string()),
}];
let mut doc_contents = HashMap::new();
doc_contents.insert(
"api/tools.md".to_string(),
"# API\nSee `src/old.rs` for the tool.\nMore info below.\n".to_string(),
);
let broken = extract_broken_references(&tasks, &doc_contents);
assert_eq!(broken.len(), 1);
assert!(broken[0].doc_context.is_some());
let ctx = broken[0].doc_context.as_ref().unwrap();
assert!(ctx.iter().any(|l| l.contains("src/old.rs")));
}
#[test]
fn test_find_doc_files_excludes_changelog_by_default() {
let dir = tempfile::tempdir().unwrap();
let docs_path = dir.path();
fs::write(docs_path.join("guide.md"), "# Guide\n").unwrap();
fs::write(
docs_path.join("CHANGELOG.md"),
"# Changelog\n## v1.0\n- Removed old/path.rs\n",
)
.unwrap();
fs::write(docs_path.join("api.md"), "# API\n").unwrap();
let files = find_doc_files(docs_path, Some("CHANGELOG.md"));
assert_eq!(files.len(), 2);
assert!(files.contains(&"api.md".to_string()));
assert!(files.contains(&"guide.md".to_string()));
assert!(!files.iter().any(|f| f.to_lowercase().contains("changelog")));
}
#[test]
fn test_find_doc_files_changelog_exclusion_case_insensitive() {
let dir = tempfile::tempdir().unwrap();
let docs_path = dir.path();
fs::write(docs_path.join("guide.md"), "# Guide\n").unwrap();
fs::write(docs_path.join("changelog.md"), "# Changes\n").unwrap();
let files = find_doc_files(docs_path, Some("CHANGELOG.md"));
assert_eq!(files.len(), 1);
assert_eq!(files[0], "guide.md");
}
#[test]
fn test_find_doc_files_no_exclusion_when_none() {
let dir = tempfile::tempdir().unwrap();
let docs_path = dir.path();
fs::write(docs_path.join("guide.md"), "# Guide\n").unwrap();
fs::write(docs_path.join("CHANGELOG.md"), "# Changelog\n").unwrap();
let files = find_doc_files(docs_path, None);
assert_eq!(files.len(), 2);
assert!(files.iter().any(|f| f == "CHANGELOG.md"));
}
#[test]
fn test_find_doc_files_custom_changelog_target() {
let dir = tempfile::tempdir().unwrap();
let docs_path = dir.path();
fs::write(docs_path.join("guide.md"), "# Guide\n").unwrap();
fs::write(docs_path.join("CHANGELOG.md"), "# Changelog\n").unwrap();
fs::write(docs_path.join("CHANGES.md"), "# Changes\n").unwrap();
let files = find_doc_files(docs_path, Some("CHANGES.md"));
assert_eq!(files.len(), 2);
assert!(files.contains(&"CHANGELOG.md".to_string()));
assert!(files.contains(&"guide.md".to_string()));
assert!(!files.iter().any(|f| f == "CHANGES.md"));
}
#[test]
fn test_build_priority_docs_empty_when_no_changes() {
let tasks = vec![AuditTask {
doc: "api/tools.md".to_string(),
line: 10,
claim: "file path `inc/Tool.php`".to_string(),
claim_type: ClaimType::FilePath,
claim_value: "inc/Tool.php".to_string(),
confidence: ClaimConfidence::Real,
status: AuditTaskStatus::Verified,
action: None,
}];
let changed_files: Vec<String> = vec![];
let priority = build_priority_docs(&tasks, &changed_files);
assert!(priority.is_empty());
}
}