use std::collections::{HashMap, HashSet};
use std::fs;
use std::path::Path;
/// Parses a rule config source file and returns the set of user-facing
/// config field names declared on its `pub struct MD*Config`.
///
/// Field names are emitted in the form users write in TOML:
/// - `#[serde(rename = "...")]` takes the renamed string,
/// - otherwise the snake_case field name is converted to kebab-case.
///
/// Fields marked `#[serde(skip)]` or `#[serde(skip_deserializing)]` are
/// excluded. `skip_serializing_if` is deliberately NOT treated as a skip:
/// such fields are still deserializable (configurable) and therefore must
/// be documented.
///
/// Returns an empty set when the file cannot be read or contains no
/// matching struct.
fn extract_fields_from_config_file(file_path: &Path) -> HashSet<String> {
    let content = fs::read_to_string(file_path).unwrap_or_default();
    let mut fields = HashSet::new();
    let mut in_struct = false;
    // Signed so malformed input with an extra `}` (e.g. braces inside a
    // doc comment) cannot underflow and panic; the counter was previously
    // an inferred usize.
    let mut brace_depth: i64 = 0;
    let mut pending_rename: Option<String> = None;
    let mut pending_skip = false;
    for line in content.lines() {
        let trimmed = line.trim();
        if trimmed.contains("pub struct MD") && trimmed.contains("Config") {
            in_struct = true;
            brace_depth = if trimmed.contains('{') { 1 } else { 0 };
            continue;
        }
        if !in_struct {
            continue;
        }
        brace_depth += trimmed.matches('{').count() as i64;
        brace_depth -= trimmed.matches('}').count() as i64;
        if brace_depth <= 0 {
            // End of the struct body (or unbalanced braces): reset state.
            in_struct = false;
            pending_rename = None;
            pending_skip = false;
            continue;
        }
        if trimmed.contains("#[serde") {
            // Token-wise match so `skip_serializing_if` (which does NOT
            // prevent deserialization) is not mistaken for `skip`.
            let has_skip = trimmed
                .split(|c: char| !(c.is_alphanumeric() || c == '_'))
                .any(|tok| tok == "skip" || tok == "skip_deserializing");
            if has_skip {
                pending_skip = true;
                continue;
            }
            if trimmed.contains("rename") {
                const MARKER: &str = "rename = \"";
                if let Some(start) = trimmed.find(MARKER) {
                    let rest = &trimmed[start + MARKER.len()..];
                    if let Some(end) = rest.find('"') {
                        pending_rename = Some(rest[..end].to_string());
                    }
                }
                continue;
            }
        }
        if trimmed.starts_with("pub ") {
            let field_part = &trimmed["pub ".len()..];
            if let Some(colon_pos) = field_part.find(':') {
                if pending_skip {
                    // Skipped field: consume its pending attributes.
                    pending_skip = false;
                    pending_rename = None;
                    continue;
                }
                if let Some(renamed) = pending_rename.take() {
                    fields.insert(renamed);
                } else {
                    // Config keys are written in kebab-case in TOML.
                    fields.insert(field_part[..colon_pos].trim().replace('_', "-"));
                }
            }
        }
    }
    fields
}
/// Discovers every rule config source file under `src/rules`.
///
/// Two layouts are recognized:
/// - a directory `mdNNN_name/` containing `mdNNN_config.rs`,
/// - a flat file `mdNNN_name.rs` whose text declares `pub struct MDnnnConfig`.
///
/// Returns a map from the canonical rule name (e.g. `MD013`) to the config
/// files found for it; empty if `src/rules` is unreadable.
fn find_all_config_files() -> HashMap<String, Vec<std::path::PathBuf>> {
    let mut by_rule: HashMap<String, Vec<std::path::PathBuf>> = HashMap::new();
    let entries = match fs::read_dir(Path::new("src/rules")) {
        Ok(entries) => entries,
        Err(_) => return by_rule,
    };
    for entry in entries.flatten() {
        let path = entry.path();
        if path.is_dir() {
            // Directory layout: md013_line_length/md013_config.rs
            let dir_name = path.file_name().unwrap().to_str().unwrap();
            if let Some(rest) = dir_name.strip_prefix("md") {
                if let Some(sep) = rest.find('_') {
                    let digits = &rest[..sep];
                    if let Ok(num) = digits.parse::<u32>() {
                        let candidate = path.join(format!("md{digits}_config.rs"));
                        if candidate.exists() {
                            by_rule
                                .entry(format!("MD{num:03}"))
                                .or_default()
                                .push(candidate);
                        }
                    }
                }
            }
        } else if path.is_file() {
            // Flat layout: md013_line_length.rs declaring `pub struct MD013Config`.
            let stem = path
                .file_name()
                .and_then(|n| n.to_str())
                .and_then(|n| n.strip_prefix("md"))
                .and_then(|n| n.strip_suffix(".rs"));
            if let Some(rest) = stem {
                if let Some(sep) = rest.find('_') {
                    if let Ok(num) = rest[..sep].parse::<u32>() {
                        let rule_name = format!("MD{num:03}");
                        let text = fs::read_to_string(&path).unwrap_or_default();
                        if text.contains(&format!("pub struct {rule_name}Config")) {
                            by_rule.entry(rule_name).or_default().push(path);
                        }
                    }
                }
            }
        }
    }
    by_rule
}
/// Collects every TOML key assigned inside ```toml fenced blocks of a
/// rule documentation file.
///
/// Section headers (`[table]`), full-line comments, and text outside the
/// fenced blocks are ignored. A trailing `#` in the key part is stripped.
/// Returns an empty set if the file is unreadable.
fn get_documented_fields_in_file(doc_path: &Path) -> HashSet<String> {
    let text = fs::read_to_string(doc_path).unwrap_or_default();
    let mut keys = HashSet::new();
    let mut inside_fence = false;
    for raw in text.lines() {
        let line = raw.trim();
        match line {
            "```toml" => {
                inside_fence = true;
                continue;
            }
            "```" => {
                inside_fence = false;
                continue;
            }
            _ => {}
        }
        // Skip everything outside a toml fence, table headers, and comments.
        if !inside_fence
            || (line.starts_with('[') && line.ends_with(']'))
            || line.starts_with('#')
        {
            continue;
        }
        let Some(eq) = line.find('=') else { continue };
        let mut key = line[..eq].trim();
        if let Some(hash) = key.find('#') {
            key = key[..hash].trim();
        }
        if !key.is_empty() {
            keys.insert(key.to_string());
        }
    }
    keys
}
#[test]
fn test_all_config_fields_are_documented() {
    // Cross-checks every discovered rule config struct against its
    // docs/mdXXX.md file: each config field must appear as a key inside
    // at least one ```toml example block.
    let config_files = find_all_config_files();

    // Deterministic report order.
    let mut rules: Vec<_> = config_files.keys().cloned().collect();
    rules.sort();

    let mut report = String::from("\n=== Config Documentation Validation ===\n\n");
    let mut all_passed = true;

    for rule_name in &rules {
        let doc_path = Path::new("docs").join(format!("{}.md", rule_name.to_lowercase()));
        if !doc_path.exists() {
            report.push_str(&format!("⚠️ {rule_name}: No documentation file found\n"));
            continue;
        }

        // Union of fields across every config file found for this rule.
        let mut config_fields = HashSet::new();
        for file in &config_files[rule_name] {
            config_fields.extend(extract_fields_from_config_file(file));
        }
        if config_fields.is_empty() {
            continue;
        }

        let documented_fields = get_documented_fields_in_file(&doc_path);
        let mut undocumented: Vec<String> = config_fields
            .difference(&documented_fields)
            .cloned()
            .collect();
        undocumented.sort();

        if undocumented.is_empty() {
            let count = config_fields.len();
            report.push_str(&format!("✅ {rule_name}: All {count} config fields documented\n"));
        } else {
            all_passed = false;
            report.push_str(&format!("❌ {rule_name}: Undocumented config fields:\n"));
            for field in &undocumented {
                report.push_str(&format!("   - {field}\n"));
            }
            let rule_lower = rule_name.to_lowercase();
            report.push_str(&format!("   File: docs/{rule_lower}.md\n"));
        }
    }

    let count = rules.len();
    report.push_str(&format!("\n=== Summary: {count} rules checked ===\n"));
    println!("{report}");

    // assert! with a message panics exactly like the previous panic! call
    // when any rule has undocumented fields.
    assert!(
        all_passed,
        "\n\n❌ Some config fields are not documented!\n\
         Please add documentation for the fields listed above.\n\
         Documentation files are in docs/mdXXX.md\n\
         \n\
         To fix: Add TOML examples showing the missing config fields.\n"
    );
}