1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
// Quality gate check implementations - included from quality_gates.rs
// NO use imports or #! attributes - shares parent module scope
impl QAVerification {
    /// Registers the dead-code sanity check.
    ///
    /// Validates that the measured dead-code ratio is plausible for the size
    /// and language composition of the analyzed project.
    fn add_dead_code_checks(checks: &mut Vec<(&'static str, QualityCheck)>) {
        checks.push((
            "dead_code_sanity",
            Box::new(|result| {
                // Prefer line counts from complexity metrics; fall back to the
                // dead-code analysis summary when metrics are absent.
                let total_lines = result
                    .complexity_metrics
                    .as_ref()
                    .map_or(0, |m| m.files.iter().map(|f| f.total_lines).sum::<usize>());
                let total_lines = if total_lines == 0 {
                    result
                        .dead_code_analysis
                        .as_ref()
                        .map_or(0, |d| d.summary.total_lines)
                } else {
                    total_lines
                };
                if total_lines == 0 {
                    // No lines analyzed. Fail only when no files were discovered
                    // at all; a non-empty tree of non-analyzable files (e.g., no
                    // Rust/Python/JS) passes with an implicit warning that
                    // analysis was limited. The original code also carried an
                    // unreachable `Err("No lines analyzed - invalid result")`
                    // after testing both `== 0` and `> 0` on an unsigned count;
                    // that dead branch has been removed.
                    if result.file_tree.is_empty() {
                        return Err("No files discovered in project".into());
                    }
                    return Ok(());
                }
                let dead_lines = result
                    .dead_code_analysis
                    .as_ref()
                    .map_or(0, |d| d.summary.total_dead_lines);
                // Safe: total_lines > 0 is guaranteed by the early return above.
                let ratio = dead_lines as f64 / total_lines as f64;
                if ratio == 0.0 && total_lines > 1000 {
                    // Zero dead code in a large project is suspicious; decide
                    // whether the project composition makes a false negative likely.
                    let has_ffi_or_wasm = result.file_tree.iter().any(|path| {
                        path.contains("wasm") || path.contains("ffi") || path.contains("bindgen")
                    });
                    let has_typescript = result
                        .language_stats
                        .as_ref()
                        .and_then(|stats| stats.get("TypeScript"))
                        .is_some_and(|&count| count > 0);
                    let has_python = result
                        .language_stats
                        .as_ref()
                        .and_then(|stats| stats.get("Python"))
                        .is_some_and(|&count| count > 0);
                    if has_ffi_or_wasm {
                        Err("Zero dead code with FFI/WASM code present - likely false negative".into())
                    } else if has_typescript || has_python {
                        Err("Mixed language project with zero dead code - verify cross-language tracing".into())
                    } else {
                        // Pure Rust project in early stages might legitimately
                        // have no dead code.
                        Ok(())
                    }
                } else if ratio > 0.15 {
                    Err(format!("Excessive dead code: {:.1}%", ratio * 100.0))
                } else {
                    Ok(())
                }
            }),
        ));
    }

    /// Registers statistical checks over the per-function complexity data:
    /// coefficient-of-variation and Shannon-entropy plausibility tests that
    /// flag likely parser failures (suspiciously uniform distributions).
    fn add_complexity_checks(checks: &mut Vec<(&'static str, QualityCheck)>) {
        // Complexity distribution check: the spread of cyclomatic values in a
        // real codebase should show meaningful variation.
        checks.push((
            "complexity_distribution",
            Box::new(|result| {
                let functions: Vec<_> = result
                    .complexity_metrics
                    .as_ref()
                    .map(|m| m.files.iter().flat_map(|f| &f.functions).collect())
                    .unwrap_or_default();
                if functions.len() < 50 {
                    return Ok(()); // Too small for distribution analysis
                }
                // Coefficient of variation = (stddev / mean) * 100.
                let mean = functions
                    .iter()
                    .map(|f| f64::from(f.cyclomatic))
                    .sum::<f64>()
                    / functions.len() as f64;
                if mean == 0.0 {
                    return Err("All functions have zero complexity - parser error".into());
                }
                // Population variance (divides by n, not n - 1).
                let variance = functions
                    .iter()
                    .map(|f| (f64::from(f.cyclomatic) - mean).powi(2))
                    .sum::<f64>()
                    / functions.len() as f64;
                let cv = (variance.sqrt() / mean) * 100.0;
                if cv < 30.0 {
                    Err(format!(
                        "Low complexity variation (CV={cv:.1}%) - possible parser issue"
                    ))
                } else {
                    Ok(())
                }
            }),
        ));
        // Entropy check for large codebases: a healthy project spreads its
        // cyclomatic values over enough distinct buckets to exceed 2 bits.
        checks.push((
            "complexity_entropy",
            Box::new(|result| {
                let functions: Vec<_> = result
                    .complexity_metrics
                    .as_ref()
                    .map(|m| m.files.iter().flat_map(|f| &f.functions).collect())
                    .unwrap_or_default();
                if functions.len() < 100 {
                    return Ok(()); // Too small for entropy analysis
                }
                let entropy = calculate_complexity_entropy(&functions);
                if entropy < 2.0 {
                    Err(format!(
                        "Low complexity entropy: {entropy:.2} (expected >= 2.0)"
                    ))
                } else {
                    Ok(())
                }
            }),
        ));
    }

    /// Registers the AST coverage check: at least half of the discovered files
    /// must have produced an AST summary.
    fn add_coverage_checks(checks: &mut Vec<(&'static str, QualityCheck)>) {
        checks.push((
            "ast_coverage",
            Box::new(|result| {
                let total_files = result.file_tree.len();
                let ast_files = result.ast_summaries.as_ref().map_or(0, std::vec::Vec::len);
                if total_files == 0 {
                    return Err("No files found in project".into());
                }
                let coverage = ast_files as f64 / total_files as f64;
                if coverage < 0.5 {
                    Err(format!(
                        "Low AST coverage: {:.1}% (expected >= 50%)",
                        coverage * 100.0
                    ))
                } else {
                    Ok(())
                }
            }),
        ));
    }

    /// Registers the empty-sections check: every analysis section is expected
    /// to be populated; any `None` section is reported by name.
    fn add_section_checks(checks: &mut Vec<(&'static str, QualityCheck)>) {
        checks.push((
            "empty_sections",
            Box::new(|result| {
                let mut empty_sections = Vec::new();
                if result.dead_code_analysis.is_none() {
                    empty_sections.push("dead_code_analysis");
                }
                if result.complexity_metrics.is_none() {
                    empty_sections.push("complexity_metrics");
                }
                if result.ast_summaries.is_none() {
                    empty_sections.push("ast_summaries");
                }
                if result.churn_analysis.is_none() {
                    empty_sections.push("churn_analysis");
                }
                if empty_sections.is_empty() {
                    Ok(())
                } else {
                    Err(format!(
                        "Empty sections found: {}",
                        empty_sections.join(", ")
                    ))
                }
            }),
        ));
    }
}
/// Shannon entropy (in bits) of the cyclomatic-complexity distribution.
///
/// Each distinct cyclomatic value forms one bucket; the entropy of the bucket
/// probabilities measures how spread-out the complexity values are. An empty
/// slice yields 0.0 (the sum over an empty histogram).
fn calculate_complexity_entropy(functions: &[&FunctionComplexityForQA]) -> f64 {
    // Tally how many functions share each cyclomatic value.
    let mut counts = FxHashMap::default();
    for func in functions {
        *counts.entry(func.cyclomatic).or_insert(0) += 1;
    }
    let n = functions.len() as f64;
    // Accumulate -sum(p * log2(p)) over the observed buckets.
    let mut entropy = 0.0;
    for &count in counts.values() {
        let p = f64::from(count) / n;
        entropy -= p * p.log2();
    }
    entropy
}