1use anyhow::Result;
7use std::path::Path;
8use std::process::Command;
9
10use crate::sandbox;
11
/// Static-analysis findings for a single source file, produced by
/// [`verify_file`]. Flags start optimistic (`lint_passed`, `syntax_valid`)
/// and are cleared as checks discover problems.
#[derive(Debug, serde::Serialize)]
pub struct QualityReport {
    // False once any lint issue has been attributed to this file.
    pub lint_passed: bool,
    // Human-readable findings: lint messages, TODO markers, secret hits.
    pub lint_issues: Vec<String>,
    // Result of the language-specific syntax check (defaults to true).
    pub syntax_valid: bool,
    // Heuristic: file appears to define tests.
    pub has_tests: bool,
    // Heuristic: file appears to contain docstrings/doc comments.
    pub has_docstring: bool,
    // Heuristic: file appears to handle errors (try/except, Result, ...).
    pub has_error_handling: bool,
    // Heuristic: file appears to embed credentials or API keys.
    pub has_hardcoded_secrets: bool,
    // Aggregate 0.0-10.0 score derived from the flags above; see
    // `calculate_score` for the weighting.
    pub score: f32,
}
24
/// Aggregated quality results for an entire project directory, produced by
/// [`verify_project`].
#[derive(Debug, serde::Serialize)]
pub struct ProjectReport {
    // (relative path, per-file report) for every recognized source file.
    pub file_reports: Vec<(String, QualityReport)>,
    // Counts from the project's own test suite (both 0 when none ran).
    pub tests_passed: u32,
    pub tests_failed: u32,
    // Whether a test runner was actually executed (false when the
    // toolchain for the language was unavailable).
    pub tests_run: bool,
    // Mean per-file score, shifted by the test pass rate when tests ran.
    pub avg_score: f32,
    // Representative error lines captured from the test-run output.
    pub test_errors: Vec<String>,
}
36
/// Run language-appropriate static checks on a single source file and
/// compute its aggregate quality score.
///
/// Language-agnostic checks (hardcoded secrets, TODO markers) always run;
/// the per-language verifier then fills in the heuristic flags.
///
/// # Errors
/// Returns an error if the file cannot be read as UTF-8 text.
pub fn verify_file(path: &Path, language: &str) -> Result<QualityReport> {
    let content = std::fs::read_to_string(path)?;
    // Optimistic baseline; the checks below flip flags as needed.
    let mut report = QualityReport {
        lint_passed: true,
        lint_issues: vec![],
        syntax_valid: true,
        has_tests: false,
        has_docstring: false,
        has_error_handling: false,
        has_hardcoded_secrets: false,
        score: 0.0,
    };

    // Language-agnostic checks first.
    check_secrets(&content, &mut report);
    check_todos(&content, &mut report);

    // Language-specific verification; unknown languages get weak generic checks.
    match language {
        "python" => verify_python(path, &content, &mut report),
        "javascript" | "typescript" => verify_js_ts(&content, &mut report),
        "rust" => verify_rust(&content, &mut report),
        "go" => verify_go(&content, &mut report),
        _ => verify_generic(&content, &mut report),
    }

    calculate_score(&mut report);
    Ok(report)
}
65
/// Verify every recognized source file under `dir`, run the project-wide
/// linter and test suite, and fold everything into a [`ProjectReport`].
///
/// # Errors
/// Returns an error if the directory walk fails.
pub fn verify_project(dir: &Path, language: &str) -> Result<ProjectReport> {
    let mut file_reports = Vec::new();

    for entry in walkdir_files(dir)? {
        // Map file extension -> per-file verification language; files with
        // unrecognized extensions are skipped entirely.
        let ext = entry.extension().and_then(|e| e.to_str()).unwrap_or("");
        let file_lang = match ext {
            "py" => "python",
            "ts" | "tsx" => "typescript",
            "js" | "jsx" => "javascript",
            "rs" => "rust",
            "go" => "go",
            "cpp" | "cc" | "cxx" | "hpp" | "h" => "c++",
            _ => continue,
        };

        // Best-effort scan: unreadable files are silently skipped.
        if let Ok(report) = verify_file(&entry, file_lang) {
            let rel = entry.strip_prefix(dir).unwrap_or(&entry);
            file_reports.push((rel.display().to_string(), report));
        }
    }

    // May append lint issues to individual file reports and re-score them.
    run_project_linter(dir, language, &mut file_reports);

    let (tests_passed, tests_failed, tests_run, test_errors) = run_project_tests(dir, language);

    // Neutral 5.0 when no files were scored at all.
    let avg_score = if file_reports.is_empty() {
        5.0
    } else {
        file_reports.iter().map(|(_, r)| r.score).sum::<f32>() / file_reports.len() as f32
    };

    // Shift the average by up to +/-2.0 based on the test pass rate
    // (pass rate 1.0 => +2.0, pass rate 0.0 => -2.0), clamped to 0..=10.
    let adjusted_avg = if tests_run && (tests_passed > 0 || tests_failed > 0) {
        let total = tests_passed + tests_failed;
        let pass_rate = tests_passed as f32 / total as f32;
        let test_adjustment = (pass_rate * 4.0) - 2.0;
        (avg_score + test_adjustment).clamp(0.0, 10.0)
    } else {
        avg_score
    };

    Ok(ProjectReport {
        file_reports,
        tests_passed,
        tests_failed,
        tests_run,
        avg_score: adjusted_avg,
        test_errors,
    })
}
121
122fn run_project_linter(dir: &Path, language: &str, reports: &mut Vec<(String, QualityReport)>) {
124 if language == "python" {
125 let result = sandbox::run_tool("ruff", &["check", "."], dir);
126 if !result.success && !result.stderr.contains("not found") {
127 let issues = sandbox::parse_ruff_output(&result.stdout);
128 if !issues.is_empty() {
129 println!(" ruff: {} issues found", issues.len());
130 for issue in &issues {
131 if let Some((_, report)) = reports.iter_mut().find(|(f, _)| *f == issue.file) {
133 report.lint_issues.push(issue.to_string());
134 report.lint_passed = false;
135 calculate_score(report);
136 }
137 }
138 }
139 }
140 }
141}
142
143fn run_project_tests(dir: &Path, language: &str) -> (u32, u32, bool, Vec<String>) {
146 match language {
147 "python" => {
148 if !sandbox::tool_exists("python3") {
149 println!(" pytest: python3 not found, skipping tests");
150 return (0, 0, false, vec![]);
151 }
152
153 let venv_dir = dir.join(".venv");
154 let venv_python = if cfg!(windows) {
156 venv_dir.join("Scripts").join("python.exe")
157 } else {
158 venv_dir.join("bin").join("python3")
159 };
160 let system_python = if cfg!(windows) { "python" } else { "python3" };
161
162 let python = if venv_python.exists() {
163 std::path::absolute(&venv_python)
166 .unwrap_or_else(|_| venv_python.clone())
167 .to_string_lossy()
168 .to_string()
169 } else {
170 println!(" Creating venv...");
172 let venv_result = sandbox::run_tool_with_timeout(
173 system_python,
174 &["-m", "venv", ".venv"],
175 dir,
176 30,
177 );
178 if venv_result.success {
179 std::path::absolute(&venv_python)
180 .unwrap_or_else(|_| venv_python.clone())
181 .to_string_lossy()
182 .to_string()
183 } else {
184 println!(" venv creation failed, using system python");
185 system_python.to_string()
186 }
187 };
188
189 println!(" Installing dependencies in venv...");
191 let pyproject_path = dir.join("pyproject.toml");
192 let req_path = dir.join("requirements.txt");
193
194 if req_path.exists() {
196 let _ = sandbox::run_tool_with_timeout(
197 &python,
198 &[
199 "-m",
200 "pip",
201 "install",
202 "-q",
203 "--disable-pip-version-check",
204 "-r",
205 "requirements.txt",
206 ],
207 dir,
208 90,
209 );
210 }
211
212 if pyproject_path.exists() {
214 let deps = extract_deps_from_pyproject(&pyproject_path);
215 if !deps.is_empty() {
216 println!(" Installing {} deps from pyproject.toml...", deps.len());
217 let dep_refs: Vec<&str> = deps.iter().map(|s| s.as_str()).collect();
218 let mut args =
219 vec!["-m", "pip", "install", "-q", "--disable-pip-version-check"];
220 args.extend(dep_refs.iter());
221 let _ = sandbox::run_tool_with_timeout(&python, &args, dir, 120);
222 }
223 }
224
225 let _ = sandbox::run_tool_with_timeout(
227 &python,
228 &[
229 "-m",
230 "pip",
231 "install",
232 "-q",
233 "--disable-pip-version-check",
234 "pytest",
235 "pytest-asyncio",
236 "pytest-cov",
237 "httpx",
238 "pydantic-settings",
239 "pydantic[email]",
240 "passlib[bcrypt]",
241 "python-jose[cryptography]",
242 "slowapi",
243 "aiosqlite",
244 "sqlalchemy[asyncio]",
245 "fastapi",
246 "uvicorn",
247 "python-multipart",
248 "email-validator",
249 "alembic",
250 "hypothesis",
251 "aiosqlite",
252 "bcrypt<5",
253 ],
254 dir,
255 90,
256 );
257
258 let mut attempt_result = sandbox::ToolResult {
260 success: false,
261 stdout: String::new(),
262 stderr: String::new(),
263 timed_out: false,
264 };
265
266 for attempt in 0..3 {
267 attempt_result = sandbox::run_tool_sandboxed(
268 &python,
269 &[
270 "-m",
271 "pytest",
272 "--tb=short",
273 "-q",
274 "--no-header",
275 "--import-mode=importlib",
276 ],
277 dir,
278 120,
279 true,
280 );
281
282 if attempt_result.timed_out {
283 println!(" pytest: timed out");
284 return (0, 0, true, vec!["pytest timed out after 120s".to_string()]);
285 }
286
287 let combined = format!("{}\n{}", attempt_result.stdout, attempt_result.stderr);
288
289 if combined.contains("Cannot overwrite a value")
291 || combined.contains("TOMLDecodeError")
292 || combined.contains("Invalid statement")
293 {
294 println!(" pytest: pyproject.toml has TOML errors, retrying without it...");
295 let pyp = dir.join("pyproject.toml");
296 let pyp_bak = dir.join("pyproject.toml.bak");
297 let renamed = std::fs::rename(&pyp, &pyp_bak).is_ok();
298 let ini_path = dir.join("pytest.ini");
300 let _ = std::fs::write(&ini_path, "[pytest]\nasyncio_mode = auto\n");
301 attempt_result = sandbox::run_tool_sandboxed(
302 &python,
303 &[
304 "-m",
305 "pytest",
306 "--tb=short",
307 "-q",
308 "--no-header",
309 "--import-mode=importlib",
310 "tests/",
311 ],
312 dir,
313 120,
314 true,
315 );
316 if renamed {
318 let _ = std::fs::rename(&pyp_bak, &pyp);
319 }
320 let _ = std::fs::remove_file(&ini_path);
321 break;
322 }
323
324 let tests_actually_ran =
327 combined.contains("FAILED ") || combined.contains(" passed");
328 if !tests_actually_ran
329 && (combined.contains("ModuleNotFoundError")
330 || combined.contains("ImportError")
331 || combined.contains("NameError")
332 || combined.contains("AttributeError")
333 || combined.contains("TypeError")
334 || combined.contains("SyntaxError")
335 || combined.contains("ValueError"))
336 {
337 if let Some(module) = extract_missing_module(&combined) {
338 println!(
339 " Installing missing module: {} (attempt {})",
340 module,
341 attempt + 1
342 );
343 let _ = sandbox::run_tool_with_timeout(
344 &python,
345 &["-m", "pip", "install", "-q", &module],
346 dir,
347 30,
348 );
349 continue; }
351 let mut errors = Vec::new();
353 for line in combined.lines() {
354 if line.contains("ModuleNotFoundError")
355 || line.contains("ImportError")
356 || line.contains("cannot import name")
357 || line.contains("circular import")
358 || line.contains("NameError")
359 || line.contains("AttributeError")
360 || line.contains("TypeError")
361 || line.contains("SyntaxError")
362 {
363 let err = line.trim().to_string();
364 println!(" pytest: {}", err);
365 errors.push(err);
366 }
367 }
368 if errors.is_empty() {
369 errors.push("ImportError prevented tests from running".to_string());
370 }
371 println!(" pytest: import errors prevented tests from running");
372 return (0, 1, true, errors);
373 }
374 break; }
376
377 let combined = format!("{}\n{}", attempt_result.stdout, attempt_result.stderr);
378
379 let test_result =
380 sandbox::parse_pytest_output(&attempt_result.stdout, &attempt_result.stderr);
381
382 if test_result.passed > 0 || test_result.failed > 0 || test_result.errors > 0 {
383 println!(
384 " pytest: {} passed, {} failed, {} errors",
385 test_result.passed, test_result.failed, test_result.errors
386 );
387 let mut errors = Vec::new();
389 if test_result.failed > 0 || test_result.errors > 0 {
390 for line in combined.lines() {
391 let l = line.trim();
392 if (l.starts_with("FAILED")
393 || l.starts_with("ERROR")
394 || l.contains("AssertionError")
395 || l.contains("assert ")
396 || l.contains("NameError")
397 || l.contains("AttributeError"))
398 && !errors.contains(&l.to_string())
399 {
400 errors.push(l.to_string());
401 }
402 }
403 }
404 (
405 test_result.passed,
406 test_result.failed + test_result.errors,
407 true,
408 errors,
409 )
410 } else if combined.contains("no tests ran") || combined.contains("collected 0 items") {
411 println!(" pytest: no tests found");
412 (0, 0, true, vec![])
413 } else {
414 println!(" pytest: ran but no parseable results");
416 let combined_out = format!("{}\n{}", attempt_result.stdout, attempt_result.stderr);
417 let first_lines: Vec<&str> = combined_out
418 .lines()
419 .filter(|l| !l.trim().is_empty())
420 .take(5)
421 .collect();
422 if !first_lines.is_empty() {
423 println!(" pytest output (first 5 lines):");
424 for line in &first_lines {
425 println!(" {}", line);
426 }
427 }
428 (0, 0, true, vec![])
429 }
430 }
431 "rust" => {
432 if !sandbox::tool_exists("cargo") {
433 println!(" cargo: not found, skipping tests");
434 return (0, 0, false, vec![]);
435 }
436 let result = sandbox::run_tool_sandboxed("cargo", &["test", "--quiet"], dir, 120, true);
437 if result.timed_out {
438 println!(" cargo test: timed out");
439 return (0, 0, true, vec![]);
440 }
441 let combined = format!("{}\n{}", result.stdout, result.stderr);
442 for line in combined.lines() {
443 if line.contains("test result:") {
444 let passed = line
445 .split("passed")
446 .next()
447 .and_then(|s| s.split_whitespace().last())
448 .and_then(|s| s.parse::<u32>().ok())
449 .unwrap_or(0);
450 let failed = line
451 .split("failed")
452 .next()
453 .and_then(|s| s.split_whitespace().last())
454 .and_then(|s| s.parse::<u32>().ok())
455 .unwrap_or(0);
456 println!(" cargo test: {} passed, {} failed", passed, failed);
457 return (passed, failed, true, vec![]);
458 }
459 }
460 if result.success {
461 println!(" cargo test: passed");
462 (1, 0, true, vec![])
463 } else {
464 println!(" cargo test: failed");
465 (0, 1, true, vec![])
466 }
467 }
468 "go" => {
469 if !sandbox::tool_exists("go") {
470 println!(" go: not found, skipping tests");
471 return (0, 0, false, vec![]);
472 }
473 let result =
474 sandbox::run_tool_sandboxed("go", &["test", "./...", "-count=1"], dir, 120, true);
475 if result.timed_out {
476 println!(" go test: timed out");
477 return (0, 0, true, vec![]);
478 }
479 let combined = format!("{}\n{}", result.stdout, result.stderr);
480 if combined.contains("PASS") {
481 let passed = combined.matches("--- PASS").count() as u32;
482 let failed = combined.matches("--- FAIL").count() as u32;
483 println!(" go test: {} passed, {} failed", passed.max(1), failed);
484 (passed.max(1), failed, true, vec![])
485 } else {
486 println!(" go test: failed");
487 (0, 1, true, vec![])
488 }
489 }
490 "typescript" | "javascript" => {
491 let pkg_json = dir.join("package.json");
492 if !pkg_json.exists() {
493 return (0, 0, false, vec![]);
494 }
495 let _ = sandbox::run_tool_with_timeout("npm", &["install", "--silent"], dir, 90);
496 let result = sandbox::run_tool_sandboxed(
497 "npm",
498 &["test", "--", "--passWithNoTests"],
499 dir,
500 120,
501 true,
502 );
503 if result.timed_out {
504 println!(" npm test: timed out");
505 return (0, 0, true, vec![]);
506 }
507 if result.success {
508 println!(" npm test: passed");
509 (1, 0, true, vec![])
510 } else {
511 println!(" npm test: failed");
512 (0, 1, true, vec![])
513 }
514 }
515 _ => (0, 0, false, vec![]),
516 }
517}
518
/// Best-effort extraction of pip-installable package names from a
/// `pyproject.toml`.
///
/// Handles both Poetry tables (`[tool.poetry.dependencies]`) and PEP 621
/// (`[project]` with a `dependencies = [...]` array). This is a line-based
/// scanner, not a TOML parser, and is intentionally forgiving: it returns
/// an empty list when the file cannot be read.
fn extract_deps_from_pyproject(path: &std::path::Path) -> Vec<String> {
    let content = match std::fs::read_to_string(path) {
        Ok(c) => c,
        Err(_) => return vec![],
    };

    let mut deps = Vec::new();
    let mut in_deps_section = false;

    for line in content.lines() {
        let trimmed = line.trim();

        // Section headers that introduce dependency listings.
        if trimmed == "[tool.poetry.dependencies]"
            || trimmed == "[tool.poetry.group.dev.dependencies]"
            || trimmed == "[project]"
            || trimmed.starts_with("[project.optional-dependencies")
        {
            in_deps_section = true;
            continue;
        }

        // Any other section header ends the current dependency section.
        if trimmed.starts_with('[') && in_deps_section {
            in_deps_section = false;
            continue;
        }

        if !in_deps_section {
            continue;
        }

        // The interpreter itself is never pip-installable.
        if trimmed.starts_with("python ") || trimmed.starts_with("python=") {
            continue;
        }

        // Quoted entries inside an array, e.g. `"fastapi>=0.100",`. These
        // must be handled BEFORE the key/value branch: requirement
        // specifiers frequently contain `=` (`>=`, `==`, `~=`) and would
        // otherwise be mis-split as keys, yielding garbage like `fastapi>`
        // plus a duplicate entry.
        if trimmed.starts_with('"') || trimmed.starts_with('\'') {
            let dep = trimmed.trim_matches(|c: char| c == '"' || c == '\'' || c == ',');
            if !dep.is_empty() {
                deps.push(dep.to_string());
            }
            continue;
        }

        // Poetry-style `pkg = "^1.0"` / `pkg = { version = "...", extras = [...] }`.
        if let Some(eq_pos) = trimmed.find('=') {
            let pkg = trimmed[..eq_pos].trim().trim_matches('"');
            // Skip metadata keys that are not package names.
            if pkg.is_empty()
                || pkg.contains(' ')
                || pkg == "name"
                || pkg == "version"
                || pkg == "description"
                || pkg == "authors"
                || pkg == "license"
                || pkg == "readme"
                || pkg == "requires-python"
                || pkg == "dependencies"
            {
                continue;
            }

            let value = trimmed[eq_pos + 1..].trim();
            // A value that opens an array (`dependencies = [`,
            // `classifiers = [`, `keywords = [`, ...) is PEP 621 metadata,
            // not a package; previously these keys were pushed as deps.
            if value.starts_with('[') {
                continue;
            }
            if value.contains("extras") {
                // Preserve extras: `pkg = { extras = ["a", "b"], ... }`
                // becomes `pkg[a,b]`.
                if let Some(extras_start) = value.find('[') {
                    if let Some(extras_end) = value.find(']') {
                        let extras: Vec<&str> = value[extras_start + 1..extras_end]
                            .split(',')
                            .map(|e| e.trim().trim_matches('"').trim_matches('\''))
                            .collect();
                        let extras_str = extras.join(",");
                        deps.push(format!("{}[{}]", pkg, extras_str));
                        continue;
                    }
                }
            }
            deps.push(pkg.to_string());
        }
    }

    deps
}
603
/// Extract the module name from the first `No module named 'x'` line in
/// `output` and translate it to the pip distribution name when they differ.
fn extract_missing_module(output: &str) -> Option<String> {
    // Find the first line reporting a missing import.
    let line = output.lines().find(|l| l.contains("No module named"))?;
    let raw = line.split("No module named").nth(1)?;
    let module = raw
        .trim()
        .trim_matches(|c: char| c == '\'' || c == '"' || c == ' ');
    // Import name -> pip package name, for the packages we expect to see.
    let pip_name = match module {
        "jose" => "python-jose[cryptography]",
        "jwt" => "PyJWT",
        "passlib" => "passlib[bcrypt]",
        "sqlalchemy" => "sqlalchemy",
        "pydantic" => "pydantic[email]",
        "fastapi" => "fastapi",
        "httpx" => "httpx",
        "slowapi" => "slowapi",
        "starlette" => "starlette",
        "dotenv" => "python-dotenv",
        other => other,
    };
    Some(pip_name.to_string())
}
632
633fn check_secrets(content: &str, report: &mut QualityReport) {
634 let secret_patterns = [
635 "password = \"",
636 "secret_key = \"",
637 "api_key = \"",
638 "AWS_SECRET",
639 "PRIVATE_KEY",
640 "sk-",
641 "ghp_",
642 ];
643 for pattern in &secret_patterns {
644 if content.to_lowercase().contains(&pattern.to_lowercase()) {
645 report.has_hardcoded_secrets = true;
646 report
647 .lint_issues
648 .push(format!("Possible hardcoded secret: {}", pattern));
649 }
650 }
651}
652
653fn check_todos(content: &str, report: &mut QualityReport) {
654 for (i, line) in content.lines().enumerate() {
655 let trimmed = line.trim();
656 let is_comment_line = trimmed.starts_with('#')
660 || trimmed.starts_with("//")
661 || trimmed.starts_with("/*")
662 || trimmed.starts_with('*');
663 if !is_comment_line {
664 continue;
665 }
666 let lower = trimmed.to_lowercase();
667 if lower.contains("todo") || lower.contains("fixme") || lower.contains("hack") {
668 report
669 .lint_issues
670 .push(format!("Line {}: TODO/FIXME found", i + 1));
671 }
672 }
673}
674
675fn verify_python(_path: &Path, content: &str, report: &mut QualityReport) {
676 use std::process::Stdio;
678 let child = Command::new("python3")
679 .args(["-c", "import ast, sys; ast.parse(sys.stdin.read())"])
680 .stdin(Stdio::piped())
681 .stdout(Stdio::null())
682 .stderr(Stdio::piped())
683 .spawn();
684 if let Ok(mut child) = child {
685 if let Some(mut stdin) = child.stdin.take() {
686 let _ = std::io::Write::write_all(&mut stdin, content.as_bytes());
687 }
688 if let Ok(output) = child.wait_with_output() {
689 report.syntax_valid = output.status.success();
690 if !output.status.success() {
691 report
692 .lint_issues
693 .push("Python syntax error in generated code".to_string());
694 }
695 return;
696 }
697 }
698 report.syntax_valid = true;
700
701 report.has_tests = content.contains("def test_") || content.contains("class Test");
702 report.has_docstring = content.contains("\"\"\"") || content.contains("'''");
703 report.has_error_handling = content.contains("try:") || content.contains("except ");
704}
705
706fn verify_js_ts(content: &str, report: &mut QualityReport) {
707 report.has_tests =
708 content.contains("describe(") || content.contains("test(") || content.contains("it(");
709 report.has_docstring = content.contains("/**") || content.contains("///");
710 report.has_error_handling =
711 content.contains("try {") || content.contains("catch (") || content.contains(".catch(");
712 report.syntax_valid = content.contains("function ")
713 || content.contains("const ")
714 || content.contains("export ")
715 || content.contains("class ");
716}
717
718fn verify_rust(content: &str, report: &mut QualityReport) {
719 report.has_tests = content.contains("#[test]") || content.contains("#[cfg(test)]");
720 report.has_docstring = content.contains("///") || content.contains("//!");
721 report.has_error_handling = content.contains("Result<")
722 || content.contains("anyhow::")
723 || content.contains(".unwrap_or");
724 report.syntax_valid =
725 content.contains("fn ") || content.contains("struct ") || content.contains("impl ");
726}
727
728fn verify_go(content: &str, report: &mut QualityReport) {
729 report.has_tests = content.contains("func Test");
730 report.has_docstring = content.lines().any(|l| l.starts_with("//"));
731 report.has_error_handling = content.contains("if err != nil");
732 report.syntax_valid = content.contains("package ") && content.contains("func ");
733}
734
735fn verify_generic(content: &str, report: &mut QualityReport) {
736 report.syntax_valid = content.len() > 50;
737 report.has_docstring =
738 content.contains("//") || content.contains("#") || content.contains("/*");
739}
740
741fn calculate_score(report: &mut QualityReport) {
742 let mut score: f32 = 5.0;
743 if report.syntax_valid {
744 score += 1.5;
745 }
746 if report.lint_passed {
747 score += 1.0;
748 }
749 if report.has_tests {
750 score += 1.0;
751 }
752 if report.has_docstring {
753 score += 0.5;
754 }
755 if report.has_error_handling {
756 score += 1.0;
757 }
758 if report.has_hardcoded_secrets {
759 score -= 2.0;
760 }
761 if !report.lint_issues.is_empty() {
762 score -= (0.2 * report.lint_issues.len() as f32).min(2.0);
763 }
764 report.score = score.clamp(0.0, 10.0);
765}
766
767fn walkdir_files(dir: &Path) -> Result<Vec<std::path::PathBuf>> {
768 let mut files = Vec::new();
769 if dir.is_dir() {
770 for entry in std::fs::read_dir(dir)? {
771 let entry = entry?;
772 let path = entry.path();
773 if path.is_dir() {
774 let name = entry.file_name().to_string_lossy().to_string();
776 if name == ".venv"
777 || name == "venv"
778 || name == "__pycache__"
779 || name == "node_modules"
780 || name == ".git"
781 || name == "target"
782 || name == ".mypy_cache"
783 || name == ".pytest_cache"
784 || name == ".ruff_cache"
785 || name == "dist"
786 || name == "build"
787 || name == ".egg-info"
788 || name.ends_with(".egg-info")
789 {
790 continue;
791 }
792 files.extend(walkdir_files(&path)?);
793 } else {
794 files.push(path);
795 }
796 }
797 }
798 Ok(files)
799}
800
#[cfg(test)]
mod tests {
    use super::*;

    // Baseline report: clean lint, valid syntax, every heuristic unset.
    fn empty_report() -> QualityReport {
        QualityReport {
            lint_passed: true,
            lint_issues: vec![],
            syntax_valid: true,
            has_tests: false,
            has_docstring: false,
            has_error_handling: false,
            has_hardcoded_secrets: false,
            score: 0.0,
        }
    }

    // A literal assignment matching a secret pattern must be flagged.
    #[test]
    fn test_secret_detection() {
        let mut report = empty_report();
        check_secrets("password = \"hunter2\"", &mut report);
        assert!(report.has_hardcoded_secrets);
    }

    // Ordinary code must not trip the secret heuristics.
    #[test]
    fn test_no_false_positive_secrets() {
        let mut report = empty_report();
        check_secrets("let x = 42;", &mut report);
        assert!(!report.has_hardcoded_secrets);
    }

    // Only the TODO comment line should be reported, not the code line.
    #[test]
    fn test_todo_detection() {
        let mut report = empty_report();
        check_todos("# TODO: fix this later\ncode here", &mut report);
        assert_eq!(report.lint_issues.len(), 1);
    }

    // All bonuses present: 5.0 base + 1.5 + 1.0 + 1.0 + 0.5 + 1.0 = 10.0.
    #[test]
    fn test_score_calculation() {
        let mut report = empty_report();
        report.syntax_valid = true;
        report.lint_passed = true;
        report.has_tests = true;
        report.has_docstring = true;
        report.has_error_handling = true;
        calculate_score(&mut report);
        assert_eq!(report.score, 10.0);
    }

    // Secrets (-2.0) plus a lint issue must lower the score below clean.
    #[test]
    fn test_score_with_secrets_penalty() {
        let mut report = empty_report();
        report.has_hardcoded_secrets = true;
        report.lint_issues.push("secret found".into());
        calculate_score(&mut report);
        let mut clean_report = empty_report();
        calculate_score(&mut clean_report);
        assert!(report.score < clean_report.score);
    }
}