1use std::fs;
2use std::path::{Path, PathBuf};
3
4use pedant_types::AnalysisTier;
5
6use crate::analysis_result::AnalysisResult;
7use crate::capabilities::detect_capabilities;
8use crate::check_config::CheckConfig;
9use crate::ir;
10use crate::ir::DataFlowFact;
11use crate::ir::extract::compute_fingerprints;
12use crate::ir::semantic::SemanticContext;
13use crate::style::check_style;
14
/// Errors surfaced by the top-level lint entry points
/// ([`lint_str`], [`lint_file`], [`discover_build_script`]).
///
/// Each variant wraps its source error via `#[from]`, so `?` converts
/// automatically; the `#[error]` attribute supplies the `Display` text.
#[derive(Debug, thiserror::Error)]
pub enum LintError {
    /// Filesystem failure while reading a source file or manifest.
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),
    /// Rust source that `syn` could not parse.
    #[error("parse error: {0}")]
    ParseError(#[from] syn::Error),
    /// `Cargo.toml` contents rejected by the TOML deserializer.
    #[error("TOML parse error: {0}")]
    TomlParseError(#[from] toml::de::Error),
}
28
29pub fn analyze(
34 file_path: &str,
35 source: &str,
36 config: &CheckConfig,
37 semantic: Option<&SemanticContext>,
38) -> Result<AnalysisResult, syn::Error> {
39 analyze_inner(file_path, source, config, semantic, false)
40}
41
42pub fn analyze_build_script(
44 file_path: &str,
45 source: &str,
46 config: &CheckConfig,
47 semantic: Option<&SemanticContext>,
48) -> Result<AnalysisResult, syn::Error> {
49 analyze_inner(file_path, source, config, semantic, true)
50}
51
/// Shared implementation behind [`analyze`] and [`analyze_build_script`].
///
/// Pipeline: parse with `syn` → extract IR → style checks → capability
/// detection → (optional) semantic reachability enrichment → fingerprints.
///
/// # Errors
/// Returns the [`syn::Error`] when `source` fails to parse; all later stages
/// are infallible here.
fn analyze_inner(
    file_path: &str,
    source: &str,
    config: &CheckConfig,
    semantic: Option<&SemanticContext>,
    build_script: bool,
) -> Result<AnalysisResult, syn::Error> {
    let syntax = syn::parse_file(source)?;
    let ir = ir::extract(file_path, &syntax, semantic);
    let violations = check_style(&ir, config).into_boxed_slice();
    let capabilities = detect_capabilities(&ir, build_script);

    // When the `semantic` feature is compiled in, shadow `capabilities` with
    // a rebinding whose findings have been enriched with reachability data.
    // Without the feature, the plain binding above flows through unchanged —
    // keep this shadowing block directly after it; the order matters.
    #[cfg(feature = "semantic")]
    let capabilities = {
        let mut caps = capabilities;
        if let Some(ctx) = semantic {
            enrich_reachability(&mut caps.findings, ctx);
        }
        caps
    };

    let fn_fingerprints = compute_fingerprints(&ir);

    Ok(AnalysisResult {
        violations,
        capabilities,
        // `data_flows` moves out of the IR; it is not needed past this point.
        data_flows: ir.data_flows,
        fn_fingerprints,
    })
}
82
83pub fn lint_str(source: &str, config: &CheckConfig) -> Result<AnalysisResult, LintError> {
85 analyze("<string>", source, config, None).map_err(LintError::from)
86}
87
88pub fn lint_file(path: &Path, config: &CheckConfig) -> Result<AnalysisResult, LintError> {
90 let source = fs::read_to_string(path)?;
91 let file_path = path.to_string_lossy();
92 analyze(&file_path, &source, config, None).map_err(LintError::from)
93}
94
/// Walk upward from `start` to find the enclosing Cargo workspace root.
///
/// Returns the nearest ancestor directory whose `Cargo.toml` declares a
/// workspace — either an explicit `[workspace]` table or a dotted form such
/// as `[workspace.dependencies]`, which in TOML implicitly creates the
/// `[workspace]` table. If no workspace manifest is found, falls back to the
/// nearest ancestor containing a `[package]` table, or `None` if neither
/// exists. `start` may be a file; its parent directory is used then.
///
/// NOTE(review): detection is a substring scan, not a TOML parse, so a
/// literal `[workspace]` inside a comment or string is a false positive —
/// an accepted trade-off for this heuristic.
pub fn discover_workspace_root(start: &Path) -> Option<PathBuf> {
    let start_dir = if start.is_dir() { start } else { start.parent()? };

    let mut nearest_package: Option<PathBuf> = None;
    for dir in start_dir.ancestors() {
        // Unreadable/absent manifests are skipped, same as before.
        let Ok(contents) = fs::read_to_string(dir.join("Cargo.toml")) else {
            continue;
        };
        // `[workspace.xyz]` dotted tables declare a workspace too.
        if contents.contains("[workspace]") || contents.contains("[workspace.") {
            return Some(dir.to_path_buf());
        }
        // Remember only the *nearest* package manifest (first one seen).
        if nearest_package.is_none() && contents.contains("[package]") {
            nearest_package = Some(dir.to_path_buf());
        }
    }
    nearest_package
}
119
120pub fn discover_build_script(crate_root: &Path) -> Result<Option<PathBuf>, LintError> {
125 let cargo_toml_path = crate_root.join("Cargo.toml");
126 let cargo_toml_contents = match fs::read_to_string(&cargo_toml_path) {
127 Ok(contents) => contents,
128 Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(None),
129 Err(e) => return Err(LintError::IoError(e)),
130 };
131 let table: toml::Table = cargo_toml_contents.parse()?;
132
133 let custom_path = table
134 .get("package")
135 .and_then(toml::Value::as_table)
136 .and_then(|pkg| pkg.get("build"))
137 .and_then(toml::Value::as_str);
138
139 let candidate = match custom_path {
140 Some(build_path) => crate_root.join(build_path),
141 None => crate_root.join("build.rs"),
142 };
143
144 Ok(candidate.is_file().then_some(candidate))
145}
146
147pub fn analyze_with_build_script(
152 file_path: &str,
153 source: &str,
154 config: &CheckConfig,
155 semantic: Option<&SemanticContext>,
156 build_source: Option<(&str, &str)>,
157) -> Result<AnalysisResult, syn::Error> {
158 let mut result = analyze(file_path, source, config, semantic)?;
159
160 let Some((build_path, build_src)) = build_source else {
161 return Ok(result);
162 };
163
164 let build_syntax = syn::parse_file(build_src)?;
165 let build_ir = ir::extract(build_path, &build_syntax, semantic);
166 let build_caps = detect_capabilities(&build_ir, true);
167
168 let mut merged = result.capabilities.findings.into_vec();
169 merged.extend(build_caps.findings);
170 result.capabilities.findings = merged.into_boxed_slice();
171
172 Ok(result)
173}
174
175pub fn determine_analysis_tier(
182 semantic: Option<&SemanticContext>,
183 data_flows: &[DataFlowFact],
184) -> AnalysisTier {
185 match (semantic.is_some(), !data_flows.is_empty()) {
186 (_, true) => AnalysisTier::DataFlow,
187 (true, false) => AnalysisTier::Semantic,
188 (false, false) => AnalysisTier::Syntactic,
189 }
190}
191
/// Annotate each capability finding with whether its source line is
/// reachable according to the semantic context.
///
/// NOTE(review): assumes `check_reachability_batch` returns exactly one
/// result per queried line, in query order — confirm against its contract;
/// `results[pos]` below would panic otherwise.
#[cfg(feature = "semantic")]
fn enrich_reachability(findings: &mut [pedant_types::CapabilityFinding], ctx: &SemanticContext) {
    use std::collections::BTreeMap;
    use std::sync::Arc;

    // First pass: group finding *indices* by file so reachability can be
    // queried once per file in a batch. Indices rather than `&mut` refs are
    // stored so the slice can still be mutably indexed in the second pass
    // without holding aliasing borrows.
    let mut by_file: BTreeMap<Arc<str>, Vec<usize>> = BTreeMap::new();
    for (idx, finding) in findings.iter().enumerate() {
        by_file
            .entry(Arc::clone(&finding.location.file))
            .or_default()
            .push(idx);
    }

    // Second pass: one batch query per file, then write the answers back
    // positionally — `results[pos]` pairs with `indices[pos]`.
    for (file, indices) in &by_file {
        let lines: Vec<usize> = indices.iter().map(|&i| findings[i].location.line).collect();
        let results = ctx.check_reachability_batch(file, &lines);
        for (pos, &idx) in indices.iter().enumerate() {
            findings[idx].reachable = Some(results[pos]);
        }
    }
}