1use std::fs;
2use std::path::{Path, PathBuf};
3
4use pedant_types::AnalysisTier;
5
6use crate::analysis_result::AnalysisResult;
7use crate::capabilities::detect_capabilities;
8use crate::check_config::CheckConfig;
9use crate::ir;
10use crate::ir::DataFlowFact;
11use crate::ir::semantic::SemanticContext;
12use crate::style::check_style;
13
/// Errors that can occur while linting a file or source string.
#[derive(Debug, thiserror::Error)]
pub enum LintError {
    /// Underlying filesystem failure (e.g. the target file cannot be read).
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),
    /// The Rust source could not be parsed by `syn`.
    #[error("parse error: {0}")]
    ParseError(#[from] syn::Error),
    /// A `Cargo.toml` manifest could not be parsed as TOML.
    #[error("TOML parse error: {0}")]
    TomlParseError(#[from] toml::de::Error),
}
27
/// Analyzes a regular (non-build-script) Rust source file: parses it, runs
/// style checks and capability detection, and returns the combined result.
///
/// `semantic` optionally supplies cross-file semantic information; pass
/// `None` for purely syntactic analysis.
///
/// # Errors
/// Returns a [`syn::Error`] if `source` fails to parse.
pub fn analyze(
    file_path: &str,
    source: &str,
    config: &CheckConfig,
    semantic: Option<&SemanticContext>,
) -> Result<AnalysisResult, syn::Error> {
    // `false`: build-script-specific capability detection is disabled.
    analyze_inner(file_path, source, config, semantic, false)
}
40
/// Analyzes a build script (`build.rs`-style) source file.
///
/// Identical to [`analyze`] except that build-script-specific capability
/// detection rules are enabled.
///
/// # Errors
/// Returns a [`syn::Error`] if `source` fails to parse.
pub fn analyze_build_script(
    file_path: &str,
    source: &str,
    config: &CheckConfig,
    semantic: Option<&SemanticContext>,
) -> Result<AnalysisResult, syn::Error> {
    // `true`: apply build-script-specific capability rules.
    analyze_inner(file_path, source, config, semantic, true)
}
50
/// Shared implementation behind [`analyze`] and [`analyze_build_script`].
///
/// Parses `source`, lowers it to the crate IR, collects style violations and
/// capability findings, and packages everything into an [`AnalysisResult`].
/// `build_script` toggles build-script-specific capability detection.
fn analyze_inner(
    file_path: &str,
    source: &str,
    config: &CheckConfig,
    semantic: Option<&SemanticContext>,
    build_script: bool,
) -> Result<AnalysisResult, syn::Error> {
    let syntax = syn::parse_file(source)?;
    let ir = ir::extract(file_path, &syntax, semantic);
    let violations = check_style(&ir, config).into_boxed_slice();
    let capabilities = detect_capabilities(&ir, build_script);

    // With the "semantic" feature enabled, shadow `capabilities` with a
    // version whose findings carry reachability annotations — but only when
    // a semantic context was actually supplied.
    #[cfg(feature = "semantic")]
    let capabilities = {
        let mut caps = capabilities;
        if let Some(ctx) = semantic {
            enrich_reachability(&mut caps.findings, ctx);
        }
        caps
    };

    Ok(AnalysisResult {
        violations,
        capabilities,
        data_flows: ir.data_flows,
    })
}
78
79pub fn lint_str(source: &str, config: &CheckConfig) -> Result<AnalysisResult, LintError> {
81 analyze("<string>", source, config, None).map_err(LintError::from)
82}
83
84pub fn lint_file(path: &Path, config: &CheckConfig) -> Result<AnalysisResult, LintError> {
86 let source = fs::read_to_string(path)?;
87 let file_path = path.to_string_lossy();
88 analyze(&file_path, &source, config, None).map_err(LintError::from)
89}
90
/// Walks up from `start` looking for the workspace root.
///
/// Returns the first ancestor directory whose `Cargo.toml` declares a
/// `[workspace]`; if none does, falls back to the nearest ancestor whose
/// `Cargo.toml` declares a `[package]` (a standalone crate root).
/// Returns `None` when neither is found.
pub fn discover_workspace_root(start: &Path) -> Option<PathBuf> {
    // Accept either a directory or a file inside one.
    let origin = if start.is_dir() { start } else { start.parent()? };

    let mut fallback_package: Option<PathBuf> = None;
    for dir in origin.ancestors() {
        // Unreadable/missing manifests are simply skipped.
        if let Ok(contents) = fs::read_to_string(dir.join("Cargo.toml")) {
            if contents.contains("[workspace]") {
                // A workspace manifest always wins, even over a closer package.
                return Some(dir.to_path_buf());
            }
            if contents.contains("[package]") && fallback_package.is_none() {
                fallback_package = Some(dir.to_path_buf());
            }
        }
    }
    fallback_package
}
115
116pub fn discover_build_script(crate_root: &Path) -> Result<Option<PathBuf>, LintError> {
121 let cargo_toml_path = crate_root.join("Cargo.toml");
122 let cargo_toml_contents = match fs::read_to_string(&cargo_toml_path) {
123 Ok(contents) => contents,
124 Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(None),
125 Err(e) => return Err(LintError::IoError(e)),
126 };
127 let table: toml::Table = cargo_toml_contents.parse()?;
128
129 let custom_path = table
130 .get("package")
131 .and_then(toml::Value::as_table)
132 .and_then(|pkg| pkg.get("build"))
133 .and_then(toml::Value::as_str);
134
135 let candidate = match custom_path {
136 Some(build_path) => crate_root.join(build_path),
137 None => crate_root.join("build.rs"),
138 };
139
140 Ok(candidate.is_file().then_some(candidate))
141}
142
143pub fn analyze_with_build_script(
148 file_path: &str,
149 source: &str,
150 config: &CheckConfig,
151 semantic: Option<&SemanticContext>,
152 build_source: Option<(&str, &str)>,
153) -> Result<AnalysisResult, syn::Error> {
154 let mut result = analyze(file_path, source, config, semantic)?;
155
156 let Some((build_path, build_src)) = build_source else {
157 return Ok(result);
158 };
159
160 let build_syntax = syn::parse_file(build_src)?;
161 let build_ir = ir::extract(build_path, &build_syntax, semantic);
162 let build_caps = detect_capabilities(&build_ir, true);
163
164 let mut merged: Vec<pedant_types::CapabilityFinding> = result.capabilities.findings.into_vec();
165 merged.extend(build_caps.findings.into_vec());
166 result.capabilities.findings = merged.into_boxed_slice();
167
168 Ok(result)
169}
170
171pub fn determine_analysis_tier(
178 semantic: Option<&SemanticContext>,
179 data_flows: &[DataFlowFact],
180) -> AnalysisTier {
181 match (semantic.is_some(), !data_flows.is_empty()) {
182 (_, true) => AnalysisTier::DataFlow,
183 (true, false) => AnalysisTier::Semantic,
184 (false, false) => AnalysisTier::Syntactic,
185 }
186}
187
/// Annotates each capability finding with whether its source line is
/// reachable, according to the semantic context.
///
/// Findings are grouped by file so reachability can be queried in one batch
/// call per file rather than one call per finding.
#[cfg(feature = "semantic")]
fn enrich_reachability(findings: &mut [pedant_types::CapabilityFinding], ctx: &SemanticContext) {
    use std::collections::BTreeMap;

    // Map each file to the positions of its findings in `findings`.
    // BTreeMap keeps per-file processing order deterministic.
    let mut file_groups: BTreeMap<String, Vec<usize>> = BTreeMap::new();
    for (i, finding) in findings.iter().enumerate() {
        let key = finding.location.file.to_string();
        file_groups.entry(key).or_default().push(i);
    }

    for (path, group) in &file_groups {
        let line_numbers: Vec<usize> =
            group.iter().map(|&i| findings[i].location.line).collect();
        // One batched query per file; results are positionally aligned
        // with `line_numbers` (and therefore with `group`).
        let reachability = ctx.check_reachability_batch(path, &line_numbers);
        for (pos, &i) in group.iter().enumerate() {
            findings[i].reachable = Some(reachability[pos]);
        }
    }
}