// Imports assumed by this excerpt; crate-local items (AstCache, ParsedAst,
// CombinedAnalyses, DeepAnalysisResult, the `Result` alias, ...) are defined
// elsewhere in the crate.
use std::path::Path;
use std::sync::Arc;

use indicatif::{ProgressBar, ProgressStyle};
use rayon::prelude::*;
use tokio::sync::mpsc;
use tracing::{info, warn};

impl ConcurrentDeepContextAnalyzer {
#[provable_contracts_macros::contract("pmat-core.yaml", equation = "path_exists")]
pub async fn analyze_project_concurrent(&self, path: &Path) -> Result<DeepAnalysisResult> {
info!("Starting concurrent analysis for {:?}", path);
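        // Phase 1: parse every discovered source file once into a shared AST cache.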
let ast_cache = self.parse_files_parallel(path).await?;
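        // Phase 2: fan the cache out to all analyses and run them concurrently.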
let analyses = self.run_analyses_parallel(path, &ast_cache).await?;
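        // Phase 3: fold the individual analysis results into the final report.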
let result = self.generate_output(analyses).await?;
Ok(result)
}
async fn parse_files_parallel(&self, path: &Path) -> Result<Arc<AstCache>> {
use crate::services::file_discovery::ProjectFileDiscovery;
let pb = self.create_progress_bar("Parsing files", 100);
let discovery = ProjectFileDiscovery::new(path.to_path_buf());
let files = discovery.discover_files()?;
pb.set_length(files.len() as u64);
pb.set_message("Parsing ASTs in parallel");
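        // Parse on the rayon thread pool. The indexed `collect` preserves input order,
        // so the results can later be zipped back with `files`; `ProgressBar` is
        // internally synchronized, so incrementing it from worker threads is safe.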
let parsed_files: Vec<_> = files
.par_iter()
            .map(|file| {
                let parsed = self.parse_single_file(file);
                // Count a file only after its parse completes so the bar tracks real progress.
                pb.inc(1);
                parsed
            })
.collect();
pb.finish_with_message("AST parsing complete");
let mut cache = AstCache::new();
        for (file, ast) in files.iter().zip(parsed_files) {
            match ast {
                Ok(ast) => {
                    cache.insert(file.clone(), ast);
                }
                // Surface parse failures instead of dropping them silently.
                Err(e) => warn!("Skipping unparseable file {:?}: {}", file, e),
            }
        }
Ok(Arc::new(cache))
}
async fn run_analyses_parallel(
&self,
path: &Path,
ast_cache: &Arc<AstCache>,
) -> Result<CombinedAnalyses> {
let pb = self.create_progress_bar("Running analyses", ANALYSIS_COUNT);
        let (complexity, provability, satd, churn, dag, tdg, big_o, dead_code) = tokio::join!(
            self.analyze_complexity_cached(ast_cache),
            self.analyze_provability_cached(ast_cache),
            self.analyze_satd_async(path),
            self.analyze_churn_async(path),
            self.analyze_dag_async(path),
            self.analyze_tdg_cached(ast_cache),
            self.analyze_big_o_cached(ast_cache),
            self.analyze_dead_code_cached(ast_cache),
        );
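        // The joined analyses complete together, so the bar advances to full in one step.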
pb.inc(ANALYSIS_COUNT);
pb.finish_with_message("All analyses complete");
Ok(CombinedAnalyses {
complexity: complexity?,
provability: provability?,
satd: satd?,
churn: churn?,
dag: dag?,
tdg: tdg?,
big_o: big_o?,
dead_code: dead_code?,
})
}
async fn analyze_complexity_cached(&self, ast_cache: &Arc<AstCache>) -> Result<ComplexityResults> {
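        // CPU-bound rayon work inside an async fn: acceptable for short bursts since the
        // caller joins on this task, but heavier workloads may warrant `spawn_blocking`.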
let results: Vec<_> = ast_cache
.files()
.par_iter()
.map(|(_file, ast)| {
self.calculate_complexity_for_ast(ast)
})
.collect();
Ok(ComplexityResults::combine(results))
}
async fn analyze_provability_cached(&self, ast_cache: &Arc<AstCache>) -> Result<Vec<ProofSummary>> {
use crate::services::lightweight_provability_analyzer::LightweightProvabilityAnalyzer;
let analyzer = LightweightProvabilityAnalyzer::new();
let function_ids: Vec<_> = ast_cache
.files()
.par_iter()
.flat_map(|(_file, ast)| {
self.extract_functions_from_ast(ast)
})
.collect();
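        // Fan out: spawn a task per 50-function chunk and funnel the summaries through
        // a bounded channel, so at most 100 result batches are buffered at once.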
let (tx, mut rx) = mpsc::channel(100);
for chunk in function_ids.chunks(50) {
let chunk = chunk.to_vec();
let tx = tx.clone();
let analyzer = analyzer.clone();
tokio::spawn(async move {
let summaries = analyzer.analyze_incrementally(&chunk).await;
let _ = tx.send(summaries).await;
});
}
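        // Dropping the original sender lets `rx.recv()` return None once every spawned
        // task finishes; note that summaries arrive in completion order, not submission order.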
drop(tx);
let mut all_summaries = Vec::new();
while let Some(summaries) = rx.recv().await {
all_summaries.extend(summaries);
}
Ok(all_summaries)
}
async fn analyze_satd_async(&self, path: &Path) -> Result<SATDAnalysisResult> {
use crate::services::satd_detector::SATDDetector;
let detector = SATDDetector::new();
detector.analyze_project(path, false).await
}
async fn analyze_churn_async(&self, path: &Path) -> Result<ChurnAnalysis> {
analyze_churn(path, self.config.period_days).await
}
async fn analyze_dag_async(&self, path: &Path) -> Result<DependencyGraph> {
analyze_dag(path, self.config.dag_type).await
}
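    // The remaining cached analyses are placeholders: they accept the shared AST
    // cache but currently return default results.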
async fn analyze_tdg_cached(&self, _ast_cache: &Arc<AstCache>) -> Result<TDGResults> {
Ok(TDGResults::default())
}
async fn analyze_big_o_cached(&self, _ast_cache: &Arc<AstCache>) -> Result<BigOResults> {
Ok(BigOResults::default())
}
async fn analyze_dead_code_cached(&self, _ast_cache: &Arc<AstCache>) -> Result<DeadCodeResults> {
Ok(DeadCodeResults::default())
}
fn create_progress_bar(&self, message: &str, total: u64) -> ProgressBar {
let pb = self.progress.add(ProgressBar::new(total));
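        // `ProgressStyle::template` validates its format string (indicatif 0.17+),
        // so the `expect` can only fire on a malformed literal.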
pb.set_style(
ProgressStyle::default_bar()
.template("{spinner:.green} {msg} [{bar:40.cyan/blue}] {pos}/{len} ({eta})")
.expect("Progress bar template must be valid")
.progress_chars("#>-"),
);
pb.set_message(message.to_string());
pb
}
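    // Parsing and per-AST extraction are stubbed; real implementations would build a
    // `ParsedAst` per file and derive complexity metrics and function IDs from it.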
fn parse_single_file(&self, _file: &Path) -> Result<ParsedAst> {
Ok(ParsedAst::default())
}
fn calculate_complexity_for_ast(&self, _ast: &ParsedAst) -> ComplexityResult {
ComplexityResult::default()
}
fn extract_functions_from_ast(&self, _ast: &ParsedAst) -> Vec<FunctionId> {
vec![]
}
async fn generate_output(&self, analyses: CombinedAnalyses) -> Result<DeepAnalysisResult> {
Ok(DeepAnalysisResult {
analyses,
timestamp: std::time::SystemTime::now(),
})
}
}