// pmat 3.11.0
//
// PMAT - Zero-config AI context generation and code quality toolkit (CLI, MCP, HTTP)
// Analysis methods for ConcurrentDeepContextAnalyzer
// Included from deep_context_concurrent.rs - do NOT add `use` imports or `#!` attributes here

impl ConcurrentDeepContextAnalyzer {
    /// Orchestrate the full concurrent analysis pipeline for a project.
    ///
    /// Three phases run in sequence: every file is parsed once into a shared
    /// AST cache, all analyses then run concurrently against that cache, and
    /// finally the combined analyses are assembled into the result.
    pub async fn analyze_project_concurrent(&self, path: &Path) -> Result<DeepAnalysisResult> {
        info!("Starting concurrent analysis for {:?}", path);

        // Phase 1: parse every discovered file exactly once, in parallel.
        let cache = self.parse_files_parallel(path).await?;

        // Phase 2: fan out every analysis concurrently over the shared cache.
        let combined = self.run_analyses_parallel(path, &cache).await?;

        // Phase 3: stream the combined analyses into the final result.
        self.generate_output(combined).await
    }

    /// Discover all project files and parse their ASTs in parallel (rayon).
    ///
    /// Files that fail to parse are skipped, so one bad file does not abort
    /// the whole analysis; the cache holds only successfully parsed ASTs.
    /// The cache is returned behind an `Arc` so downstream analyses can
    /// share it without copying.
    async fn parse_files_parallel(&self, path: &Path) -> Result<Arc<AstCache>> {
        use crate::services::file_discovery::ProjectFileDiscovery;

        let pb = self.create_progress_bar("Parsing files", 100);

        // Walk the project tree to collect candidate source files.
        let files = ProjectFileDiscovery::new(path.to_path_buf()).discover_files()?;

        pb.set_length(files.len() as u64);
        pb.set_message("Parsing ASTs in parallel");

        // Fan the parse work out across the rayon thread pool; one result
        // per file, in the same order as `files`.
        let parsed: Vec<_> = files
            .par_iter()
            .map(|file| {
                pb.inc(1);
                self.parse_single_file(file)
            })
            .collect();

        pb.finish_with_message("AST parsing complete");

        // Keep only the files that parsed cleanly (best-effort: parse
        // failures are silently dropped, as before).
        let mut cache = AstCache::new();
        for (file, ast) in files
            .iter()
            .zip(parsed)
            .filter_map(|(file, result)| result.ok().map(|ast| (file, ast)))
        {
            cache.insert(file.clone(), ast);
        }

        Ok(Arc::new(cache))
    }

    /// Run every analysis concurrently over the shared AST cache.
    ///
    /// All eight analyses are awaited together via `tokio::join!`. Because
    /// `join!` polls the futures in place on this task (nothing is spawned
    /// with a `'static` bound), the futures can simply borrow `ast_cache`
    /// and `path` — no `Arc` clones or `PathBuf` copies are needed. This
    /// also removes the previously unused `cache3`/`cache4`/`cache5`
    /// clones, which were dead code.
    ///
    /// Fails with the first error from any analysis; all must succeed.
    async fn run_analyses_parallel(
        &self,
        path: &Path,
        ast_cache: &Arc<AstCache>,
    ) -> Result<CombinedAnalyses> {
        let pb = self.create_progress_bar("Running analyses", ANALYSIS_COUNT);

        // Borrows are sufficient: `ast_cache` and `path` outlive every
        // await point inside `join!`.
        let (complexity, provability, satd, churn, dag, tdg, big_o, dead_code) = tokio::join!(
            self.analyze_complexity_cached(ast_cache),
            self.analyze_provability_cached(ast_cache),
            self.analyze_satd_async(path),
            self.analyze_churn_async(path),
            self.analyze_dag_async(path),
            self.analyze_tdg_cached(ast_cache),
            self.analyze_big_o_cached(ast_cache),
            self.analyze_dead_code_cached(ast_cache),
        );

        pb.inc(ANALYSIS_COUNT);
        pb.finish_with_message("All analyses complete");

        // Propagate the first failure; `?` on each field surfaces it.
        Ok(CombinedAnalyses {
            complexity: complexity?,
            provability: provability?,
            satd: satd?,
            churn: churn?,
            dag: dag?,
            tdg: tdg?,
            big_o: big_o?,
            dead_code: dead_code?,
        })
    }

    /// Compute complexity metrics from the already-parsed ASTs.
    ///
    /// Each cached AST is scored independently on the rayon pool, then the
    /// per-file results are merged into a single summary.
    async fn analyze_complexity_cached(&self, ast_cache: &Arc<AstCache>) -> Result<ComplexityResults> {
        // Score every file in parallel; combine() does the aggregation.
        let per_file: Vec<_> = ast_cache
            .files()
            .par_iter()
            .map(|(_path, ast)| self.calculate_complexity_for_ast(ast))
            .collect();

        Ok(ComplexityResults::combine(per_file))
    }

    /// Run lightweight provability analysis over every cached function.
    ///
    /// Function IDs are extracted from the cached ASTs in parallel, then
    /// analyzed in batches of 50 on spawned tasks. A bounded channel
    /// provides backpressure while the batch results stream back. Runs to
    /// completion — no timeout is applied.
    async fn analyze_provability_cached(&self, ast_cache: &Arc<AstCache>) -> Result<Vec<ProofSummary>> {
        use crate::services::lightweight_provability_analyzer::LightweightProvabilityAnalyzer;

        let analyzer = LightweightProvabilityAnalyzer::new();

        // Pull every function ID out of the cached ASTs on the rayon pool.
        let function_ids: Vec<_> = ast_cache
            .files()
            .par_iter()
            .flat_map(|(_path, ast)| self.extract_functions_from_ast(ast))
            .collect();

        // Bounded channel: producers block if collection falls behind.
        let (tx, mut rx) = mpsc::channel(100);

        // One spawned task per batch of 50 functions.
        for batch in function_ids.chunks(50) {
            let batch = batch.to_vec();
            let sender = tx.clone();
            let batch_analyzer = analyzer.clone();

            tokio::spawn(async move {
                let summaries = batch_analyzer.analyze_incrementally(&batch).await;
                // Receiver may have been dropped on early exit; ignore.
                let _ = sender.send(summaries).await;
            });
        }

        // Drop our sender so `recv` yields `None` once all tasks finish.
        drop(tx);

        // Drain every batch result into one flat vector.
        let mut all_summaries = Vec::new();
        while let Some(summaries) = rx.recv().await {
            all_summaries.extend(summaries);
        }

        Ok(all_summaries)
    }

    /// Scan the project for self-admitted technical debt (SATD) markers.
    async fn analyze_satd_async(&self, path: &Path) -> Result<SATDAnalysisResult> {
        use crate::services::satd_detector::SATDDetector;
        // `false` here matches the previous call — presumably a
        // strict/include-tests style flag; confirm against the detector API.
        SATDDetector::new().analyze_project(path, false).await
    }

    /// Measure code churn over the configured analysis window.
    async fn analyze_churn_async(&self, path: &Path) -> Result<ChurnAnalysis> {
        let period_days = self.config.period_days;
        analyze_churn(path, period_days).await
    }

    /// Build the dependency graph of the configured DAG type.
    async fn analyze_dag_async(&self, path: &Path) -> Result<DependencyGraph> {
        let dag_type = self.config.dag_type;
        analyze_dag(path, dag_type).await
    }

    /// Technical Debt Gradient analysis over the cached ASTs.
    ///
    /// NOTE(review): currently a stub — returns an empty result and ignores
    /// the cache until the real parallel TDG implementation lands.
    async fn analyze_tdg_cached(&self, _ast_cache: &Arc<AstCache>) -> Result<TDGResults> {
        Ok(Default::default())
    }

    /// Big-O complexity estimation over the cached ASTs.
    ///
    /// NOTE(review): currently a stub — returns an empty result and ignores
    /// the cache until the real parallel Big-O implementation lands.
    async fn analyze_big_o_cached(&self, _ast_cache: &Arc<AstCache>) -> Result<BigOResults> {
        Ok(Default::default())
    }

    /// Dead-code detection over the cached ASTs.
    ///
    /// NOTE(review): currently a stub — returns an empty result and ignores
    /// the cache until the real parallel detection implementation lands.
    async fn analyze_dead_code_cached(&self, _ast_cache: &Arc<AstCache>) -> Result<DeadCodeResults> {
        Ok(Default::default())
    }

    /// Register a styled progress bar with the shared multi-progress.
    ///
    /// `total` is the initial length; callers may later adjust it via
    /// `set_length` once the real work count is known.
    fn create_progress_bar(&self, message: &str, total: u64) -> ProgressBar {
        // Template is a compile-time constant, so a parse failure is a bug.
        let style = ProgressStyle::default_bar()
            .template("{spinner:.green} {msg} [{bar:40.cyan/blue}] {pos}/{len} ({eta})")
            .expect("Progress bar template must be valid")
            .progress_chars("#>-");

        let bar = self.progress.add(ProgressBar::new(total));
        bar.set_style(style);
        bar.set_message(message.to_string());
        bar
    }

    /// Parse one file into its AST representation.
    ///
    /// NOTE(review): currently a stub — always returns a default AST and
    /// ignores the path until the real parsing logic lands.
    fn parse_single_file(&self, _file: &Path) -> Result<ParsedAst> {
        Ok(Default::default())
    }

    /// Calculate complexity for AST
    fn calculate_complexity_for_ast(&self, _ast: &ParsedAst) -> ComplexityResult {
        // Actual complexity calculation
        ComplexityResult::default()
    }

    /// List the function IDs declared in a single parsed AST.
    ///
    /// NOTE(review): currently a stub — always returns an empty list and
    /// ignores the AST until the real extraction lands.
    fn extract_functions_from_ast(&self, _ast: &ParsedAst) -> Vec<FunctionId> {
        Vec::new()
    }

    /// Assemble the final analysis result, stamped with the current time.
    async fn generate_output(&self, analyses: CombinedAnalyses) -> Result<DeepAnalysisResult> {
        let timestamp = std::time::SystemTime::now();
        Ok(DeepAnalysisResult { analyses, timestamp })
    }
}