/// Scans `root_path` for Rust source files and analyzes them for dead code.
///
/// Walks at most `MAX_DEPTH` levels deep (symlinks not followed), keeps only
/// `.rs` files that are not gitignored and that are not test/example/bench
/// sources, caps the scan at `MAX_FILES`, then analyzes the files in
/// concurrent batches of `BATCH_SIZE`, each file with a 2-second budget.
///
/// Returns the `FileContext`s produced by `analyze_file_by_toolchain`;
/// files that time out, panic, or yield no result are silently dropped.
async fn scan_rust_files_only(
    root_path: &Path,
    toolchain: &str,
    cache_manager: Option<Arc<SessionCacheManager>>,
    gitignore: &ignore::gitignore::Gitignore,
) -> Vec<FileContext> {
    const MAX_DEPTH: usize = 5;
    const MAX_FILES: usize = 100;
    const BATCH_SIZE: usize = 20;

    /// True for `.rs` source files worth analyzing: not gitignored, not under
    /// a tests/examples/benches directory, not a `*_test.rs` companion, and
    /// not a `build.rs` build script.
    fn is_analyzable_rust_source(
        path: &Path,
        gitignore: &ignore::gitignore::Gitignore,
    ) -> bool {
        if path.is_dir() || gitignore.matched(path, false).is_ignore() {
            return false;
        }
        if !path.extension().is_some_and(|ext| ext == "rs") {
            return false;
        }
        // Compare path components rather than substring-matching the lossy
        // string form, so the exclusions also work with `\` separators on
        // Windows (the old `contains("/tests/")` checks did not).
        let in_excluded_dir = path.components().any(|c| {
            matches!(
                c.as_os_str().to_str(),
                Some("tests" | "test" | "examples" | "benches")
            )
        });
        if in_excluded_dir {
            return false;
        }
        let file_name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
        // Skip per-file unit-test companions and Cargo build scripts.
        !file_name.ends_with("_test.rs") && file_name != "build.rs"
    }

    let paths: Vec<_> = WalkDir::new(root_path)
        .follow_links(false)
        .max_depth(MAX_DEPTH)
        .into_iter()
        .filter_map(std::result::Result::ok)
        .filter(|entry| is_analyzable_rust_source(entry.path(), gitignore))
        .take(MAX_FILES)
        .map(|entry| entry.path().to_path_buf())
        .collect();

    eprintln!(
        "🎯 Dead code analysis: scanning {} Rust source files (max {})",
        paths.len(),
        MAX_FILES
    );

    let mut all_results = Vec::new();
    for chunk in paths.chunks(BATCH_SIZE) {
        // One spawned task per file so the whole batch runs concurrently;
        // each file gets its own 2-second budget before it is abandoned.
        let batch_tasks: Vec<_> = chunk
            .iter()
            .map(|path| {
                let path = path.clone();
                let toolchain = toolchain.to_string();
                let cache_manager = cache_manager.clone();
                tokio::spawn(async move {
                    let timeout_duration = tokio::time::Duration::from_secs(2);
                    tokio::time::timeout(timeout_duration, async move {
                        analyze_file_by_toolchain(&path, &toolchain, cache_manager).await
                    })
                    .await
                    .ok() // Elapsed timeout -> None
                    .flatten()
                })
            })
            .collect();
        let batch_results = join_all(batch_tasks).await;
        // Drop JoinErrors (panicked/cancelled tasks) and files with no result.
        all_results.extend(
            batch_results
                .into_iter()
                .filter_map(std::result::Result::ok)
                .flatten(),
        );
    }
    all_results
}
/// Scans `root_path` for analyzable files (any type; the per-toolchain
/// analyzer decides what it can handle) and processes them in concurrent
/// batches using the session-scoped cache.
///
/// Walks at most `MAX_DEPTH` levels deep, skips directories and gitignored
/// paths, caps collection at `MAX_FILES`, then analyzes `BATCH_SIZE` files
/// at a time with a 5-second per-file timeout.
///
/// Returns the `FileContext`s from `analyze_file_by_toolchain`; files that
/// time out, panic, or yield no result are silently dropped.
async fn scan_and_analyze_files(
    root_path: &Path,
    toolchain: &str,
    cache_manager: Option<Arc<SessionCacheManager>>,
    gitignore: &ignore::gitignore::Gitignore,
) -> Vec<FileContext> {
    const MAX_DEPTH: usize = 10;
    const MAX_FILES: usize = 10000;
    const BATCH_SIZE: usize = 100;

    let paths: Vec<_> = WalkDir::new(root_path)
        .follow_links(false)
        .max_depth(MAX_DEPTH)
        .into_iter()
        .filter_map(std::result::Result::ok)
        .filter(|entry| {
            let path = entry.path();
            !path.is_dir() && !gitignore.matched(path, false).is_ignore()
        })
        .take(MAX_FILES)
        .enumerate()
        .map(|(i, entry)| {
            // Periodic progress on stderr for large trees.
            if (i + 1) % 1000 == 0 {
                eprintln!("📁 Scanning files... ({} so far)", i + 1);
            }
            entry.path().to_path_buf()
        })
        .collect();

    // `take(MAX_FILES)` caps the walk before counting, so `paths.len()` can
    // never exceed MAX_FILES. Only claim "limited" when the cap was actually
    // reached (the old check fired the message for any count > MAX_FILES/2,
    // even when nothing had been truncated).
    if paths.len() == MAX_FILES {
        eprintln!(
            "⚠️ Large project detected: limited to {MAX_FILES} files for performance."
        );
    } else if paths.len() > MAX_FILES / 2 {
        eprintln!("⚠️ Large project detected: {} files.", paths.len());
    }

    let mut all_results = Vec::new();
    for chunk in paths.chunks(BATCH_SIZE) {
        // One spawned task per file so the batch runs concurrently; each
        // file gets its own 5-second budget before it is abandoned.
        let batch_tasks: Vec<_> = chunk
            .iter()
            .map(|path| {
                let path = path.clone();
                let toolchain = toolchain.to_string();
                let cache_manager = cache_manager.clone();
                tokio::spawn(async move {
                    let timeout_duration = tokio::time::Duration::from_secs(5);
                    tokio::time::timeout(timeout_duration, async move {
                        analyze_file_by_toolchain(&path, &toolchain, cache_manager).await
                    })
                    .await
                    .ok() // Elapsed timeout -> None
                    .flatten()
                })
            })
            .collect();
        let batch_results = join_all(batch_tasks).await;
        // Drop JoinErrors (panicked/cancelled tasks) and files with no result.
        all_results.extend(
            batch_results
                .into_iter()
                .filter_map(std::result::Result::ok)
                .flatten(),
        );
    }
    all_results
}
/// Scans `root_path` and analyzes files sequentially using the persistent
/// (cross-session) cache.
///
/// Walks at most `MAX_DEPTH` levels deep, skips gitignored paths and
/// directories, stops after `MAX_FILES` files, and gives each file a
/// 5-second analysis budget. Unlike the batched siblings, files are
/// processed one at a time.
///
/// Returns the `FileContext`s from `analyze_file_by_toolchain_persistent`;
/// files that time out or yield no result are silently dropped.
async fn scan_and_analyze_files_persistent(
    root_path: &Path,
    toolchain: &str,
    cache_manager: Option<Arc<PersistentCacheManager>>,
    gitignore: &ignore::gitignore::Gitignore,
) -> Vec<FileContext> {
    const MAX_DEPTH: usize = 10;
    const MAX_FILES: usize = 10000;
    let mut files = Vec::new();
    let mut file_count = 0;
    for entry in WalkDir::new(root_path)
        .follow_links(false)
        .max_depth(MAX_DEPTH)
        .into_iter()
        .filter_map(std::result::Result::ok)
    {
        let path = entry.path();
        let is_dir = path.is_dir();
        // Gitignore matching needs the is_dir flag to apply dir-only rules.
        if gitignore.matched(path, is_dir).is_ignore() {
            continue;
        }
        // Fix: directories previously fell through to the analysis step,
        // each consuming part of the MAX_FILES budget plus a 5-second
        // timeout. Skip them here, matching the sibling scan functions.
        if is_dir {
            continue;
        }
        file_count += 1;
        if file_count > MAX_FILES {
            eprintln!("⚠️ Reached file limit of {MAX_FILES}. Stopping analysis.");
            break;
        }
        if file_count % 1000 == 0 {
            eprintln!("📁 Scanning files... ({file_count} so far)");
        }
        // Bound each file's analysis; timeouts are dropped silently.
        let timeout_duration = tokio::time::Duration::from_secs(5);
        let result = tokio::time::timeout(timeout_duration, async {
            analyze_file_by_toolchain_persistent(path, toolchain, cache_manager.clone()).await
        })
        .await;
        if let Ok(Some(file_context)) = result {
            files.push(file_context);
        }
    }
    files
}