use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
use tokio::fs;
use walkdir::WalkDir;
/// Full result of analyzing one project directory.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectAnalysis {
/// Project name: the root directory's file name, or "unknown".
pub name: String,
/// Path to the project root, as passed to `analyze`.
pub path: PathBuf,
/// Files, directories, and total byte size found under the root.
pub structure: ProjectStructure,
/// Aggregate file/line counters derived from `structure`.
pub metrics: ProjectMetrics,
/// Dependencies parsed from `Cargo.toml` (empty when no manifest exists).
pub dependencies: Vec<Dependency>,
/// Heuristic findings (missing README, no test files, pre-1.0 deps).
pub issues: Vec<Issue>,
}
/// File-system layout of a project, as discovered by a recursive walk.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectStructure {
/// Every file found, with paths relative to the project root.
pub files: Vec<FileInfo>,
/// Every directory found, as relative paths; the root itself is excluded.
pub directories: Vec<String>,
/// Sum of all file sizes, in bytes.
pub total_size: u64,
}
/// Per-file facts recorded during the directory walk.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileInfo {
/// Path relative to the project root (lossy UTF-8 conversion).
pub path: String,
/// File size in bytes, from filesystem metadata.
pub size: u64,
/// Line count; 0 for files that could not be read as UTF-8 text.
pub lines: usize,
/// Language detected from the extension, or "binary"/"unknown".
pub language: String,
}
/// Project-wide counters aggregated from per-file information.
///
/// NOTE: `code_lines`, `comment_lines`, and `blank_lines` are fixed-ratio
/// estimates derived from `total_lines`, not real per-line classification.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectMetrics {
/// Total number of files discovered.
pub total_files: usize,
/// Files whose detected language is "rust".
pub rust_files: usize,
/// Rust files whose path contains "test" (substring heuristic).
pub test_files: usize,
/// Files ending in `.md` or `.txt`.
pub doc_files: usize,
/// Sum of line counts across all readable files.
pub total_lines: usize,
/// Estimated code lines (heuristic share of `total_lines`).
pub code_lines: usize,
/// Estimated comment lines (heuristic share of `total_lines`).
pub comment_lines: usize,
/// Estimated blank lines (heuristic share of `total_lines`).
pub blank_lines: usize,
}
/// One entry from the `[dependencies]` table of `Cargo.toml`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Dependency {
/// Crate name (the table key).
pub name: String,
/// Version requirement string; "*" when none is specified.
pub version: String,
/// Feature names listed for the dependency, if any.
pub features: Vec<String>,
/// Value of the `optional` flag; defaults to `false`.
pub optional: bool,
}
/// A single heuristic finding reported by the analyzer.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Issue {
/// How serious the finding is.
pub severity: IssueSeverity,
/// Broad area the finding belongs to.
pub category: IssueCategory,
/// Human-readable description of the finding.
pub message: String,
/// File the issue refers to (relative path), when known.
pub file: Option<String>,
/// Line number within `file`, when known.
pub line: Option<usize>,
}
/// Severity level of an [`Issue`], from most to least serious.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum IssueSeverity {
/// Must be fixed.
Error,
/// Should be fixed.
Warning,
/// Informational only.
Info,
}
/// Broad area an [`Issue`] relates to.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum IssueCategory {
Security,
Performance,
Style,
Documentation,
Dependency,
}
/// Stateless analyzer that inspects a project directory on disk.
pub struct ProjectAnalyzer;
/// `Default` simply delegates to [`ProjectAnalyzer::new`].
impl Default for ProjectAnalyzer {
fn default() -> Self {
Self::new()
}
}
impl ProjectAnalyzer {
#[must_use]
pub fn new() -> Self {
Self
}
pub async fn analyze(&self, project_path: &Path) -> Result<ProjectAnalysis> {
let name = project_path
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("unknown")
.to_string();
let structure = self.analyze_structure(project_path).await?;
let metrics = self.calculate_metrics(&structure);
let dependencies = self.analyze_dependencies(project_path).await?;
let issues = self
.detect_issues(project_path, &structure, &dependencies)
.await?;
Ok(ProjectAnalysis {
name,
path: project_path.to_path_buf(),
structure,
metrics,
dependencies,
issues,
})
}
async fn analyze_structure(&self, project_path: &Path) -> Result<ProjectStructure> {
let mut files = Vec::new();
let mut directories = Vec::new();
let mut total_size = 0;
for entry in WalkDir::new(project_path)
.follow_links(true)
.into_iter()
.filter_entry(|e| !is_ignored(e.path()))
{
let entry = entry?;
let path = entry.path();
if path.is_file() {
let relative_path = path
.strip_prefix(project_path)?
.to_string_lossy()
.to_string();
let metadata = fs::metadata(path).await?;
let size = metadata.len();
let (lines, language) = if let Ok(content) = fs::read_to_string(path).await {
let lines = content.lines().count();
let language = detect_language(path);
(lines, language)
} else {
(0, "binary".to_string())
};
files.push(FileInfo {
path: relative_path,
size,
lines,
language,
});
total_size += size;
} else if path.is_dir() {
let relative_path = path
.strip_prefix(project_path)?
.to_string_lossy()
.to_string();
if !relative_path.is_empty() {
directories.push(relative_path);
}
}
}
Ok(ProjectStructure {
files,
directories,
total_size,
})
}
fn calculate_metrics(&self, structure: &ProjectStructure) -> ProjectMetrics {
let mut metrics = ProjectMetrics {
total_files: structure.files.len(),
rust_files: 0,
test_files: 0,
doc_files: 0,
total_lines: 0,
code_lines: 0,
comment_lines: 0,
blank_lines: 0,
};
for file in &structure.files {
metrics.total_lines += file.lines;
if file.language == "rust" {
metrics.rust_files += 1;
if file.path.contains("test") || file.path.contains("tests/") {
metrics.test_files += 1;
}
}
if file.path.ends_with(".md") || file.path.ends_with(".txt") {
metrics.doc_files += 1;
}
}
metrics.code_lines = (metrics.total_lines as f64 * 0.6) as usize;
metrics.comment_lines = (metrics.total_lines as f64 * 0.2) as usize;
metrics.blank_lines = (metrics.total_lines as f64 * 0.2) as usize;
metrics
}
async fn analyze_dependencies(&self, project_path: &Path) -> Result<Vec<Dependency>> {
let cargo_toml_path = project_path.join("Cargo.toml");
if !cargo_toml_path.exists() {
return Ok(Vec::new());
}
let content = fs::read_to_string(&cargo_toml_path).await?;
let manifest: toml::Value = toml::from_str(&content)?;
let mut dependencies = Vec::new();
if let Some(deps) = manifest.get("dependencies").and_then(|d| d.as_table()) {
for (name, value) in deps {
let (version, features, optional) = parse_dependency_value(value);
dependencies.push(Dependency {
name: name.clone(),
version,
features,
optional,
});
}
}
Ok(dependencies)
}
async fn detect_issues(
&self,
project_path: &Path,
structure: &ProjectStructure,
dependencies: &[Dependency],
) -> Result<Vec<Issue>> {
let mut issues = Vec::new();
if !project_path.join("README.md").exists() {
issues.push(Issue {
severity: IssueSeverity::Warning,
category: IssueCategory::Documentation,
message: "Missing README.md file".to_string(),
file: None,
line: None,
});
}
if structure
.files
.iter()
.filter(|f| f.path.contains("test"))
.count()
== 0
{
issues.push(Issue {
severity: IssueSeverity::Warning,
category: IssueCategory::Documentation,
message: "No test files found".to_string(),
file: None,
line: None,
});
}
for dep in dependencies {
if dep.version.starts_with("0.") {
issues.push(Issue {
severity: IssueSeverity::Info,
category: IssueCategory::Dependency,
message: format!("Dependency '{}' is using a pre-1.0 version", dep.name),
file: Some("Cargo.toml".to_string()),
line: None,
});
}
}
Ok(issues)
}
}
/// Returns `true` when `path` should be excluded from analysis: either its
/// file name is a known junk file, or any path component is a well-known
/// build/VCS/editor directory.
///
/// NOTE(review): every component of the given path is inspected, including
/// ancestors of the project root (a parent directory named "target" would
/// trigger it) — confirm callers always pass paths rooted at the project.
fn is_ignored(path: &Path) -> bool {
    const IGNORED_DIRS: [&str; 5] = ["target", ".git", "node_modules", ".idea", ".vscode"];
    const IGNORED_FILES: [&str; 2] = [".DS_Store", "Thumbs.db"];
    let is_junk_file = path
        .file_name()
        .and_then(|n| n.to_str())
        .map_or(false, |name| IGNORED_FILES.contains(&name));
    if is_junk_file {
        return true;
    }
    path.components()
        .filter_map(|component| component.as_os_str().to_str())
        .any(|name| IGNORED_DIRS.contains(&name))
}
/// Maps a file's extension to a language label, or "unknown" when the
/// extension is missing, non-UTF-8, or not recognized.
fn detect_language(path: &Path) -> String {
    let label = match path.extension().and_then(|ext| ext.to_str()) {
        Some("rs") => "rust",
        Some("toml") => "toml",
        Some("md") => "markdown",
        Some("yml" | "yaml") => "yaml",
        Some("json") => "json",
        Some("sh") => "shell",
        Some("py") => "python",
        Some("js") => "javascript",
        Some("ts") => "typescript",
        _ => "unknown",
    };
    label.to_string()
}
/// Extracts `(version, features, optional)` from one `[dependencies]` entry.
///
/// Handles both manifest forms: the plain string `dep = "1.0"` and the
/// table `dep = { version = "1.0", features = [...], optional = true }`.
/// Missing or unrecognized values fall back to `("*", [], false)`.
fn parse_dependency_value(value: &toml::Value) -> (String, Vec<String>, bool) {
    // Plain string form: the string *is* the version requirement.
    if let toml::Value::String(version) = value {
        return (version.clone(), Vec::new(), false);
    }
    // Table form: pick out the keys we care about, defaulting each.
    if let toml::Value::Table(table) = value {
        let version = table
            .get("version")
            .and_then(toml::Value::as_str)
            .unwrap_or("*")
            .to_string();
        let features: Vec<String> = table
            .get("features")
            .and_then(toml::Value::as_array)
            .map(|items| {
                items
                    .iter()
                    .filter_map(toml::Value::as_str)
                    .map(String::from)
                    .collect()
            })
            .unwrap_or_default();
        let optional = table
            .get("optional")
            .and_then(toml::Value::as_bool)
            .unwrap_or(false);
        return (version, features, optional);
    }
    // Any other TOML type is malformed for a dependency entry.
    ("*".to_string(), Vec::new(), false)
}