use crate::config::PackageToolsConfig;
use crate::error::{ChangesetError, ChangesetResult};
use std::collections::HashSet;
use std::path::{Path, PathBuf};
use sublime_git_tools::{Repo, RepoCommit};
use sublime_standard_tools::config::MonorepoConfig;
use sublime_standard_tools::filesystem::{AsyncFileSystem, FileSystemManager};
use sublime_standard_tools::monorepo::{MonorepoDetector, MonorepoDetectorTrait, WorkspacePackage};
/// Detects which workspace packages are affected by a set of git commits.
///
/// Combines git history queries (via the borrowed [`Repo`]) with monorepo
/// workspace detection (via [`MonorepoDetector`]) to map changed files to
/// package names.
#[derive(Debug)]
pub struct PackageDetector<'a> {
/// Root directory of the workspace being analyzed.
workspace_root: PathBuf,
/// Borrowed git repository handle used to query commits and changed files.
repo: &'a Repo,
/// Filesystem abstraction used to read package manifests (`package.json`).
fs: FileSystemManager,
/// Detector used to recognize monorepo layouts and enumerate workspace packages.
monorepo_detector: MonorepoDetector<FileSystemManager>,
}
impl<'a> PackageDetector<'a> {
/// Creates a detector for `workspace_root` using the default monorepo
/// detection configuration.
#[must_use]
pub fn new(workspace_root: impl Into<PathBuf>, repo: &'a Repo, fs: FileSystemManager) -> Self {
    let root = workspace_root.into();
    // The detector keeps its own clone of the filesystem handle.
    let detector = MonorepoDetector::with_filesystem(fs.clone());
    Self {
        workspace_root: root,
        repo,
        fs,
        monorepo_detector: detector,
    }
}
/// Creates a detector whose monorepo detection is driven by the supplied
/// [`PackageToolsConfig`] (standard config plus any extra workspace patterns).
#[must_use]
pub fn new_with_config(
    workspace_root: impl Into<PathBuf>,
    repo: &'a Repo,
    fs: FileSystemManager,
    config: &PackageToolsConfig,
) -> Self {
    let root = workspace_root.into();
    let detector = MonorepoDetector::with_filesystem_and_config(
        fs.clone(),
        Self::build_monorepo_config(config),
    );
    Self {
        workspace_root: root,
        repo,
        fs,
        monorepo_detector: detector,
    }
}
/// Builds the [`MonorepoConfig`] used for workspace detection.
///
/// Starts from the standard monorepo configuration and appends any extra
/// workspace patterns from `config.workspace`, skipping patterns that are
/// already present so the list stays duplicate-free.
#[must_use]
fn build_monorepo_config(config: &PackageToolsConfig) -> MonorepoConfig {
    let mut monorepo_config = config.standard_config.monorepo.clone();
    // Note: no explicit `is_empty` guard is needed here — iterating an
    // empty pattern list is already a no-op.
    if let Some(ref workspace) = config.workspace {
        for pattern in &workspace.patterns {
            if !monorepo_config.workspace_patterns.contains(pattern) {
                monorepo_config.workspace_patterns.push(pattern.clone());
            }
        }
    }
    monorepo_config
}
/// Determines which packages are affected by the given commits.
///
/// Returns an empty list when no commits are supplied or when the commits
/// touched no files.
///
/// # Errors
/// Returns [`ChangesetError::GitIntegration`] when monorepo detection or
/// the underlying git queries fail.
pub async fn detect_affected_packages(
    &self,
    commit_ids: &[String],
) -> ChangesetResult<Vec<String>> {
    // Nothing to analyze without commits.
    if commit_ids.is_empty() {
        return Ok(Vec::new());
    }
    // Detect the repo layout first, matching the original evaluation order.
    let monorepo = self.is_monorepo().await?;
    let changed = self.get_changed_files_from_commits(commit_ids)?;
    if changed.is_empty() {
        return Ok(Vec::new());
    }
    // Pick the mapping strategy according to the repository layout.
    if monorepo {
        self.map_files_to_packages_monorepo(&changed).await
    } else {
        self.map_files_to_packages_single(&changed).await
    }
}
/// Reports whether the workspace root is the root of a monorepo.
///
/// # Errors
/// Returns [`ChangesetError::GitIntegration`] when detection itself fails.
pub async fn is_monorepo(&self) -> ChangesetResult<bool> {
    // A `Some(..)` detection result means a monorepo root was found.
    self.monorepo_detector
        .is_monorepo_root(&self.workspace_root)
        .await
        .map(|detected| detected.is_some())
        .map_err(|e| ChangesetError::GitIntegration {
            operation: "monorepo detection".to_string(),
            reason: format!("Failed to detect monorepo: {}", e),
        })
}
/// Lists the package names in the workspace.
///
/// In a monorepo this returns every workspace package name; otherwise it
/// reads the root `package.json` and returns its `name` field (or an empty
/// list when the manifest or its `name` field is absent).
///
/// # Errors
/// Returns [`ChangesetError::GitIntegration`] when the manifest cannot be
/// read or parsed, or when monorepo detection fails.
pub async fn list_packages(&self) -> ChangesetResult<Vec<String>> {
    if self.is_monorepo().await? {
        let packages = self.get_workspace_packages().await?;
        return Ok(packages.iter().map(|p| p.name.clone()).collect());
    }
    let manifest = self.workspace_root.join("package.json");
    // No manifest at the workspace root: nothing to report.
    if !self.fs.exists(&manifest).await {
        return Ok(Vec::new());
    }
    let raw = self.fs.read_file_string(&manifest).await.map_err(|e| {
        ChangesetError::GitIntegration {
            operation: "read package.json".to_string(),
            reason: format!("Failed to read package.json: {}", e),
        }
    })?;
    let parsed: serde_json::Value = serde_json::from_str(&raw).map_err(|e| {
        ChangesetError::GitIntegration {
            operation: "parse package.json".to_string(),
            reason: format!("Failed to parse package.json: {}", e),
        }
    })?;
    // A manifest without a string `name` field yields an empty list.
    let names = match parsed.get("name").and_then(serde_json::Value::as_str) {
        Some(name) => vec![name.to_string()],
        None => Vec::new(),
    };
    Ok(names)
}
/// Returns the commits between two git references.
///
/// # Errors
/// Returns [`ChangesetError::GitIntegration`] when the underlying git
/// query fails.
pub fn get_commits_between(
    &self,
    from_ref: &str,
    to_ref: &str,
) -> ChangesetResult<Vec<RepoCommit>> {
    match self.repo.get_commits_between(from_ref, to_ref, &None) {
        Ok(commits) => Ok(commits),
        Err(e) => Err(ChangesetError::GitIntegration {
            operation: format!("get commits between {} and {}", from_ref, to_ref),
            reason: format!("Failed to get commits: {}", e),
        }),
    }
}
/// Returns the commits since the given reference (or all commits when
/// `since` is `None`, as interpreted by the underlying repo call).
///
/// # Errors
/// Returns [`ChangesetError::GitIntegration`] when the underlying git
/// query fails.
pub fn get_commits_since(&self, since: Option<String>) -> ChangesetResult<Vec<RepoCommit>> {
    // Wrap the low-level git error with the operation context.
    let wrap = |e| ChangesetError::GitIntegration {
        operation: "get commits since reference".to_string(),
        reason: format!("Failed to get commits: {}", e),
    };
    self.repo.get_commits_since(since, &None).map_err(wrap)
}
/// Collects the deduplicated set of file paths touched by the given commits.
///
/// # Errors
/// Returns [`ChangesetError::GitIntegration`] when the changed files of any
/// commit cannot be retrieved.
fn get_changed_files_from_commits(
    &self,
    commit_ids: &[String],
) -> ChangesetResult<Vec<PathBuf>> {
    if commit_ids.is_empty() {
        return Ok(Vec::new());
    }
    // A set deduplicates files touched by more than one commit.
    let mut unique: HashSet<PathBuf> = HashSet::new();
    for id in commit_ids {
        let changed = self
            .repo
            .get_files_changed_in_commit(id)
            .map_err(|e| ChangesetError::GitIntegration {
                operation: format!("get files changed in commit {}", id),
                reason: format!("Failed to get changed files: {}", e),
            })?;
        for entry in changed {
            unique.insert(PathBuf::from(entry.path));
        }
    }
    Ok(unique.into_iter().collect())
}
/// Maps changed files to the workspace packages whose directories contain
/// them, returning the deduplicated package names.
///
/// # Errors
/// Returns [`ChangesetError::GitIntegration`] when workspace packages
/// cannot be enumerated.
async fn map_files_to_packages_monorepo(
    &self,
    changed_files: &[PathBuf],
) -> ChangesetResult<Vec<String>> {
    let packages = self.get_workspace_packages().await?;
    // Canonicalize each package root once, up front. The previous version
    // re-canonicalized every package path inside the per-file loop, costing
    // O(files × packages) filesystem calls for identical results.
    let package_roots: Vec<(String, PathBuf)> = packages
        .into_iter()
        .map(|p| {
            let canonical = p
                .absolute_path
                .canonicalize()
                .unwrap_or_else(|_| p.absolute_path.clone());
            (p.name, canonical)
        })
        .collect();
    let mut affected = HashSet::new();
    for file in changed_files {
        let absolute =
            if file.is_absolute() { file.clone() } else { self.workspace_root.join(file) };
        // Fall back to the non-canonical path when canonicalization fails
        // (e.g. the file was deleted in one of the commits).
        let canonical = absolute.canonicalize().unwrap_or(absolute);
        for (name, root) in &package_roots {
            if canonical.starts_with(root) {
                affected.insert(name.clone());
                // A file belongs to at most one package here; stop at the
                // first match, as before.
                break;
            }
        }
    }
    Ok(affected.into_iter().collect())
}
/// Maps changed files to the single package of a non-monorepo workspace.
///
/// In a single-package repository any change affects the (only) package, so
/// the file list is used solely to decide whether anything changed at all.
///
/// # Errors
/// Returns [`ChangesetError::GitIntegration`] when the package list cannot
/// be determined.
async fn map_files_to_packages_single(
    &self,
    changed_files: &[PathBuf],
) -> ChangesetResult<Vec<String>> {
    if changed_files.is_empty() {
        return Ok(Vec::new());
    }
    // `list_packages` yields at most one name here; an empty result is
    // returned as-is (the former explicit `is_empty` branch returned an
    // identical empty Vec and was redundant).
    self.list_packages().await
}
/// Enumerates the workspace packages under the workspace root.
///
/// # Errors
/// Returns [`ChangesetError::GitIntegration`] when package detection fails.
async fn get_workspace_packages(&self) -> ChangesetResult<Vec<WorkspacePackage>> {
    let detected = self.monorepo_detector.detect_packages(&self.workspace_root).await;
    detected.map_err(|e| ChangesetError::GitIntegration {
        operation: "get workspace packages".to_string(),
        reason: format!("Failed to get workspace packages: {}", e),
    })
}
/// Returns the workspace root directory this detector operates on.
#[must_use]
pub fn workspace_root(&self) -> &Path {
    self.workspace_root.as_path()
}
/// Returns the borrowed git repository handle.
#[must_use]
pub fn repo(&self) -> &Repo {
self.repo
}
}