use governor_core::domain::{
changelog::Changelog,
commit::{Commit, CommitType},
dependency::{DependencyGraph, WorkspaceDependency},
version::{BreakingChange, BumpType, Feature, Fix, SemanticVersion},
workspace::WorkingTreeStatus,
};
use governor_core::traits::{
checkpoint_store::{Checkpoint, CheckpointStore},
registry::{CratePackage, PublishResult, Registry},
source_control::{SourceControl, format_commit_message, format_tag_name},
};
use serde::Serialize;
use std::collections::{HashMap, HashSet};
use std::path::Path;
use std::time::Duration;
use tokio::time::sleep;
use crate::error::{ApplicationError, ApplicationResult};
use crate::ports::{ChangelogUpdate, CommandPort, ReleasePackage, VersionUpdate, WorkspacePort};
/// Outcome of one pre-release check (e.g. `cargo test`), captured for reporting.
#[derive(Debug, Clone, Serialize)]
pub struct CheckItem {
    /// Check identifier as reported by the command port.
    pub name: String,
    pub passed: bool,
    /// Process exit code, when the check ran to completion.
    pub exit_code: Option<i32>,
    /// Human-readable pass/fail summary.
    pub message: String,
    pub stdout: String,
    pub stderr: String,
}
/// Parameters for the `analyze` command.
#[derive(Debug, Clone)]
pub struct AnalyzeInput {
    pub workspace_path: String,
    /// Optional bump strategy override: "major" | "minor" | "patch" | "none" | "auto".
    pub bump_override: Option<String>,
    /// Optional ref to analyze commits since; defaults to the last `v`-prefixed tag.
    pub since: Option<String>,
    /// When true, a risk assessment is included in the output.
    pub risk_analysis: bool,
    /// Skip the manifest-vs-last-tag version alignment safety check.
    pub allow_version_drift: bool,
}
/// Serialized result of the `analyze` command.
#[derive(Debug, Clone, Serialize)]
pub struct AnalyzeOutput {
    pub workspace: String,
    pub current_version: String,
    pub recommended_bump: BumpType,
    /// Version after applying the recommended bump.
    pub new_version: String,
    /// Heuristic confidence in the recommendation, clamped to [0, 1].
    pub confidence: f64,
    pub reasoning: String,
    pub commits_analyzed: usize,
    pub breaking_changes: Vec<BreakingChange>,
    pub features: Vec<Feature>,
    pub fixes: Vec<Fix>,
    /// Present only when risk analysis was requested.
    pub risk_assessment: Option<RiskAssessment>,
}
/// Parameters for the `plan` command.
#[derive(Debug, Clone)]
pub struct PlanInput {
    pub workspace_path: String,
    /// When true, crates already on the registry are still listed in the plan.
    pub include_published: bool,
    /// Skip the manifest-vs-last-tag version alignment safety check.
    pub allow_version_drift: bool,
}
/// One crate entry in the computed publication order.
#[derive(Debug, Clone, Serialize)]
pub struct PublicationCrate {
    /// 1-based position in the publish sequence.
    pub order: usize,
    pub name: String,
    pub version: String,
    pub dependencies: Vec<String>,
    pub status: String,
    /// Rough per-crate publish duration estimate, in seconds.
    pub estimated_publish_time_sec: u64,
}
/// A crate excluded from publication, with the reason (e.g. "already_published").
#[derive(Debug, Clone, Serialize)]
pub struct SkippedCrate {
    pub name: String,
    pub version: String,
    pub reason: String,
}
/// A group of crates that publish together; currently always one crate per batch.
#[derive(Debug, Clone, Serialize)]
pub struct PublicationBatch {
    /// 1-based batch number.
    pub number: usize,
    pub crates: Vec<String>,
    pub can_parallelize: bool,
}
/// Serialized result of the `plan` command.
#[derive(Debug, Clone, Serialize)]
pub struct PlanOutput {
    pub workspace: String,
    /// False when the dependency graph contains cycles.
    pub success: bool,
    pub current_version: String,
    pub new_version: String,
    pub bump: BumpType,
    /// Crate count in the dependency graph.
    pub nodes: usize,
    /// Dependency-edge count in the graph.
    pub edges: usize,
    pub publication_order: Vec<PublicationCrate>,
    pub skipped_crates: Vec<SkippedCrate>,
    pub batches: Vec<PublicationBatch>,
}
/// Parameters for the `status` command.
#[derive(Debug, Clone)]
pub struct StatusInput {
    pub workspace_path: String,
    /// When true, list every workspace crate, not only the changed ones.
    pub all: bool,
    /// When true, include the dependency-derived publish order.
    pub show_deps: bool,
}
/// Per-crate status row: version is only known when the full workspace is listed.
#[derive(Debug, Clone, Serialize)]
pub struct StatusCrate {
    pub name: String,
    pub version: Option<String>,
    /// "changed" or "unchanged" relative to commits since the last tag.
    pub status: String,
}
/// Serialized result of the `status` command.
#[derive(Debug, Clone, Serialize)]
pub struct StatusOutput {
    pub workspace: String,
    pub current_version: String,
    pub branch: String,
    pub last_tag: Option<String>,
    pub commits_since_tag: usize,
    /// Working-tree cleanliness details from source control.
    pub git: WorkingTreeStatus,
    pub crates: Vec<StatusCrate>,
    /// Topological publish order; present only when requested via `show_deps`.
    pub publish_order: Option<Vec<String>>,
}
/// Parameters for the `check` command.
#[derive(Debug, Clone)]
pub struct CheckInput {
    pub workspace_path: String,
    /// Comma-separated list of checks to run; `None` runs the default suite.
    pub checks: Option<String>,
    /// Stop at the first failing check.
    pub fail_fast: bool,
}
/// Serialized result of the `check` command: overall verdict plus per-check detail.
#[derive(Debug, Clone, Serialize)]
pub struct CheckOutput {
    pub success: bool,
    pub checks: Vec<CheckItem>,
}
/// Parameters for the `bump` command.
#[derive(Debug, Clone)]
#[allow(clippy::struct_excessive_bools)]
pub struct BumpInput {
    pub workspace_path: String,
    /// Explicit target version; takes precedence over `bump`.
    pub version: Option<String>,
    /// Bump strategy name when no explicit version is given.
    pub bump: Option<String>,
    pub no_changelog: bool,
    pub no_commit: bool,
    pub no_tag: bool,
    /// Commit message template; `{{version}}` is substituted.
    pub commit_template: Option<String>,
    /// Tag name template; `{{version}}` is substituted.
    pub tag_template: Option<String>,
    pub dry_run: bool,
    /// Skip the manifest-vs-last-tag version alignment safety check.
    pub allow_version_drift: bool,
    /// Skip the clean-working-tree safety check.
    pub allow_dirty: bool,
}
/// Serialized result of the `bump` command.
#[derive(Debug, Clone, Serialize)]
pub struct BumpOutput {
    pub workspace: String,
    pub previous_version: String,
    pub new_version: String,
    /// True when the target version differs from the current one.
    pub changed: bool,
    pub files_modified: Vec<String>,
    /// Hash of the release commit, when one was created.
    pub commit_hash: Option<String>,
    pub tag_created: bool,
    pub dry_run: bool,
}
/// Parameters for the `publish` command.
#[derive(Debug, Clone)]
pub struct PublishInput {
    pub workspace_path: String,
    /// Comma-separated check names to skip before publishing.
    pub skip_checks: Option<String>,
    /// Comma-separated allow-list of crates to publish.
    pub only: Option<String>,
    /// Comma-separated deny-list of crates to exclude.
    pub exclude: Option<String>,
    /// Registry-visibility poll interval in seconds (defaults to 2).
    pub delay: Option<u64>,
    /// Total publish attempts per crate (defaults to 3).
    pub max_retries: Option<usize>,
    /// "skip" continues past failed crates; anything else stops at the first failure.
    pub on_error: Option<String>,
    pub dry_run: bool,
    /// Skip the manifest-vs-last-tag version alignment safety check.
    pub allow_version_drift: bool,
    /// Skip the clean-working-tree safety check.
    pub allow_dirty: bool,
}
/// A crate that was successfully published during this run.
#[derive(Debug, Clone, Serialize)]
pub struct PublishedCrate {
    pub name: String,
    pub version: String,
    pub publish_time_ms: u64,
    pub crates_io_url: String,
}
/// Serialized result of the `publish` command.
#[derive(Debug, Clone, Serialize)]
pub struct PublishOutput {
    pub workspace: String,
    pub crates_published: Vec<PublishedCrate>,
    pub crates_skipped: Vec<SkippedCrate>,
    /// Failures as (crate name, version, error message) triples.
    pub crates_failed: Vec<(String, String, String)>,
    pub dry_run: bool,
    /// Pre-publish checks that were executed (empty when checks failed early).
    pub checks: Vec<CheckItem>,
}
/// Parameters for the `full` release pipeline (check → bump → publish → push).
#[derive(Debug, Clone)]
pub struct FullInput {
    pub workspace_path: String,
    /// Nested inputs forwarded to the `bump` step.
    pub bump: BumpInput,
    /// Nested inputs forwarded to the `publish` step.
    pub publish: PublishInput,
    pub skip_checks: bool,
    /// Skip the final `git push`.
    pub no_push: bool,
}
/// Serialized result of the `full` pipeline.
#[derive(Debug, Clone, Serialize)]
pub struct FullOutput {
    pub workspace: String,
    /// Pipeline stages that completed, in execution order.
    pub steps_completed: Vec<String>,
    pub version: String,
    pub publish: PublishOutput,
    pub dry_run: bool,
}
/// Parameters for the `simulate` command.
#[derive(Debug, Clone)]
pub struct SimulateInput {
    pub workspace_path: String,
    /// Explicit target version to simulate; derived from commits otherwise.
    pub version: Option<String>,
    /// Known downstream consumer count, used to tailor recommendations.
    pub downstream_crates: Option<usize>,
}
/// Serialized result of the `simulate` command.
#[derive(Debug, Clone, Serialize)]
pub struct SimulateOutput {
    pub workspace: String,
    /// Keys: "current", "target", "bump_type".
    pub version_diff: HashMap<String, String>,
    pub breaking_changes: usize,
    pub features: usize,
    pub fixes: usize,
    /// Commits that fell into none of the three categories above.
    pub other: usize,
    pub recommendations: Vec<String>,
}
/// Parameters for the `resume` command.
#[derive(Debug, Clone)]
pub struct ResumeInput {
    pub workspace_path: String,
    /// Specific checkpoint id to load; the latest is loaded otherwise.
    pub checkpoint: Option<String>,
    /// List stored checkpoints instead of loading one.
    pub list: bool,
    /// Clear all stored checkpoints.
    pub clean: bool,
}
/// Serialized result of the `resume` command.
#[derive(Debug, Clone, Serialize)]
pub struct ResumeOutput {
    pub workspace: String,
    pub checkpoints: Vec<Checkpoint>,
    /// True when the checkpoint store was cleared.
    pub cleared: bool,
}
/// One contributing factor in a release risk assessment.
#[derive(Debug, Clone, Serialize)]
pub struct RiskFactor {
    pub name: String,
    /// Contribution of this factor to the total risk score.
    pub weight: f64,
    pub reason: String,
}
/// Aggregate release risk: summed factor weights plus a coarse level label.
#[derive(Debug, Clone, Serialize)]
pub struct RiskAssessment {
    pub score: f64,
    /// "low" | "medium" | "high", thresholded on `score`.
    pub level: String,
    pub factors: Vec<RiskFactor>,
}
/// Internal commit classification produced by `analyze_commits`.
#[derive(Debug, Clone)]
struct Analysis {
    breaking: Vec<BreakingCommit>,
    features: Vec<FeatureCommit>,
    fixes: Vec<FixCommit>,
}
/// Minimal record of a commit flagged as a breaking change.
#[derive(Debug, Clone)]
struct BreakingCommit {
    hash: String,
    message: String,
}
/// Minimal record of a `feat` commit, keeping its conventional-commit scope.
#[derive(Debug, Clone)]
struct FeatureCommit {
    hash: String,
    message: String,
    scope: Option<String>,
}
/// Minimal record of a `fix`/`perf` commit, keeping its conventional-commit scope.
#[derive(Debug, Clone)]
struct FixCommit {
    hash: String,
    message: String,
    scope: Option<String>,
}
/// Buckets conventional commits into breaking / feature / fix groups.
///
/// A commit counts as breaking only when its type is `feat`, `fix`, `perf`,
/// or `refactor` AND its breaking flag is set. NOTE(review): breaking commits
/// of other types (or with no parsed type) are silently ignored — confirm
/// this is the intended policy rather than counting every breaking commit.
fn analyze_commits(commits: &[Commit]) -> Analysis {
    let mut analysis = Analysis {
        breaking: Vec::new(),
        features: Vec::new(),
        fixes: Vec::new(),
    };
    for commit in commits {
        match commit.commit_type {
            // The breaking guard must come first so a breaking `feat` is not
            // also counted as a plain feature below.
            Some(CommitType::Feat | CommitType::Fix | CommitType::Perf | CommitType::Refactor)
                if commit.breaking =>
            {
                analysis.breaking.push(BreakingCommit {
                    hash: commit.hash.clone(),
                    message: commit.short_message().to_string(),
                });
            }
            Some(CommitType::Feat) => analysis.features.push(FeatureCommit {
                hash: commit.hash.clone(),
                message: commit.short_message().to_string(),
                scope: commit.scope.clone(),
            }),
            Some(CommitType::Fix | CommitType::Perf) => analysis.fixes.push(FixCommit {
                hash: commit.hash.clone(),
                message: commit.short_message().to_string(),
                scope: commit.scope.clone(),
            }),
            // Other commit types (docs, chore, ...) do not influence the bump.
            _ => {}
        }
    }
    analysis
}
/// Maps a commit analysis onto the strongest applicable bump:
/// breaking → major, feature → minor, fix/perf → patch, otherwise none.
const fn analyze_bump_type(analysis: &Analysis) -> BumpType {
    if !analysis.breaking.is_empty() {
        BumpType::Major
    } else if !analysis.features.is_empty() {
        BumpType::Minor
    } else if !analysis.fixes.is_empty() {
        BumpType::Patch
    } else {
        BumpType::None
    }
}
/// Converts raw breaking-commit records into the public `BreakingChange`
/// shape, attributing each change to `package_name` and defaulting the
/// migration complexity to `Medium`.
fn build_breaking_changes(commits: Vec<BreakingCommit>, package_name: &str) -> Vec<BreakingChange> {
    let mut changes = Vec::with_capacity(commits.len());
    for commit in commits {
        // Abbreviated hash mirrors git's conventional 7-character short form.
        let short_hash: String = commit.hash.chars().take(7).collect();
        changes.push(BreakingChange {
            commit_hash: commit.hash,
            short_hash,
            message: commit.message.clone(),
            breaking_description: commit.message,
            affected_crates: vec![package_name.to_string()],
            migration_complexity: governor_core::domain::version::MigrationComplexity::Medium,
        });
    }
    changes
}
fn build_features(commits: Vec<FeatureCommit>, package_name: &str) -> Vec<Feature> {
commits
.into_iter()
.map(|commit| Feature {
commit_hash: commit.hash.clone(),
short_hash: commit.hash.chars().take(7).collect(),
message: commit.message,
scope: commit.scope,
affected_crates: vec![package_name.to_string()],
})
.collect()
}
/// Converts raw fix-commit records into the public `Fix` shape, attributing
/// each entry to `package_name`.
fn build_fixes(commits: Vec<FixCommit>, package_name: &str) -> Vec<Fix> {
    let mut fixes = Vec::with_capacity(commits.len());
    for commit in commits {
        // Abbreviated hash mirrors git's conventional 7-character short form.
        let short_hash: String = commit.hash.chars().take(7).collect();
        fixes.push(Fix {
            commit_hash: commit.hash,
            short_hash,
            message: commit.message,
            scope: commit.scope,
            affected_crates: vec![package_name.to_string()],
        });
    }
    fixes
}
/// Heuristic confidence in the recommended bump, in [0, 1].
///
/// Starts from a base score keyed on the bump kind and adds small bonuses
/// when the evidence is strong (any breaking change, or a high volume of
/// features/fixes).
fn calculate_confidence(analysis: &Analysis, bump_type: BumpType) -> f64 {
    let base: f64 = match bump_type {
        BumpType::Major => 0.95,
        BumpType::Minor => 0.85,
        BumpType::Patch => 0.75,
        BumpType::None => 0.50,
    };
    let breaking_bonus = if analysis.breaking.is_empty() { 0.0 } else { 0.03 };
    let volume_bonus = if analysis.features.len() > 3 || analysis.fixes.len() > 5 {
        0.02
    } else {
        0.0
    };
    (base + breaking_bonus + volume_bonus).clamp(0.0, 1.0)
}
/// Produces a one-sentence human explanation for the recommended bump,
/// citing the dominant commit category and the version transition.
fn generate_reasoning(
    analysis: &Analysis,
    bump_type: BumpType,
    current: &SemanticVersion,
    recommended: &SemanticVersion,
) -> String {
    match bump_type {
        BumpType::None => {
            format!("No significant changes detected. Version remains at {current}.")
        }
        BumpType::Major => {
            let count = analysis.breaking.len();
            format!(
                "{count} breaking changes detected. Version should advance from {current} to {recommended}."
            )
        }
        BumpType::Minor => {
            let count = analysis.features.len();
            format!(
                "{count} feature commits detected. Version should advance from {current} to {recommended}."
            )
        }
        BumpType::Patch => {
            let count = analysis.fixes.len();
            format!(
                "{count} fix/perf commits detected. Version should advance from {current} to {recommended}."
            )
        }
    }
}
/// Scores release risk from the change summary.
///
/// Each triggered factor adds its fixed weight to the total score; the level
/// label is then thresholded at 0.75 (high) and 0.35 (medium).
fn calculate_risk_assessment(
    breaking_changes: &[BreakingChange],
    features: &[Feature],
    fixes: &[Fix],
) -> RiskAssessment {
    // (triggered?, name, weight, reason) — evaluated in a fixed order so the
    // resulting score and factor list are deterministic.
    let candidates = [
        (
            !breaking_changes.is_empty(),
            "breaking_changes",
            0.65,
            "Breaking changes usually require migration work.",
        ),
        (
            features.len() > 2,
            "feature_count",
            0.20,
            "Multiple feature commits raise change surface area.",
        ),
        (
            fixes.len() > 5,
            "fix_volume",
            0.10,
            "High fix count often indicates churn before release.",
        ),
    ];
    let mut factors = Vec::new();
    let mut score = 0.0_f64;
    for (triggered, name, weight, reason) in candidates {
        if triggered {
            score += weight;
            factors.push(RiskFactor {
                name: name.to_string(),
                weight,
                reason: reason.to_string(),
            });
        }
    }
    let level = match score {
        s if s >= 0.75 => "high",
        s if s >= 0.35 => "medium",
        _ => "low",
    };
    RiskAssessment {
        score,
        level: level.to_string(),
        factors,
    }
}
/// Builds the workspace dependency graph: one edge per (package, dependency)
/// pair, all labelled with the "workspace" dependency kind.
fn build_dependency_graph(packages: &[ReleasePackage]) -> DependencyGraph {
    let mut graph = DependencyGraph::new();
    let edges = packages.iter().flat_map(|pkg| {
        pkg.dependencies.iter().map(move |dependency| {
            WorkspaceDependency::new(
                pkg.name.clone(),
                dependency.clone(),
                "workspace".to_string(),
            )
        })
    });
    for edge in edges {
        graph.add(edge);
    }
    graph
}
/// Wraps each crate in the publish order into its own sequential batch;
/// parallel publication is not supported, so every batch holds one crate.
fn build_batches(order: &[String]) -> Vec<PublicationBatch> {
    let mut batches = Vec::with_capacity(order.len());
    for (idx, name) in order.iter().enumerate() {
        batches.push(PublicationBatch {
            // Batch numbers are 1-based for display.
            number: idx + 1,
            crates: vec![name.clone()],
            can_parallelize: false,
        });
    }
    batches
}
/// Resolves the list of check names to run: a caller-supplied comma-separated
/// override (trimmed, empty entries dropped) or the built-in default suite.
fn default_checks(checks: Option<&str>) -> Vec<String> {
    match checks {
        Some(csv) => csv
            .split(',')
            .map(str::trim)
            .filter(|value| !value.is_empty())
            .map(ToOwned::to_owned)
            .collect(),
        None => ["test", "clippy", "fmt", "doc", "build"]
            .iter()
            .map(|name| (*name).to_string())
            .collect(),
    }
}
/// Collects the distinct conventional-commit scopes present in `commits`.
fn normalize_scope(commits: &[Commit]) -> HashSet<String> {
    let mut scopes = HashSet::new();
    for commit in commits {
        if let Some(scope) = &commit.scope {
            scopes.insert(scope.clone());
        }
    }
    scopes
}
/// Resolves an explicit bump strategy name, falling back to commit-derived
/// analysis for "auto" or when no strategy was given.
///
/// # Errors
/// Returns `ApplicationError::InvalidArguments` for an unknown strategy name.
fn parse_bump_override(value: Option<&str>, analysis: &Analysis) -> ApplicationResult<BumpType> {
    let Some(strategy) = value else {
        return Ok(analyze_bump_type(analysis));
    };
    match strategy {
        "major" => Ok(BumpType::Major),
        "minor" => Ok(BumpType::Minor),
        "patch" => Ok(BumpType::Patch),
        "none" => Ok(BumpType::None),
        "auto" => Ok(analyze_bump_type(analysis)),
        other => Err(ApplicationError::InvalidArguments(format!(
            "unsupported bump strategy `{other}`"
        ))),
    }
}
/// Parses an explicit target version, or returns the current version when
/// none was supplied.
///
/// # Errors
/// Returns `ApplicationError::InvalidArguments` when the string is not a
/// valid semantic version.
fn parse_target_version(
    current: &SemanticVersion,
    version: Option<&str>,
) -> ApplicationResult<SemanticVersion> {
    let Some(value) = version else {
        return Ok(current.clone());
    };
    SemanticVersion::parse(value).map_err(|error| {
        ApplicationError::InvalidArguments(format!(
            "failed to parse semantic version `{value}`: {error}"
        ))
    })
}
/// Fetches the commits made since the last `v`-prefixed release tag, or the
/// full history when no such tag exists.
async fn release_commits_since_last_tag<S: SourceControl>(
    source_control: &S,
) -> ApplicationResult<Vec<Commit>> {
    let last_tag = source_control.get_last_tag(Some("v")).await?;
    let commits = source_control
        .get_commits_since(last_tag.as_deref())
        .await?;
    Ok(commits)
}
/// Thin wrapper: builds a `Changelog` for `version` from the given commits.
fn changelog_from_commits(version: &SemanticVersion, commits: &[Commit]) -> Changelog {
    Changelog::from_commits(version.clone(), commits)
}
/// Parses a release tag (with an optional leading `v`) into a semantic
/// version; returns `None` for unparsable tags.
fn parse_release_tag_version(tag: &str) -> Option<SemanticVersion> {
    let bare = match tag.strip_prefix('v') {
        Some(rest) => rest,
        None => tag,
    };
    SemanticVersion::parse(bare).ok()
}
/// Builds the drift-error text when the workspace manifest version disagrees
/// with the version encoded in the latest release tag.
///
/// Returns `None` when the tag cannot be parsed or the versions agree.
fn version_drift_message(current: &SemanticVersion, last_tag: &str) -> Option<String> {
    let tagged_version = parse_release_tag_version(last_tag)?;
    if current == &tagged_version {
        return None;
    }
    // NOTE(review): `.version` is assumed to be the comparable numeric core of
    // `SemanticVersion`; an Equal core with an unequal full version (caught
    // above) is deliberately treated as "no drift" — confirm intended.
    let relation = match current.version.cmp(&tagged_version.version) {
        std::cmp::Ordering::Greater => "ahead of",
        std::cmp::Ordering::Less => "behind",
        std::cmp::Ordering::Equal => return None,
    };
    Some(format!(
        "workspace version {current} is {relation} last release tag {last_tag}. Commit your code changes first and let cargo-governor perform the release bump/changelog/tag step for you. Use --allow-version-drift only when you intentionally need to override this safety check."
    ))
}
/// Safety gate: fails when the manifest version drifted from the last release
/// tag, unless `allow_version_drift` is set or no release tag exists yet.
async fn enforce_version_alignment<S: SourceControl>(
    source_control: &S,
    current_version: &SemanticVersion,
    allow_version_drift: bool,
) -> ApplicationResult<()> {
    if allow_version_drift {
        return Ok(());
    }
    match source_control.get_last_tag(Some("v")).await? {
        // No release tag yet — nothing to compare against.
        None => Ok(()),
        Some(last_tag) => match version_drift_message(current_version, &last_tag) {
            Some(message) => Err(ApplicationError::InvalidArguments(message)),
            None => Ok(()),
        },
    }
}
/// Safety gate: fails unless the git working tree is clean (or `allow_dirty`
/// is set). The error message samples up to five changed paths and appends
/// "..." when more exist.
///
/// NOTE(review): the truncation test adds `total_changes()` to
/// `untracked.len()` — confirm `total_changes()` excludes untracked files,
/// otherwise they are counted twice and "..." may appear spuriously.
async fn enforce_clean_worktree<S: SourceControl>(
    source_control: &S,
    allow_dirty: bool,
) -> ApplicationResult<()> {
    if allow_dirty {
        return Ok(());
    }
    let status = source_control.get_working_tree_status().await?;
    if status.is_clean() {
        return Ok(());
    }
    // Sample at most five paths across all change categories for the message.
    let mut changed_files = status
        .modified
        .iter()
        .chain(status.added.iter())
        .chain(status.deleted.iter())
        .chain(status.untracked.iter())
        .take(5)
        .cloned()
        .collect::<Vec<_>>();
    if status.total_changes() + status.untracked.len() > changed_files.len() {
        changed_files.push("...".to_string());
    }
    Err(ApplicationError::InvalidArguments(format!(
        "working tree is dirty. Commit or stash your changes before running a mutating release command so cargo-governor can own the release commit. Changed files: {}. Use --allow-dirty only when you intentionally need to override this safety check.",
        changed_files.join(", ")
    )))
}
/// `analyze` command: classifies commits since the last release tag (or a
/// caller-supplied ref) and recommends a semantic-version bump with
/// confidence, reasoning, and optional risk assessment.
///
/// # Errors
/// Fails on workspace/source-control errors, version drift (unless allowed),
/// or an invalid bump override.
pub async fn analyze<W: WorkspacePort, S: SourceControl>(
    workspace_port: &W,
    source_control: &S,
    input: AnalyzeInput,
) -> ApplicationResult<AnalyzeOutput> {
    let workspace = workspace_port
        .load_release_workspace(Path::new(&input.workspace_path))
        .await?;
    // Refuse to analyze a workspace whose manifest version drifted from the
    // last tag, unless explicitly overridden.
    enforce_version_alignment(
        source_control,
        &workspace.current_version,
        input.allow_version_drift,
    )
    .await?;
    // `since` overrides the default "commits since last v-tag" range.
    let commits = if let Some(since) = input.since.as_deref() {
        source_control.get_commits_since(Some(since)).await?
    } else {
        release_commits_since_last_tag(source_control).await?
    };
    let analysis = analyze_commits(&commits);
    let bump_type = parse_bump_override(input.bump_override.as_deref(), &analysis)?;
    let recommended = bump_type.apply_to(&workspace.current_version);
    let breaking_changes = build_breaking_changes(analysis.breaking.clone(), &workspace.name);
    let features = build_features(analysis.features.clone(), &workspace.name);
    let fixes = build_fixes(analysis.fixes.clone(), &workspace.name);
    let confidence = calculate_confidence(&analysis, bump_type);
    let reasoning = generate_reasoning(
        &analysis,
        bump_type,
        &workspace.current_version,
        &recommended,
    );
    // Risk scoring is opt-in.
    let risk_assessment = input
        .risk_analysis
        .then(|| calculate_risk_assessment(&breaking_changes, &features, &fixes));
    Ok(AnalyzeOutput {
        workspace: workspace.name,
        current_version: workspace.current_version.to_string(),
        recommended_bump: bump_type,
        new_version: recommended.to_string(),
        confidence,
        reasoning,
        commits_analyzed: commits.len(),
        breaking_changes,
        features,
        fixes,
        risk_assessment,
    })
}
/// `plan` command: computes the dependency-ordered publication plan for the
/// next release, skipping already-published crates unless `include_published`
/// is set.
///
/// # Errors
/// Fails on workspace/source-control/registry errors, or version drift
/// (unless allowed).
pub async fn plan<W: WorkspacePort, S: SourceControl, R: Registry>(
    workspace_port: &W,
    source_control: &S,
    registry: &R,
    input: PlanInput,
) -> ApplicationResult<PlanOutput> {
    let workspace = workspace_port
        .load_release_workspace(Path::new(&input.workspace_path))
        .await?;
    enforce_version_alignment(
        source_control,
        &workspace.current_version,
        input.allow_version_drift,
    )
    .await?;
    // Derive the next version from commits since the last release tag.
    let commits = release_commits_since_last_tag(source_control).await?;
    let analysis = analyze_commits(&commits);
    let bump = analyze_bump_type(&analysis);
    let new_version = bump.apply_to(&workspace.current_version);
    let graph = build_dependency_graph(&workspace.packages);
    // An unsolvable (cyclic) graph yields an empty order; `success` below
    // reflects the cycle check.
    let order = graph.publish_order().unwrap_or_default();
    let edges = graph.dependencies.len();
    let nodes = graph.all_crates().len();
    let mut skipped = Vec::new();
    let mut publication_order = Vec::new();
    for name in &order {
        if let Some(package) = workspace
            .packages
            .iter()
            .find(|package| &package.name == name)
        {
            // The registry check uses the package's *current* version; the
            // plan entry reports the prospective new version.
            let published = registry
                .is_published(&package.name, &package.version)
                .await?;
            if published && !input.include_published {
                skipped.push(SkippedCrate {
                    name: package.name.clone(),
                    version: package.version.to_string(),
                    reason: "already_published".to_string(),
                });
                continue;
            }
            publication_order.push(PublicationCrate {
                order: publication_order.len() + 1,
                name: package.name.clone(),
                version: new_version.to_string(),
                dependencies: package.dependencies.clone(),
                status: "ready".to_string(),
                // Rough fixed estimate per crate, in seconds.
                estimated_publish_time_sec: 45,
            });
        }
    }
    let batches = build_batches(
        &publication_order
            .iter()
            .map(|item| item.name.clone())
            .collect::<Vec<_>>(),
    );
    Ok(PlanOutput {
        workspace: workspace.name,
        success: !graph.has_cycles(),
        current_version: workspace.current_version.to_string(),
        new_version: new_version.to_string(),
        bump,
        nodes,
        edges,
        publication_order,
        skipped_crates: skipped,
        batches,
    })
}
/// `status` command: reports branch, last release tag, commit count since
/// that tag, working-tree state, and per-crate change status.
///
/// A crate is considered "changed" when its name appears as a conventional-
/// commit scope in the commits since the last tag.
pub async fn status<W: WorkspacePort, S: SourceControl>(
    workspace_port: &W,
    source_control: &S,
    input: StatusInput,
) -> ApplicationResult<StatusOutput> {
    let workspace = workspace_port
        .load_release_workspace(Path::new(&input.workspace_path))
        .await?;
    let branch = source_control.get_current_branch().await?;
    let last_tag = source_control.get_last_tag(Some("v")).await?;
    // Without a tag there is no baseline, so no commits are attributed.
    let commits_since_tag = if let Some(tag) = &last_tag {
        source_control.get_commits_since(Some(tag)).await?
    } else {
        Vec::new()
    };
    let git = source_control.get_working_tree_status().await?;
    let changed_scopes = normalize_scope(&commits_since_tag);
    let crates = if input.all {
        // Full listing: every workspace crate with its version.
        workspace
            .packages
            .iter()
            .map(|package| StatusCrate {
                name: package.name.clone(),
                version: Some(package.version.to_string()),
                status: if changed_scopes.contains(&package.name) {
                    "changed".to_string()
                } else {
                    "unchanged".to_string()
                },
            })
            .collect()
    } else {
        // Compact listing: only scopes seen in recent commits (note: these
        // are commit scopes, which may not all correspond to crate names).
        changed_scopes
            .into_iter()
            .map(|name| StatusCrate {
                name,
                version: None,
                status: "changed".to_string(),
            })
            .collect()
    };
    let publish_order = input.show_deps.then(|| {
        build_dependency_graph(&workspace.packages)
            .publish_order()
            .unwrap_or_default()
    });
    Ok(StatusOutput {
        workspace: workspace.name,
        current_version: workspace.current_version.to_string(),
        branch,
        last_tag,
        commits_since_tag: commits_since_tag.len(),
        git,
        crates,
        publish_order,
    })
}
/// `check` command: runs the configured (or default) check suite through the
/// command port, optionally stopping at the first failure.
///
/// # Errors
/// Fails only when the command port itself errors; a failing check is
/// reported via `CheckOutput::success`.
pub async fn check<C: CommandPort>(
    command_port: &C,
    input: CheckInput,
) -> ApplicationResult<CheckOutput> {
    let workspace_path = Path::new(&input.workspace_path);
    let mut checks = Vec::new();
    let mut success = true;
    for check_name in default_checks(input.checks.as_deref()) {
        let report = command_port
            .run_check(workspace_path, &check_name, true)
            .await?;
        let passed = report.success;
        let message = if passed {
            "Passed".to_string()
        } else {
            format!("Check `{check_name}` failed")
        };
        checks.push(CheckItem {
            name: report.name,
            passed,
            exit_code: report.exit_code,
            message,
            stdout: report.stdout,
            stderr: report.stderr,
        });
        if !passed {
            success = false;
            if input.fail_fast {
                break;
            }
        }
    }
    Ok(CheckOutput { success, checks })
}
/// Derives the next version by classifying `commits` and applying the
/// resulting (or overridden) bump to `current`.
fn version_from_commits(
    current: &SemanticVersion,
    commits: &[Commit],
    bump_override: Option<&str>,
) -> ApplicationResult<SemanticVersion> {
    let analysis = analyze_commits(commits);
    parse_bump_override(bump_override, &analysis).map(|bump| bump.apply_to(current))
}
/// `bump` command: computes (or accepts) the target version, rewrites
/// manifests and the changelog, then optionally creates the release commit
/// and tag. All mutations are suppressed by `dry_run`.
///
/// NOTE(review): commit and tag failures are swallowed (`.ok()` / `.is_ok()`)
/// and surface only as `commit_hash: None` / `tag_created: false` — confirm
/// that silent degradation is intended.
pub async fn bump<W: WorkspacePort, S: SourceControl>(
    workspace_port: &W,
    source_control: &S,
    input: BumpInput,
) -> ApplicationResult<BumpOutput> {
    let workspace = workspace_port
        .load_release_workspace(Path::new(&input.workspace_path))
        .await?;
    enforce_version_alignment(
        source_control,
        &workspace.current_version,
        input.allow_version_drift,
    )
    .await?;
    // Only mutating runs require a clean tree.
    if !input.dry_run {
        enforce_clean_worktree(source_control, input.allow_dirty).await?;
    }
    // Explicit version wins; otherwise derive from commits since last tag.
    let target_version = if let Some(version) = input.version.as_deref() {
        parse_target_version(&workspace.current_version, Some(version))?
    } else {
        let commits = release_commits_since_last_tag(source_control).await?;
        version_from_commits(&workspace.current_version, &commits, input.bump.as_deref())?
    };
    let changed = target_version != workspace.current_version;
    let version_update = if changed {
        workspace_port
            .update_workspace_version(
                Path::new(&input.workspace_path),
                &target_version,
                input.dry_run,
            )
            .await?
    } else {
        VersionUpdate {
            modified_files: Vec::new(),
        }
    };
    let changelog_update = if input.no_changelog || !changed {
        ChangelogUpdate {
            updated: false,
            modified_files: Vec::new(),
        }
    } else {
        let commits = release_commits_since_last_tag(source_control).await?;
        let changelog = changelog_from_commits(&target_version, &commits);
        workspace_port
            .update_changelog(Path::new(&input.workspace_path), &changelog, input.dry_run)
            .await?
    };
    // Merge modified file lists, de-duplicating changelog entries.
    let mut files_modified = version_update.modified_files.clone();
    for file in changelog_update.modified_files {
        if !files_modified.contains(&file) {
            files_modified.push(file);
        }
    }
    let commit_hash = if changed && !input.no_commit && !input.dry_run {
        let template = input
            .commit_template
            .as_deref()
            .unwrap_or("chore(release): bump version to {{version}}");
        let message = format_commit_message(template, &target_version);
        source_control.commit(&message, &files_modified).await.ok()
    } else {
        None
    };
    let tag_created = if changed && !input.no_tag && !input.dry_run {
        let template = input.tag_template.as_deref().unwrap_or("v{{version}}");
        let tag_name = format_tag_name(template, &target_version);
        source_control
            .create_tag(&tag_name, &format!("Release {target_version}"))
            .await
            .is_ok()
    } else {
        false
    };
    Ok(BumpOutput {
        workspace: workspace.name,
        previous_version: workspace.current_version.to_string(),
        new_version: target_version.to_string(),
        changed,
        files_modified,
        commit_hash,
        tag_created,
        dry_run: input.dry_run,
    })
}
/// Parses an optional comma-separated filter into a set of trimmed,
/// non-empty names; `None` yields an empty set.
fn parse_csv_filter(values: Option<&str>) -> HashSet<String> {
    let Some(csv) = values else {
        return HashSet::new();
    };
    csv.split(',')
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .map(ToOwned::to_owned)
        .collect()
}
/// Runs the default check suite before publishing, honouring the caller's
/// skip set.
///
/// Fails fast: the first failing check is returned as an error (carrying only
/// its summary message), and previously passed checks are discarded with it.
async fn run_publish_checks<C: CommandPort>(
    command_port: &C,
    workspace_path: &str,
    checks_to_skip: &HashSet<String>,
) -> ApplicationResult<Vec<CheckItem>> {
    let mut checks = Vec::new();
    for check_name in default_checks(None) {
        if checks_to_skip.contains(&check_name) {
            continue;
        }
        let report = command_port
            .run_check(Path::new(workspace_path), &check_name, true)
            .await?;
        let item = CheckItem {
            name: report.name,
            passed: report.success,
            exit_code: report.exit_code,
            message: if report.success {
                "Passed".to_string()
            } else {
                format!("Check `{check_name}` failed")
            },
            stdout: report.stdout,
            stderr: report.stderr,
        };
        if !item.passed {
            return Err(ApplicationError::InvalidArguments(item.message));
        }
        checks.push(item);
    }
    Ok(checks)
}
fn ordered_publish_packages(
workspace: &crate::ports::ReleaseWorkspace,
input: &PublishInput,
) -> Vec<ReleasePackage> {
let only = parse_csv_filter(input.only.as_deref());
let exclude = parse_csv_filter(input.exclude.as_deref());
let mut packages = workspace
.packages
.iter()
.filter(|package| package.publish)
.filter(|package| only.is_empty() || only.contains(&package.name))
.filter(|package| !exclude.contains(&package.name))
.cloned()
.collect::<Vec<_>>();
let graph = build_dependency_graph(&packages);
let ordered_names = graph.publish_order().unwrap_or_default();
packages.sort_by_key(|package| {
ordered_names
.iter()
.position(|name| name == &package.name)
.unwrap_or(usize::MAX)
});
packages
}
/// Adapts a workspace package into the registry port's `CratePackage`
/// payload. The crate file path and token are left empty here — presumably
/// resolved by the registry adapter; TODO confirm.
fn build_registry_package(package: &ReleasePackage, dry_run: bool) -> CratePackage {
    let name = package.name.clone();
    let version = package.version.clone();
    let manifest_path = package.manifest_path.clone();
    CratePackage {
        name,
        version,
        crate_file: std::path::PathBuf::new(),
        manifest_path,
        token: String::new(),
        dry_run,
    }
}
/// Publishes `package` through the registry, retrying failed attempts with a
/// fixed 5-second pause between them.
///
/// `input.max_retries` is the total attempt budget (default 3). Previously a
/// caller-supplied value of 0 skipped the loop entirely and reported a bogus
/// "publish failed" without ever contacting the registry; at least one
/// attempt is now always made.
///
/// # Errors
/// Returns the last attempt's error message when every attempt fails.
async fn publish_package_with_retry<R: Registry>(
    registry: &R,
    package: &ReleasePackage,
    input: &PublishInput,
) -> Result<PublishResult, String> {
    let crate_data = build_registry_package(package, input.dry_run);
    // Guarantee at least one attempt even if the caller passes 0.
    let max_attempts = input.max_retries.unwrap_or(3).max(1);
    let mut last_error = None;
    for attempt in 0..max_attempts {
        match registry.publish(&crate_data).await {
            Ok(result) => return Ok(result),
            Err(error) => {
                last_error = Some(error.to_string());
                // Pause before the next attempt, but not after the final one.
                if attempt + 1 < max_attempts {
                    sleep(Duration::from_secs(5)).await;
                }
            }
        }
    }
    Err(last_error.unwrap_or_else(|| "publish failed".to_string()))
}
/// Publishes the (already ordered) packages one by one, returning the
/// published / skipped / failed partitions.
///
/// Already-published versions are skipped. After a successful publish, if a
/// later package in the list depends on this one, we block until the registry
/// reports the new version visible (so dependents don't fail resolution).
/// `input.delay` doubles as the visibility poll interval, with a fixed
/// 60-second timeout. On failure the loop stops unless `on_error == "skip"`.
async fn publish_selected_packages<R: Registry>(
    registry: &R,
    packages: Vec<ReleasePackage>,
    input: &PublishInput,
) -> ApplicationResult<(
    Vec<PublishedCrate>,
    Vec<SkippedCrate>,
    Vec<(String, String, String)>,
)> {
    let mut published = Vec::new();
    let mut skipped = Vec::new();
    let mut failed = Vec::new();
    let continue_on_error = matches!(input.on_error.as_deref(), Some("skip"));
    let publish_interval = input.delay.unwrap_or(2);
    for (index, package) in packages.iter().enumerate() {
        if registry
            .is_published(&package.name, &package.version)
            .await?
        {
            skipped.push(SkippedCrate {
                name: package.name.clone(),
                version: package.version.to_string(),
                reason: "already_published".to_string(),
            });
            continue;
        }
        match publish_package_with_retry(registry, package, input).await {
            Ok(result) => {
                published.push(PublishedCrate {
                    name: result.crate_name,
                    version: result.version.to_string(),
                    publish_time_ms: result.duration_ms,
                    crates_io_url: result.crates_io_url,
                });
                // Only wait for registry propagation when a not-yet-published
                // package later in the order depends on this one.
                let remaining_packages = &packages[index + 1..];
                let has_dependent_follow_up = remaining_packages
                    .iter()
                    .any(|candidate| candidate.dependencies.contains(&package.name));
                if !input.dry_run && has_dependent_follow_up {
                    wait_for_registry_visibility(
                        registry,
                        package,
                        Duration::from_secs(publish_interval),
                        Duration::from_secs(60),
                    )
                    .await?;
                }
            }
            Err(error) => {
                failed.push((package.name.clone(), package.version.to_string(), error));
                if !continue_on_error {
                    break;
                }
            }
        }
    }
    Ok((published, skipped, failed))
}
/// Polls the registry until the freshly published crate version becomes
/// visible, or fails once `timeout` has elapsed.
///
/// # Errors
/// Returns a registry `ApiError` (converted into the application error type)
/// on timeout, and propagates registry lookup failures.
async fn wait_for_registry_visibility<R: Registry>(
    registry: &R,
    package: &ReleasePackage,
    poll_interval: Duration,
    timeout: Duration,
) -> ApplicationResult<()> {
    let started_at = std::time::Instant::now();
    loop {
        let visible = registry
            .is_published(&package.name, &package.version)
            .await?;
        if visible {
            return Ok(());
        }
        // Checked after the lookup so at least one poll always happens.
        if started_at.elapsed() >= timeout {
            let detail = format!(
                "crate `{}` version {} was published locally but did not become visible in the registry within {} seconds",
                package.name,
                package.version,
                timeout.as_secs()
            );
            return Err(
                governor_core::traits::registry::RegistryError::ApiError(detail).into(),
            );
        }
        sleep(poll_interval).await;
    }
}
/// `publish` command: runs safety gates and pre-publish checks, then pushes
/// the selected crates to the registry in dependency order.
///
/// A failing pre-publish check does not return `Err`: it is reported as a
/// synthetic "checks" entry in `crates_failed` with an empty `checks` list,
/// so callers always receive a `PublishOutput`.
pub async fn publish<W: WorkspacePort, C: CommandPort, S: SourceControl, R: Registry>(
    workspace_port: &W,
    command_port: &C,
    source_control: &S,
    registry: &R,
    input: PublishInput,
) -> ApplicationResult<PublishOutput> {
    let workspace = workspace_port
        .load_release_workspace(Path::new(&input.workspace_path))
        .await?;
    enforce_version_alignment(
        source_control,
        &workspace.current_version,
        input.allow_version_drift,
    )
    .await?;
    // Only mutating runs require a clean tree.
    if !input.dry_run {
        enforce_clean_worktree(source_control, input.allow_dirty).await?;
    }
    let checks_to_skip = parse_csv_filter(input.skip_checks.as_deref());
    let checks =
        match run_publish_checks(command_port, &input.workspace_path, &checks_to_skip).await {
            Ok(checks) => checks,
            Err(error) => {
                // Convert the check failure into a structured failure entry.
                return Ok(PublishOutput {
                    workspace: workspace.name,
                    crates_published: Vec::new(),
                    crates_skipped: Vec::new(),
                    crates_failed: vec![(
                        "checks".to_string(),
                        "checks".to_string(),
                        error.to_string(),
                    )],
                    dry_run: input.dry_run,
                    checks: Vec::new(),
                });
            }
        };
    let packages = ordered_publish_packages(&workspace, &input);
    let (published, skipped, failed) =
        publish_selected_packages(registry, packages, &input).await?;
    Ok(PublishOutput {
        workspace: workspace.name,
        crates_published: published,
        crates_skipped: skipped,
        crates_failed: failed,
        dry_run: input.dry_run,
    checks,
    })
}
/// `full` pipeline: check → bump → publish → push, recording each completed
/// step.
///
/// NOTE(review): dry-run status is taken from `input.publish.dry_run` while
/// the dirty-tree override comes from `input.bump.allow_dirty` — confirm the
/// two nested inputs are kept consistent by the CLI layer.
pub async fn full<W: WorkspacePort, C: CommandPort, S: SourceControl, R: Registry>(
    workspace_port: &W,
    command_port: &C,
    source_control: &S,
    registry: &R,
    input: FullInput,
) -> ApplicationResult<FullOutput> {
    if !input.publish.dry_run {
        enforce_clean_worktree(source_control, input.bump.allow_dirty).await?;
    }
    let mut steps_completed = Vec::new();
    if !input.skip_checks {
        // Fail fast on the first failing check.
        let checks = check(
            command_port,
            CheckInput {
                workspace_path: input.workspace_path.clone(),
                checks: None,
                fail_fast: true,
            },
        )
        .await?;
        if !checks.success {
            return Err(ApplicationError::InvalidArguments(
                "pre-publish checks failed".to_string(),
            ));
        }
        steps_completed.push("check".to_string());
    }
    let bump_output = bump(workspace_port, source_control, input.bump.clone()).await?;
    steps_completed.push("bump".to_string());
    if bump_output.commit_hash.is_some() {
        steps_completed.push("commit".to_string());
    }
    if bump_output.tag_created {
        steps_completed.push("tag".to_string());
    }
    let publish_output = publish(
        workspace_port,
        command_port,
        source_control,
        registry,
        input.publish.clone(),
    )
    .await?;
    steps_completed.push("publish".to_string());
    // Push the release commit/tag unless suppressed or dry-running.
    if !input.no_push && !input.publish.dry_run {
        source_control.push(None, None).await?;
        steps_completed.push("push".to_string());
    }
    Ok(FullOutput {
        workspace: bump_output.workspace,
        steps_completed,
        version: bump_output.new_version,
        publish: publish_output,
        dry_run: input.publish.dry_run,
    })
}
/// `simulate` command: previews the version transition and change summary for
/// a hypothetical release without mutating anything.
///
/// NOTE(review): when no explicit version is given the target defaults to the
/// *current* version (so `bump_type` reports "none"); the commit-derived
/// fallback only triggers when an explicit version fails to parse, silently
/// swallowing that parse error — confirm this is intended.
pub async fn simulate<W: WorkspacePort, S: SourceControl>(
    workspace_port: &W,
    source_control: &S,
    input: SimulateInput,
) -> ApplicationResult<SimulateOutput> {
    let workspace = workspace_port
        .load_release_workspace(Path::new(&input.workspace_path))
        .await?;
    let commits = release_commits_since_last_tag(source_control).await?;
    let analysis = analyze_commits(&commits);
    let target_version = parse_target_version(&workspace.current_version, input.version.as_deref())
        .or_else(|_| version_from_commits(&workspace.current_version, &commits, None))?;
    let mut version_diff = HashMap::new();
    version_diff.insert("current".to_string(), workspace.current_version.to_string());
    version_diff.insert("target".to_string(), target_version.to_string());
    // Classify the transition by the highest version component that grew.
    version_diff.insert(
        "bump_type".to_string(),
        if target_version.major() > workspace.current_version.major() {
            "major"
        } else if target_version.minor() > workspace.current_version.minor() {
            "minor"
        } else if target_version.patch() > workspace.current_version.patch() {
            "patch"
        } else {
            "none"
        }
        .to_string(),
    );
    Ok(SimulateOutput {
        workspace: workspace.name,
        version_diff,
        breaking_changes: analysis.breaking.len(),
        features: analysis.features.len(),
        fixes: analysis.fixes.len(),
        // Commits not classified as breaking/feature/fix.
        other: commits.len().saturating_sub(
            analysis.breaking.len() + analysis.features.len() + analysis.fixes.len(),
        ),
        recommendations: {
            let mut values = Vec::new();
            if !analysis.breaking.is_empty() {
                values.push("Update migration notes before release.".to_string());
            }
            if input.downstream_crates.unwrap_or(0) > 0 && !analysis.breaking.is_empty() {
                values.push(
                    "Notify downstream maintainers about a potentially breaking release."
                        .to_string(),
                );
            }
            if values.is_empty() {
                values.push("No special simulation recommendations.".to_string());
            }
            values
        },
    })
}
/// `resume` command: clears, lists, or loads release checkpoints.
///
/// Precedence: `clean` wins over `list`, which wins over loading. Listing
/// returns summary-only checkpoints (empty step/state/error fields) built
/// from the store's lightweight info records.
pub async fn resume<S: CheckpointStore>(
    store: &S,
    input: ResumeInput,
) -> ApplicationResult<ResumeOutput> {
    if input.clean {
        store.clear().await?;
        return Ok(ResumeOutput {
            workspace: input.workspace_path,
            checkpoints: Vec::new(),
            cleared: true,
        });
    }
    if input.list {
        let checkpoints = store
            .list()
            .await?
            .into_iter()
            .map(|info| Checkpoint {
                id: info.id,
                workflow_id: info.workflow_id,
                step_index: info.step_index,
                // Summary listing: full step/state detail is not loaded.
                completed_steps: Vec::new(),
                state: serde_json::Value::Null,
                target_version: None,
                timestamp: info.timestamp,
                interrupted_by_error: info.has_error,
                error_message: None,
            })
            .collect();
        return Ok(ResumeOutput {
            workspace: input.workspace_path,
            checkpoints,
            cleared: false,
        });
    }
    // Load a specific checkpoint by id, or the latest one.
    let checkpoint = if let Some(id) = input.checkpoint.as_deref() {
        store.load_by_id(id).await?
    } else {
        store.load().await?
    };
    Ok(ResumeOutput {
        workspace: input.workspace_path,
        // `Option` → zero-or-one element vector.
        checkpoints: checkpoint.into_iter().collect(),
        cleared: false,
    })
}