use std::{
collections::{HashMap, HashSet},
path::Path,
};
use anyhow::Context as _;
use cargo::util::VersionExt as _;
use cargo_metadata::{
Package, TargetKind,
camino::{Utf8Path, Utf8PathBuf},
semver::Version,
};
use cargo_utils::{CARGO_TOML, LocalManifest};
use git_cliff_core::{
config::{ChangelogConfig, Config},
contributor::RemoteContributor,
};
use git_cmd::Repo;
use next_version::NextVersion as _;
use rayon::iter::{IntoParallelRefMutIterator as _, ParallelIterator as _};
use std::sync::Once;
use tracing::{debug, info, instrument, warn};
use crate::{
ChangelogBuilder, ChangelogRequest, NO_COMMIT_ID, PackagePath as _, Project, Remote, RepoUrl,
UpdateResult,
changelog_filler::{fill_commit, get_required_info},
changelog_parser,
command::update::changelog_update::OldChangelogs,
diff::{Commit, Diff},
fs_utils, lock_compare,
registry_packages::{PackagesCollection, RegistryPackage},
semver_check::{self, SemverCheck},
toml_compare,
version::NextVersionFromDiff as _,
};
use super::{
PackagesToUpdate, PackagesUpdate, package_dependencies::PackageDependencies as _,
update_request::UpdateRequest,
};
// Emits the "checking semver" banner at most once per process, even though
// semver checks run in parallel across packages (see `get_packages_diffs`).
static SEMVER_CHECK_LOG_ONCE: Once = Once::new();
/// Computes which packages of a [`Project`] need a version bump and/or a
/// changelog update, according to the settings of an [`UpdateRequest`].
#[derive(Debug)]
pub struct Updater<'a> {
pub project: &'a Project,
pub req: &'a UpdateRequest,
}
impl Updater<'_> {
/// Determine the set of packages that need to be updated, together with
/// their next versions and changelog updates.
///
/// Steps, in order:
/// 1. Diff every processable package against its registry-published copy.
/// 2. Resolve workspace-inherited versions and `version_group`s.
/// 3. For each package whose diff requires it, compute the next version and
///    the changelog update.
/// 4. Finally, bump packages whose only change is a dependency on one of the
///    packages updated above.
#[instrument(skip_all)]
pub async fn packages_to_update(
&self,
registry_packages: &PackagesCollection,
repository: &Repo,
local_manifest_path: &Utf8Path,
) -> anyhow::Result<PackagesUpdate> {
debug!("calculating local packages");
let packages_diffs = self
.get_packages_diffs(registry_packages, repository)
.await?;
let version_groups = self.get_version_groups(&packages_diffs)?;
debug!("version groups: {:?}", version_groups);
// Packages with no direct update; they may still be bumped later if they
// depend on an updated package.
let mut packages_to_check_for_deps: Vec<&Package> = vec![];
let mut packages_to_update = PackagesUpdate::default();
// Names of packages whose `version` is inherited from the workspace
// (`version.workspace = true`).
// NOTE(review): the `unwrap`s assume every diffed package has a path and a
// readable manifest — TODO confirm this holds for all processable packages.
let workspace_version_pkgs: HashSet<String> = packages_diffs
.iter()
.filter(|(p, _)| {
let local_manifest_path = p.package_path().unwrap().join(CARGO_TOML);
let local_manifest = LocalManifest::try_new(&local_manifest_path).unwrap();
local_manifest.version_is_inherited()
})
.map(|(p, _)| p.name.to_string())
.collect();
let new_workspace_version = self.new_workspace_version(
local_manifest_path,
&packages_diffs,
&workspace_version_pkgs,
)?;
if let Some(new_workspace_version) = &new_workspace_version {
packages_to_update.with_workspace_version(new_workspace_version.clone());
}
// Cache of changelog files read from disk, shared across packages that
// point to the same changelog path.
let mut old_changelogs = OldChangelogs::new();
for (p, diff) in packages_diffs {
// If the user restricted releases to commits matching a regex, skip
// packages whose diff contains none of them.
if let Some(release_commits_regex) = self.req.release_commits()
&& !diff.any_commit_matches(release_commits_regex)
{
info!("{}: no commit matches the `release_commits` regex", p.name);
packages_to_check_for_deps.push(p);
continue;
}
let next_version = self.get_next_version(
new_workspace_version.as_ref(),
p,
&workspace_version_pkgs,
&version_groups,
&diff,
)?;
debug!(
"package: {}, diff: {diff:?}, next_version: {next_version}",
p.name,
);
let current_version = p.version.clone();
// The local version was already bumped manually but not published yet:
// in that case only the changelog needs updating.
let version_already_bumped = !diff.is_version_published && !diff.commits.is_empty();
if next_version != current_version
|| !diff.registry_package_exists
|| version_already_bumped
{
if version_already_bumped {
info!(
"{}: updating changelog for version {current_version}{}",
p.name,
diff.semver_check.outcome_str()
);
} else {
info!(
"{}: next version is {next_version}{}",
p.name,
diff.semver_check.outcome_str()
);
}
let update_result = self.calculate_update_result(
diff.commits,
next_version,
p,
diff.semver_check,
diff.registry_version,
&mut old_changelogs,
)?;
packages_to_update
.updates_mut()
.push((p.clone(), update_result));
} else if diff.is_version_published {
packages_to_check_for_deps.push(p);
}
}
// Propagate version bumps to packages that depend on the updated ones.
let changed_packages: Vec<(&Package, Version)> = packages_to_update
.updates()
.iter()
.map(|(p, u)| (p, u.version.clone()))
.collect();
let dependent_packages =
self.dependent_packages_update(&packages_to_check_for_deps, &changed_packages)?;
packages_to_update.updates_mut().extend(dependent_packages);
Ok(packages_to_update)
}
/// Compute, for every configured `version_group`, the highest "next version"
/// among the group's member packages.
///
/// All packages sharing a `version_group` are released with the same version,
/// so the group's version is the maximum of the members' individually
/// computed next versions.
fn get_version_groups(
    &self,
    packages_diffs: &[(&Package, Diff)],
) -> anyhow::Result<HashMap<String, Version>> {
    let mut version_groups: HashMap<String, Version> = HashMap::new();
    for (pkg, diff) in packages_diffs {
        let pkg_config = self.req.get_package_config(&pkg.name);
        let version_updater = pkg_config.generic.version_updater()?;
        if let Some(version_group) = pkg_config.version_group {
            let next_pkg_ver = pkg.version.next_from_diff(diff, version_updater);
            // Mutate through the entry itself: the previous code re-inserted
            // through the map (a second hash lookup) and cloned the group
            // name just to build the entry key.
            match version_groups.entry(version_group) {
                std::collections::hash_map::Entry::Occupied(mut entry) => {
                    if *entry.get() < next_pkg_ver {
                        entry.insert(next_pkg_ver);
                    }
                }
                std::collections::hash_map::Entry::Vacant(entry) => {
                    entry.insert(next_pkg_ver);
                }
            }
        }
    }
    Ok(version_groups)
}
/// Compute the new workspace-level `version`, if any package inheriting it
/// needs a bump.
///
/// Returns the maximum "next version" among workspace-version packages that
/// is `>=` the current workspace version, or `None` when nothing qualifies
/// (or the manifest declares no `[workspace.package] version`).
fn new_workspace_version(
    &self,
    local_manifest_path: &Utf8Path,
    packages_diffs: &[(&Package, Diff)],
    workspace_version_pkgs: &HashSet<String>,
) -> anyhow::Result<Option<Version>> {
    let workspace_version = {
        let local_manifest = LocalManifest::try_new(local_manifest_path)?;
        local_manifest.get_workspace_version()
    };
    let mut new_versions = Vec::new();
    // Single pass over the diffs with a set lookup, instead of the previous
    // O(workspace_pkgs × diffs) nested loops.
    for (p, diff) in packages_diffs {
        if !workspace_version_pkgs.contains(p.name.as_str()) {
            continue;
        }
        let pkg_config = self.req.get_package_config(&p.name);
        let version_updater = pkg_config.generic.version_updater()?;
        let next = p.version.next_from_diff(diff, version_updater);
        if let Some(workspace_version) = &workspace_version
            && &next >= workspace_version
        {
            new_versions.push(next);
        }
    }
    Ok(new_versions.into_iter().max())
}
/// Compute the [`Diff`] of every package to process, then enrich the diffs:
/// fill in the commit info required by the changelog template, merge commits
/// from `changelog_include` packages, and run cargo-semver-checks where
/// enabled.
async fn get_packages_diffs(
&self,
registry_packages: &PackagesCollection,
repository: &Repo,
) -> anyhow::Result<Vec<(&Package, Diff)>> {
let packages_diffs_res: anyhow::Result<Vec<(&Package, Diff)>> = self
.packages_to_process()
.iter()
.map(|&p| {
let diff = self
.get_diff(p, registry_packages, repository)
.with_context(|| {
format!("failed to retrieve difference of package {}", p.name)
})?;
Ok((p, diff))
})
.collect();
let mut packages_diffs = self.fill_commits(&packages_diffs_res?, repository).await?;
// Snapshot of each package's commits, so `changelog_include` can copy
// commits across packages while `packages_diffs` is mutably borrowed below.
let packages_commits: HashMap<String, Vec<Commit>> = packages_diffs
.iter()
.map(|(p, d)| (p.name.to_string(), d.commits.clone()))
.collect();
// Semver checks are expensive, so each package is processed in parallel.
let semver_check_result: anyhow::Result<()> =
packages_diffs.par_iter_mut().try_for_each(|(p, diff)| {
let registry_package = registry_packages.get_package(&p.name);
if let Some(registry_package) = registry_package {
let package_path = get_package_path(p, repository, self.project.root())
.context("can't retrieve package path")?;
let package_config = self.req.get_package_config(&p.name);
for pkg_to_include in &package_config.changelog_include {
if let Some(commits) = packages_commits.get(pkg_to_include) {
diff.add_commits(commits);
}
}
if should_check_semver(p, package_config.semver_check())
&& diff.should_update_version()
{
let registry_package_path = registry_package
.package_path()
.context("can't retrieve registry package path")?;
// Print the banner only once across all parallel tasks.
SEMVER_CHECK_LOG_ONCE.call_once(|| {
tracing::info!(
"Checking API compatibility with cargo-semver-checks..."
);
});
let semver_check =
semver_check::run_semver_check(&package_path, registry_package_path)
.context("error while running cargo-semver-checks")?;
diff.set_semver_check(semver_check);
}
}
Ok(())
});
semver_check_result?;
Ok(packages_diffs)
}
/// Collect the packages the updater should examine: all publishable
/// packages, plus the ones configured as git-only, deduplicated by name
/// (first occurrence wins).
fn packages_to_process(&self) -> Vec<&Package> {
    let mut seen_names: HashSet<String> = HashSet::new();
    let mut result: Vec<&Package> = Vec::new();
    // Publishable packages come first.
    for pkg in self.project.publishable_packages() {
        let is_new = seen_names.insert(pkg.name.to_string());
        if is_new {
            result.push(pkg);
        }
    }
    // Then workspace packages that are tracked via git only.
    for pkg in self.project.workspace_packages() {
        if !self.req.should_use_git_only(&pkg.name) {
            continue;
        }
        if seen_names.insert(pkg.name.to_string()) {
            result.push(pkg);
        }
    }
    result
}
/// Enrich the commits of every diff with the extra information (e.g. remote
/// contributor data) required by the changelog template.
///
/// `all_commits` caches commits — presumably keyed by commit hash; verify in
/// `fill_commit` — so shared information is fetched only once.
async fn fill_commits<'a>(
&self,
packages_diffs: &[(&'a Package, Diff)],
repository: &Repo,
) -> anyhow::Result<Vec<(&'a Package, Diff)>> {
let git_client = self.req.git_client()?;
let changelog_request: &ChangelogRequest = self.req.changelog_req();
let mut all_commits: HashMap<String, &Commit> = HashMap::new();
let mut packages_diffs = packages_diffs.to_owned();
// Without a custom changelog configuration there is nothing to fill.
if let Some(changelog_config) = changelog_request.changelog_config.as_ref() {
// Inspect the template once to know which commit fields are needed.
let required_info = get_required_info(&changelog_config.changelog);
for (_package, diff) in &mut packages_diffs {
for commit in &mut diff.commits {
fill_commit(
commit,
&required_info,
repository,
&mut all_commits,
git_client.as_ref(),
)
.await
.context(
"Failed to fetch the commit information required by the changelog template",
)?;
}
}
}
Ok(packages_diffs)
}
/// Compute updates for packages that didn't change themselves but depend on
/// packages that are being updated.
///
/// Runs to a fixed point: bumping one dependent can require bumping other
/// packages that depend on it in turn, so we iterate until a full pass
/// updates nothing.
fn dependent_packages_update(
&self,
packages_to_check_for_deps: &[&Package],
initial_changed_packages: &[(&Package, Version)],
) -> anyhow::Result<PackagesToUpdate> {
let workspace_manifest = LocalManifest::try_new(self.req.local_manifest())?;
let workspace_dependencies = workspace_manifest.get_workspace_dependency_table();
let mut old_changelogs = OldChangelogs::new();
let workspace_dir = crate::manifest_dir(self.req.local_manifest())?;
// Packages already updated (or updated in a previous pass): never bump twice.
let mut processed: HashSet<String> = initial_changed_packages
.iter()
.map(|(p, _)| p.name.to_string())
.collect();
let mut result = Vec::new();
let mut all_changed_packages: Vec<(&Package, Version)> = initial_changed_packages.to_vec();
loop {
let mut any_package_updated = false;
for p in packages_to_check_for_deps {
if processed.contains(p.name.as_ref()) {
continue;
}
// Bump only if the package actually depends on a changed package.
if let Ok(deps) = p.dependencies_to_update(
&all_changed_packages,
workspace_dependencies,
workspace_dir,
) && !deps.is_empty()
{
let update =
self.calculate_package_update_result(&deps, p, &mut old_changelogs)?;
result.push(update.clone());
all_changed_packages.push((p, update.1.version.clone()));
processed.insert(p.name.to_string());
any_package_updated = true;
}
}
// Fixed point reached: no new updates in this pass.
if !any_package_updated {
break;
}
}
Ok(result)
}
/// Build the update for a package whose only change is that some of its
/// local dependencies (`deps`) were bumped: synthesize a single commit
/// describing the dependency updates and increment the patch (or
/// pre-release) component.
fn calculate_package_update_result(
    &self,
    deps: &[&Package],
    p: &Package,
    old_changelogs: &mut OldChangelogs,
) -> anyhow::Result<(Package, UpdateResult)> {
    let dep_names = deps
        .iter()
        .map(|d| d.name.as_str())
        .collect::<Vec<_>>()
        .join(", ");
    let message = format!("chore: updated the following local packages: {}", dep_names);
    let commits = vec![Commit::new(NO_COMMIT_ID.to_string(), message)];
    // Dependency-only bumps are never breaking: smallest possible increment.
    let next_version = if p.version.is_prerelease() {
        p.version.increment_prerelease()
    } else {
        p.version.increment_patch()
    };
    info!(
        "{}: dependencies changed. Next version is {next_version}",
        p.name
    );
    let update_result = self.calculate_update_result(
        commits,
        next_version,
        p,
        SemverCheck::Skipped,
        None,
        old_changelogs,
    )?;
    Ok((p.clone(), update_result))
}
/// Compute the [`UpdateResult`] for one package and keep the shared
/// changelog cache (`old_changelogs`) up to date, so that packages sharing a
/// changelog file see each other's updates.
fn calculate_update_result(
    &self,
    commits: Vec<Commit>,
    next_version: Version,
    p: &Package,
    semver_check: SemverCheck,
    registry_version: Option<Version>,
    old_changelogs: &mut OldChangelogs,
) -> Result<UpdateResult, anyhow::Error> {
    let changelog_path = self.req.changelog_path(p);
    let previous_changelog = old_changelogs.get_or_read(&changelog_path);
    let result = self.update_result(
        commits,
        next_version,
        p,
        semver_check,
        registry_version,
        previous_changelog.as_deref(),
    )?;
    // Cache the regenerated changelog for the next package using this file.
    if let Some(updated_changelog) = result.changelog.as_ref() {
        old_changelogs.insert(changelog_path, updated_changelog.clone());
    }
    Ok(result)
}
/// Build the [`UpdateResult`] for one package: the next version, the release
/// comparison link, and (when enabled) the regenerated changelog.
fn update_result(
&self,
commits: Vec<Commit>,
version: Version,
package: &Package,
semver_check: SemverCheck,
registry_version: Option<Version>,
old_changelog: Option<&str>,
) -> anyhow::Result<UpdateResult> {
let repo_url = self.req.repo_url();
// Link comparing the previously released tag with the new one.
let release_link = {
let prev_version = registry_version
.as_ref()
.unwrap_or(&package.version)
.to_string();
let prev_tag = self.project.git_tag(&package.name, &prev_version)?;
let next_tag = self.project.git_tag(&package.name, &version.to_string())?;
repo_url.map(|r| r.git_release_link(&prev_tag, &next_tag))
};
let changelog_outcome = {
let cfg = self.req.get_package_config(package.name.as_str());
// `None` when the user disabled changelog updates for this package.
let changelog_req = cfg
.should_update_changelog()
.then_some(self.req.changelog_req().clone());
// Non-conventional commit messages are truncated to their first line
// so they render reasonably in the changelog.
let commits: Vec<Commit> = commits
.into_iter()
.filter_map(|c| {
if c.is_conventional() {
Some(c)
} else {
c.message.lines().next().map(|line| Commit {
message: line.to_string(),
..c
})
}
})
.collect();
changelog_req
.map(|r| {
get_changelog(
&commits,
&version,
Some(r),
old_changelog,
repo_url,
release_link.as_deref(),
package,
)
})
.transpose()
}?;
// `changelog` is the whole file content; `new_changelog_entry` is only the
// section added for this release.
let (changelog, new_changelog_entry) = match changelog_outcome {
Some((changelog, new_changelog_entry)) => (Some(changelog), Some(new_changelog_entry)),
None => (None, None),
};
Ok(UpdateResult {
version,
changelog,
semver_check,
new_changelog_entry,
registry_version,
})
}
/// Compute the [`Diff`] between the local `package` and its registry copy.
///
/// Temporarily checks out commits of `repository` while walking the history;
/// HEAD is restored both before and after the walk, so the repository ends
/// up where it started.
#[instrument(
skip_all,
fields(package = %package.name)
)]
fn get_diff(
&self,
package: &Package,
registry_packages: &PackagesCollection,
repository: &Repo,
) -> anyhow::Result<Diff> {
info!(
"determining next version for {} {}",
package.name, package.version
);
let package_path = get_package_path(package, repository, self.project.root())
.context("failed to determine package path")?;
// Start from a clean HEAD so the history walk below is deterministic.
repository
.checkout_head()
.context("can't checkout head to calculate diff")?;
let registry_package = registry_packages.get_registry_package(&package.name);
let mut diff = Diff::new(registry_package.is_some());
let pathbufs_to_check = pathbufs_to_check(&package_path, package)?;
let paths_to_check: Vec<&Path> = pathbufs_to_check.iter().map(|p| p.as_ref()).collect();
repository
.checkout_last_commit_at_paths(&paths_to_check)
.map_err(|err| {
if err
.to_string()
.contains("Your local changes to the following files would be overwritten")
{
err.context("The allow-dirty option can't be used in this case")
} else {
err.context("Failed to retrieve the last commit of local repository.")
}
})?;
let git_tag = self
.project
.git_tag(&package.name, &package.version.to_string())?;
let tag_commit = repository.get_tag_commit(&git_tag);
let using_git_only = || self.req.should_use_git_only(&package.name);
// A version tag exists: sanity-check that the registry agrees, otherwise
// the user probably forgot to publish.
if tag_commit.is_some() && !using_git_only() {
let config = self.req.get_package_config(&package.name);
if config.should_publish() {
let registry_package = registry_package.with_context(|| format!("package `{}` not found in the registry, but the git tag {git_tag} exists. Consider running `cargo publish` manually to publish this package.", package.name))?;
anyhow::ensure!(
package.version <= registry_package.package.version,
"local package `{}` has a greater version ({}) with respect to the registry package ({}), but the git tag {git_tag} exists. Consider running `cargo publish` manually to publish the new version of this package.",
package.name,
package.version,
registry_package.package.version
);
}
}
self.get_package_diff(
&package_path,
package,
registry_package,
repository,
tag_commit.as_deref(),
&mut diff,
)?;
// Restore the repository to its original state.
repository
.checkout_head()
.context("can't checkout to head after calculating diff")?;
Ok(diff)
}
/// Populate `diff` with the commits that changed `package`, walking the git
/// history backwards from the current checkout.
///
/// The walk stops when:
/// - the local package contents equal the registry copy (nothing new), or
/// - the current commit is an ancestor of the release tag / publish commit, or
/// - the history is exhausted, or
/// - `max_analyze_commits` commits were analyzed (only enforced for packages
///   that were never published, since they have no natural stopping commit).
///
/// Note: this also repairs mojibake present in the previous source, where
/// `&current_commit_hash` and `&registry_package` had been corrupted into
/// HTML-entity characters (`¤`, `®`), which did not compile.
fn get_package_diff(
    &self,
    package_path: &Utf8Path,
    package: &Package,
    registry_package: Option<&RegistryPackage>,
    repository: &Repo,
    tag_commit: Option<&str>,
    diff: &mut Diff,
) -> anyhow::Result<()> {
    let pathbufs_to_check = pathbufs_to_check(package_path, package)?;
    let paths_to_check: Vec<&Path> = pathbufs_to_check.iter().map(|p| p.as_ref()).collect();
    // `0` means "no limit" in the user configuration.
    let max_analyze_commits = if registry_package.is_none() {
        match self.req.max_analyze_commits() {
            0 => u32::MAX,
            n => n,
        }
    } else {
        u32::MAX
    };
    for _ in 0..max_analyze_commits {
        let current_commit_message = repository.current_commit_message()?;
        let current_commit_hash = repository.current_commit_hash()?;
        // Lazy: only evaluated when we actually need to know whether this
        // commit touched files belonging to the package.
        let are_changed_files_in_pkg = || {
            self.are_changed_files_in_package(package_path, repository, &current_commit_hash)
        };
        if let Some(registry_package) = registry_package {
            debug!(
                "package {} found in cargo registry",
                registry_package.package.name
            );
            let registry_package_path = registry_package.package.package_path()?;
            let are_packages_equal = self
                .check_package_equality(repository, package, package_path, registry_package_path)
                .with_context(|| format!("failed to check package equality for `{}` at commit {current_commit_hash}", package.name))?;
            let commit_too_old = || {
                is_commit_too_old(
                    repository,
                    tag_commit,
                    registry_package.published_at_sha1(),
                    &current_commit_hash,
                )
            };
            if are_packages_equal || commit_too_old() {
                debug!(
                    "next version calculated starting from commits after `{current_commit_hash}`"
                );
                // No file changed at all: the only possible difference left
                // is in the dependencies (Cargo.toml / Cargo.lock).
                if diff.commits.is_empty() {
                    self.add_dependencies_update_if_any(
                        diff,
                        &registry_package.package,
                        package,
                        registry_package_path,
                    )?;
                }
                break;
            } else {
                if package.version > registry_package.package.version
                    && diff.is_version_published
                {
                    info!(
                        "{}: local version ({}) > registry version ({}). Only changelog will be updated.",
                        package.name, package.version, registry_package.package.version
                    );
                    diff.set_version_unpublished(registry_package.package.version.clone());
                }
                if are_changed_files_in_pkg()? {
                    debug!("packages contain different files");
                    diff.commits.push(Commit::new(
                        current_commit_hash,
                        current_commit_message.clone(),
                    ));
                }
            }
        } else if are_changed_files_in_pkg()? {
            diff.commits.push(Commit::new(
                current_commit_hash,
                current_commit_message.clone(),
            ));
        }
        // Move to the previous commit that touched the package paths; stop
        // when there is none.
        if let Err(_err) = repository.checkout_previous_commit_at_paths(&paths_to_check) {
            debug!("there are no other commits");
            break;
        }
    }
    Ok(())
}
/// Compare the local package contents with the registry-published copy.
///
/// Returns `false` as soon as the README differs; otherwise compares the
/// package files. Comparing may modify Cargo.lock, so any change to it is
/// reverted before returning.
fn check_package_equality(
&self,
repository: &Repo,
package: &Package,
package_path: &Utf8Path,
registry_package_path: &Utf8Path,
) -> anyhow::Result<bool> {
if crate::is_readme_updated(&package.name, package_path, registry_package_path)? {
debug!("{}: README updated", package.name);
return Ok(false);
}
let cargo_lock_path = self
.get_cargo_lock_path(repository)
.context("failed to determine Cargo.lock path")?;
let are_packages_equal = crate::are_packages_equal(package_path, registry_package_path)
.context("cannot compare packages")?;
// Restore Cargo.lock so the repository stays clean for the next step.
if let Some(cargo_lock_path) = cargo_lock_path.as_deref() {
repository
.checkout(cargo_lock_path)
.context("cannot revert changes introduced when comparing packages")?;
}
Ok(are_packages_equal)
}
/// When a package's files are unchanged, check whether its dependencies
/// changed (Cargo.toml first, then Cargo.lock for executables) and, if so,
/// record a synthetic "chore" commit in `diff`.
///
/// Note: this repairs mojibake present in the previous source, where
/// `&registry_package.dependencies` had been corrupted into an HTML-entity
/// character (`®`), which did not compile.
fn add_dependencies_update_if_any(
    &self,
    diff: &mut Diff,
    registry_package: &Package,
    package: &Package,
    registry_package_path: &Utf8Path,
) -> anyhow::Result<()> {
    if toml_compare::are_toml_dependencies_updated(
        &registry_package.dependencies,
        &package.dependencies,
    ) {
        diff.commits.push(Commit::new(
            NO_COMMIT_ID.to_string(),
            "chore: update Cargo.toml dependencies".to_string(),
        ));
        return Ok(());
    }
    // Cargo.lock only affects the built artifact, so it matters only for
    // packages that produce a binary. Checked lazily (after the
    // `contains_executable` test) because it reads the lock files.
    let lock_dependencies_updated = contains_executable(package)
        && lock_compare::are_lock_dependencies_updated(
            &self.project.cargo_lock_path(),
            registry_package_path,
        )
        .context("Can't check if Cargo.lock dependencies are up to date")?;
    if lock_dependencies_updated {
        diff.commits.push(Commit::new(
            NO_COMMIT_ID.to_string(),
            "chore: update Cargo.lock dependencies".to_string(),
        ));
    } else {
        info!("{}: already up to date", package.name);
    }
    Ok(())
}
/// Locate the project's Cargo.lock inside the repository checkout (the
/// repository may be a temporary copy of the project). Returns `None` when
/// the file does not exist there.
fn get_cargo_lock_path(&self, repository: &Repo) -> anyhow::Result<Option<String>> {
    let lock_path = self.project.cargo_lock_path();
    let relative = fs_utils::strip_prefix(&lock_path, self.project.root())?;
    let candidate = repository.directory().join(relative);
    Ok(candidate.exists().then(|| candidate.to_string()))
}
/// Determine the next version of `p`, honoring (in priority order): the new
/// workspace version for packages that inherit it, the package's
/// `version_group`, and finally the version computed from the package's own
/// diff.
fn get_next_version(
&self,
new_workspace_version: Option<&Version>,
p: &Package,
workspace_version_pkgs: &HashSet<String>,
version_groups: &HashMap<String, Version>,
diff: &Diff,
) -> anyhow::Result<Version> {
let pkg_config = self.req.get_package_config(&p.name);
let next_version = match new_workspace_version {
// The package inherits the workspace version: use the new one.
Some(max_workspace_version) if workspace_version_pkgs.contains(p.name.as_str()) => {
debug!(
"next version of {} is workspace version: {max_workspace_version}",
p.name
);
max_workspace_version.clone()
}
_ => {
if let Some(version_group) = pkg_config.version_group {
// All packages of a version group share the same version.
version_groups
.get(&version_group)
.with_context(|| {
format!("failed to retrieve version for version group {version_group}")
})?
.clone()
} else {
let version_updater = pkg_config.generic.version_updater()?;
p.version.next_from_diff(diff, version_updater)
}
}
};
Ok(next_version)
}
/// Returns whether the files changed in commit `hash` (the currently
/// checked-out commit) intersect the files belonging to the package at
/// `package_path`.
///
/// If listing either set of files fails, this conservatively returns
/// `Ok(true)` ("assume the package changed") after logging the error.
fn are_changed_files_in_package(
&self,
package_path: &Utf8Path,
repository: &Repo,
hash: &str,
) -> anyhow::Result<bool> {
let cargo_lock_path = self
.get_cargo_lock_path(repository)
.context("failed to determine Cargo.lock path")?;
let package_files_res = get_package_files(package_path, repository);
// Listing package files may touch Cargo.lock: restore it.
if let Some(cargo_lock_path) = cargo_lock_path.as_deref() {
repository
.checkout(cargo_lock_path)
.context("cannot revert changes introduced when comparing packages")?;
}
let Ok(package_files) = package_files_res.inspect_err(|e| {
debug!("failed to get package files at commit {hash}: {e:?}");
}) else {
return Ok(true);
};
let Ok(changed_files) = repository.files_of_current_commit().inspect_err(|e| {
warn!("failed to get changed files of commit {hash}: {e:?}");
}) else {
return Ok(true);
};
Ok(!package_files.is_disjoint(&changed_files))
}
}
/// Returns whether cargo-semver-checks should run for `package`: the check
/// must be enabled, the package must expose a library target, and the tool
/// must be installed (a warning is emitted when it is not).
fn should_check_semver(package: &Package, run_semver_check: bool) -> bool {
    if !run_semver_check || !contains_library(package) {
        return false;
    }
    let installed = semver_check::is_cargo_semver_checks_installed();
    if !installed {
        warn!(
            "cargo-semver-checks not installed, skipping semver check. For more information, see https://release-plz.dev/docs/semver-check"
        );
    }
    installed
}
/// Returns whether the package builds at least one binary target.
fn contains_executable(package: &Package) -> bool {
contains_target_kind(package, &TargetKind::Bin)
}
/// Returns whether the package builds at least one library target.
fn contains_library(package: &Package) -> bool {
contains_target_kind(package, &TargetKind::Lib)
}
/// Returns whether any target of the package has the given kind.
fn contains_target_kind(package: &Package, target_kind: &TargetKind) -> bool {
package.targets.iter().any(|t| t.kind.contains(target_kind))
}
/// List the files that `cargo package` would include for the package at
/// `package_path`, as paths relative to the repository root.
///
/// `Cargo.toml.orig` and `.cargo_vcs_info.json` are excluded because they
/// exist only in the packaged archive, not in the repository.
fn get_package_files(
package_path: &Utf8Path,
repository: &Repo,
) -> anyhow::Result<HashSet<Utf8PathBuf>> {
let repository_dir = repository.directory();
crate::get_cargo_package_files(package_path)?
.into_iter()
.filter(|file| file != "Cargo.toml.orig" && file != ".cargo_vcs_info.json")
.map(|file| {
let file_path = package_path.join(file);
// Canonicalize to resolve symlinks and relative components before
// stripping the repository prefix.
let normalized = fs_utils::canonicalize_utf8(&file_path)?;
let relative_path = normalized
.strip_prefix(repository_dir)
.with_context(|| format!("failed to strip {repository_dir} from {normalized}"))?;
Ok(relative_path.to_path_buf())
})
.collect()
}
/// Returns `true` when `current_commit_hash` is already covered by the
/// previous release, i.e. it is an ancestor of the commit tagged with the
/// previous version or of the commit where that version was published.
fn is_commit_too_old(
    repository: &Repo,
    tag_commit: Option<&str>,
    published_at_commit: Option<&str>,
    current_commit_hash: &str,
) -> bool {
    // `Option<&str>` is `Copy`: bind by value instead of the previous
    // needless `.as_ref()`, which produced an `Option<&&str>`.
    if let Some(tag_commit) = tag_commit
        && repository.is_ancestor(current_commit_hash, tag_commit)
    {
        debug!(
            "stopping looking at git history because the current commit ({}) is an ancestor of the commit ({}) tagged with the previous version.",
            current_commit_hash, tag_commit
        );
        return true;
    }
    if let Some(published_commit) = published_at_commit
        && repository.is_ancestor(current_commit_hash, published_commit)
    {
        debug!(
            "stopping looking at git history because the current commit ({}) is an ancestor of the commit ({}) where the previous version was published.",
            current_commit_hash, published_commit
        );
        return true;
    }
    false
}
/// Paths whose git history is relevant for the package: the package
/// directory itself, plus the README when it lives outside of it.
fn pathbufs_to_check(
    package_path: &Utf8Path,
    package: &Package,
) -> anyhow::Result<Vec<Utf8PathBuf>> {
    let readme_outside_package = crate::local_readme_override(package, package_path)?;
    Ok(std::iter::once(package_path.to_path_buf())
        .chain(readme_outside_package)
        .collect())
}
/// Generate the changelog for a release.
///
/// Returns `(full_changelog_content, new_entry_body)`. When the old
/// changelog already contains an entry for `next_version` and the version is
/// not being bumped, the old content is returned unchanged with an empty
/// entry body (no duplicate section).
fn get_changelog(
commits: &[Commit],
next_version: &Version,
changelog_req: Option<ChangelogRequest>,
old_changelog: Option<&str>,
repo_url: Option<&RepoUrl>,
release_link: Option<&str>,
package: &Package,
) -> anyhow::Result<(String, String)> {
let commits: Vec<git_cliff_core::commit::Commit> =
commits.iter().map(|c| c.to_cliff_commit()).collect();
let mut changelog_builder = ChangelogBuilder::new(
commits.clone(),
next_version.to_string(),
package.name.to_string(),
);
if let Some(changelog_req) = changelog_req {
if let Some(release_date) = changelog_req.release_date {
changelog_builder = changelog_builder.with_release_date(release_date);
}
if let Some(config) = changelog_req.changelog_config {
changelog_builder = changelog_builder.with_config(config);
}
if let Some(link) = release_link {
changelog_builder = changelog_builder.with_release_link(link);
}
// Remote info enables PR links and contributor mentions in the template.
if let Some(repo_url) = repo_url {
let remote = Remote {
owner: repo_url.owner.clone(),
repo: repo_url.name.clone(),
link: repo_url.full_host(),
contributors: get_contributors(&commits),
};
changelog_builder = changelog_builder.with_remote(remote);
let pr_link = repo_url.git_pr_link();
changelog_builder = changelog_builder.with_pr_link(pr_link);
}
// NOTE(review): "published" here means the version is being bumped in
// this run (next != current) — TODO confirm the naming intent.
let is_package_published = next_version != &package.version;
let last_version = old_changelog.and_then(|old_changelog| {
changelog_parser::last_version_from_str(old_changelog)
.ok()
.flatten()
});
if is_package_published {
let last_version = last_version.unwrap_or(package.version.to_string());
changelog_builder = changelog_builder.with_previous_version(last_version);
} else if let Some(last_version) = last_version
&& let Some(old_changelog) = old_changelog
&& last_version == next_version.to_string()
{
// The changelog already has an entry for this version: keep it as is.
return Ok((old_changelog.to_string(), String::new()));
}
}
let new_changelog = changelog_builder.build();
let changelog = match old_changelog {
Some(old_changelog) => new_changelog.prepend(old_changelog)?,
None => new_changelog.generate()?, };
let body_only =
new_changelog_entry(changelog_builder).context("can't determine changelog body")?;
Ok((changelog, body_only.unwrap_or_default()))
}
/// Render only the body of the new changelog entry, by regenerating the
/// changelog with empty header and footer. Returns `None` when the builder
/// has no configuration.
fn new_changelog_entry(changelog_builder: ChangelogBuilder) -> anyhow::Result<Option<String>> {
    let Some(cfg) = changelog_builder.config().cloned() else {
        return Ok(None);
    };
    // Blank out header/footer so only the release section remains.
    let stripped_config = Config {
        changelog: ChangelogConfig {
            header: Some(String::new()),
            footer: Some(String::new()),
            ..cfg.changelog
        },
        ..cfg
    };
    let changelog = changelog_builder.with_config(stripped_config).build();
    let entry = changelog.generate()?;
    Ok(Some(entry.trim().to_string()))
}
fn get_contributors(commits: &[git_cliff_core::commit::Commit]) -> Vec<RemoteContributor> {
let mut unique_contributors = HashSet::new();
commits
.iter()
.filter_map(|c| c.remote.clone())
.filter(|remote| unique_contributors.insert(remote.username.clone()))
.collect()
}
/// Translate the package's on-disk path into the corresponding path inside
/// the repository checkout.
fn get_package_path(
    package: &Package,
    repository: &Repo,
    project_root: &Utf8Path,
) -> anyhow::Result<Utf8PathBuf> {
    get_repo_path(package.package_path()?, repository, project_root)
}
/// Rebase `old_path` (a path under `project_root`) onto the repository
/// directory, preserving the relative part.
fn get_repo_path(
    old_path: &Utf8Path,
    repository: &Repo,
    project_root: &Utf8Path,
) -> anyhow::Result<Utf8PathBuf> {
    let relative = fs_utils::strip_prefix(old_path, project_root)
        .context("error while retrieving package_path")?;
    Ok(repository.directory().join(relative))
}
#[cfg(test)]
mod tests {
use super::*;
// When the old changelog already contains an entry for the next version,
// `get_changelog` must return the old content unchanged (no duplicate
// entry) and an empty new-entry body.
#[test]
fn same_version_is_not_added_to_changelog() {
let commits = vec![
Commit::new(crate::NO_COMMIT_ID.to_string(), "fix: myfix".to_string()),
Commit::new(crate::NO_COMMIT_ID.to_string(), "simple update".to_string()),
];
// Same version as the existing changelog entry below.
let next_version = Version::new(1, 1, 0);
let changelog_req = ChangelogRequest::default();
let old = r"## [1.1.0] - 1970-01-01
### fix bugs
- my awesomefix
### other
- complex update
";
let new = get_changelog(
&commits,
&next_version,
Some(changelog_req),
Some(old),
None,
None,
&fake_package::FakePackage::new("my_package").into(),
)
.unwrap();
assert_eq!(old, new.0);
}
}