#![doc(
html_logo_url = "https://raw.githubusercontent.com/orhun/git-cliff/main/website/static/img/git-cliff.png",
html_favicon_url = "https://raw.githubusercontent.com/orhun/git-cliff/main/website/static/favicon/favicon.ico"
)]
pub mod args;
pub mod logger;
use std::env;
use std::fs::{self, File};
use std::io::{self, Write};
use std::path::{Path, PathBuf};
use std::time::{SystemTime, UNIX_EPOCH};
use args::{BumpOption, Opt, Sort, Strip};
use clap::ValueEnum;
use git_cliff_core::changelog::Changelog;
use git_cliff_core::commit::{Commit, Range};
use git_cliff_core::config::{CommitParser, Config};
use git_cliff_core::embed::{BuiltinConfig, EmbeddedConfig};
use git_cliff_core::error::{Error, Result};
use git_cliff_core::release::Release;
use git_cliff_core::repo::{Repository, SubmoduleRange};
use git_cliff_core::{DEFAULT_CONFIG, IGNORE_FILE};
use glob::Pattern;
/// Checks crates.io for a newer published version of git-cliff and logs an
/// informational message when a stable update is available.
///
/// Pre-release versions (non-empty semver `pre` component) are not announced.
/// Lookup failures are silently ignored — this is a best-effort check.
/// Only compiled when the `update-informer` feature is enabled.
#[cfg(feature = "update-informer")]
pub fn check_new_version() {
    use update_informer::Check;
    let pkg_name = env!("CARGO_PKG_NAME");
    let pkg_version = env!("CARGO_PKG_VERSION");
    let registry = update_informer::registry::Crates;
    let informer = update_informer::new(registry, pkg_name, pkg_version);
    // Errors from the registry lookup are deliberately swallowed.
    let Some(new_version) = informer.check_version().ok().flatten() else {
        return;
    };
    // Only announce stable releases.
    if new_version.semver().pre.is_empty() {
        tracing::info!(
            "A new version of {pkg_name} is available: v{pkg_version} -> {new_version}",
        );
    }
}
/// Determines the commit range to generate the changelog for, based on the
/// `--range`, `--unreleased`, `--latest` and `--current` arguments.
///
/// Returns `Ok(None)` when no range restriction applies (the whole history
/// should be processed).
///
/// # Errors
///
/// Returns an error when `--current` is used but no tag points at the current
/// commit, or when the current tag has no predecessor to range from.
fn determine_commit_range(
    args: &Opt,
    config: &Config,
    repository: &Repository,
) -> Result<Option<String>> {
    // All tags matching the configured pattern, in repository tag order.
    let tags = repository.tags(
        &config.git.tag_pattern,
        args.topo_order,
        args.use_branch_tags,
    )?;
    // Start from an explicitly provided `--range`, if any.
    let mut commit_range = args.range.clone();
    if args.unreleased {
        // Everything after the most recent tag.
        if let Some(last_tag) = tags.last().map(|(k, _)| k) {
            commit_range = Some(format!("{last_tag}..HEAD"));
        }
    } else if args.latest || args.current {
        if tags.len() < 2 {
            // Fewer than two tags: range from the last listed commit to the
            // first (and possibly only) tag.
            let commits = repository.commits(None, None, None, config.git.topo_order_commits)?;
            if let (Some(tag1), Some(tag2)) = (
                // presumably the oldest commit — assumes commits are listed
                // newest-first; TODO confirm against Repository::commits
                commits.last().map(|c| c.id().to_string()),
                tags.get_index(0).map(|(k, _)| k),
            ) {
                if tags.len() == 1 {
                    // A single tag covers all history up to itself.
                    commit_range = Some(tag2.to_owned());
                } else {
                    commit_range = Some(format!("{tag1}..{tag2}"));
                }
            }
        } else {
            // Default (`--latest`): range between the two most recent tags.
            let mut tag_index = tags.len() - 2;
            if args.current {
                // `--current`: find the tag pointing at HEAD and range from
                // the tag immediately preceding it.
                if let Some(current_tag_index) = repository.current_tag().as_ref().and_then(|tag| {
                    tags.iter()
                        .enumerate()
                        .find(|(_, (_, v))| v.name == tag.name)
                        .map(|(i, _)| i)
                }) {
                    match current_tag_index.checked_sub(1) {
                        Some(i) => tag_index = i,
                        None => {
                            // The current tag is the very first one; there is
                            // no predecessor to range from.
                            return Err(Error::ChangelogError(String::from(
                                "No suitable tags found. Maybe run with '--topo-order'?",
                            )));
                        }
                    }
                } else {
                    return Err(Error::ChangelogError(String::from(
                        "No tag exists for the current commit",
                    )));
                }
            }
            if let (Some(tag1), Some(tag2)) = (
                tags.get_index(tag_index).map(|(k, _)| k),
                tags.get_index(tag_index + 1).map(|(k, _)| k),
            ) {
                commit_range = Some(format!("{tag1}..{tag2}"));
            }
        }
    }
    Ok(commit_range)
}
/// Collects the commits made inside each submodule over the span of the given
/// release and stores them in `release.submodule_commits`, keyed by the
/// submodule path.
///
/// Submodules whose commit range cannot be read are skipped silently.
fn process_submodules(
    repository: &'static Repository,
    release: &mut Release,
    topo_order_commits: bool,
) -> Result<()> {
    // Boundary commits of the release in the parent repository.
    let first_commit = release
        .previous
        .as_ref()
        .and_then(|previous_release| previous_release.commit_id.clone())
        .and_then(|commit_id| repository.find_commit(&commit_id));
    let last_commit = release
        .commit_id
        .clone()
        .and_then(|commit_id| repository.find_commit(&commit_id));
    tracing::debug!("Processing submodule commits in {first_commit:?}..{last_commit:?}");
    // Without a resolvable release head there is nothing to inspect.
    let Some(last_commit) = last_commit else {
        return Ok(());
    };
    let submodule_ranges = repository.submodules_range(first_commit.as_ref(), &last_commit)?;
    for SubmoduleRange {
        repository: sub_repo,
        range,
    } in &submodule_ranges
    {
        // Best-effort: a submodule that fails to yield commits is skipped.
        let Ok(raw_commits) = sub_repo.commits(Some(range), None, None, topo_order_commits) else {
            continue;
        };
        let commits = raw_commits.iter().map(Commit::from).collect();
        let submodule_path = sub_repo.path().to_string_lossy().into_owned();
        release.submodule_commits.insert(submodule_path, commits);
    }
    Ok(())
}
/// Writes a configuration file to the given path.
///
/// When `name` is `Some`, the named built-in configuration is written;
/// otherwise the default embedded configuration is used.
///
/// # Errors
///
/// Returns an error when the named built-in configuration does not exist or
/// the file cannot be written.
pub fn init_config(name: Option<&str>, config_path: &Path) -> Result<()> {
    // Resolve the contents: a named built-in config or the embedded default.
    let contents = match name {
        Some(name) => BuiltinConfig::get_config(name.to_string())?,
        None => EmbeddedConfig::get_config()?,
    };
    // NOTE: the previous comparison against DEFAULT_CONFIG was a no-op —
    // both branches produced the same PathBuf — so the path is used directly.
    tracing::info!(
        "Saving the configuration file{} to {}",
        name.map(|v| format!(" ({v})")).unwrap_or_default(),
        config_path.display(),
    );
    fs::write(config_path, contents)?;
    Ok(())
}
/// Processes the tags and commits of a single repository and builds the list
/// of releases: one entry per (kept) tag plus an initial entry for commits
/// that are not tagged yet.
///
/// Also fills in remote settings inferred from the upstream remote, narrows
/// the commit scope to the current directory when invoked from a
/// subdirectory, and (optionally) collects submodule commits per release.
///
/// # Errors
///
/// Returns an error when the repository, its tags, or its commits cannot be
/// read, or when a glob pattern fails to compile.
fn process_repository<'a>(
    repository: &'static Repository,
    config: &mut Config,
    args: &Opt,
) -> Result<Vec<Release<'a>>> {
    // Tags matching the configured pattern.
    let mut tags = repository.tags(
        &config.git.tag_pattern,
        args.topo_order,
        args.use_branch_tags,
    )?;
    let skip_regex = config.git.skip_tags.as_ref();
    let ignore_regex = config.git.ignore_tags.as_ref();
    let count_tags = config.git.count_tags.as_ref();
    let recurse_submodules = config.git.recurse_submodules.unwrap_or(false);
    // Filter the tags that should not produce a release entry.
    tags.retain(|_, tag| {
        let name = &tag.name;
        // Tags matching `skip_tags` are kept in the map regardless of the
        // other filters (returning true here short-circuits them).
        let skip = skip_regex.is_some_and(|r| r.is_match(name));
        if skip {
            return true;
        }
        // `count_tags` acts as an allow-list when set.
        let count = count_tags.is_none_or(|r| {
            let count_tag = r.is_match(name);
            if count_tag {
                tracing::debug!("Counting release: {name}");
            }
            count_tag
        });
        // `ignore_tags` acts as a deny-list; an empty pattern is treated as
        // "ignore nothing".
        let ignore = ignore_regex.is_some_and(|r| {
            if r.as_str().trim().is_empty() {
                return false;
            }
            let ignore_tag = r.is_match(name);
            if ignore_tag {
                tracing::debug!("Ignoring release: {name}");
            }
            ignore_tag
        });
        count && !ignore
    });
    // If no remote is configured, infer one from the upstream remote. This
    // fills the first unset remote in a fixed order (github, gitlab, gitea,
    // bitbucket) — each `else if` only runs when the previous remote is set.
    if !config.remote.is_any_set() {
        match repository.upstream_remote() {
            Ok(remote) => {
                if !config.remote.github.is_set() {
                    tracing::debug!("No GitHub remote is set, using remote: {remote}");
                    config.remote.github.owner = remote.owner;
                    config.remote.github.repo = remote.repo;
                    config.remote.github.is_custom = remote.is_custom;
                } else if !config.remote.gitlab.is_set() {
                    tracing::debug!("No GitLab remote is set, using remote: {remote}");
                    config.remote.gitlab.owner = remote.owner;
                    config.remote.gitlab.repo = remote.repo;
                    config.remote.gitlab.is_custom = remote.is_custom;
                } else if !config.remote.gitea.is_set() {
                    tracing::debug!("No Gitea remote is set, using remote: {remote}");
                    config.remote.gitea.owner = remote.owner;
                    config.remote.gitea.repo = remote.repo;
                    config.remote.gitea.is_custom = remote.is_custom;
                } else if !config.remote.bitbucket.is_set() {
                    tracing::debug!("No Bitbucket remote is set, using remote: {remote}");
                    config.remote.bitbucket.owner = remote.owner;
                    config.remote.bitbucket.repo = remote.repo;
                    config.remote.bitbucket.is_custom = remote.is_custom;
                }
            }
            Err(e) => {
                // Best-effort: a missing upstream remote is not fatal.
                tracing::debug!("Failed to get remote from repository: {e:?}");
            }
        }
    }
    if args.use_native_tls {
        config.remote.enable_native_tls();
    }
    tracing::trace!("Arguments: {args:#?}");
    tracing::trace!("Config: {config:#?}");
    // Commit range to process (e.g. "v1.0.0..HEAD"), or None for everything.
    let commit_range = determine_commit_range(args, config, repository)?;
    // When invoked from a subdirectory of the repository (and no explicit
    // paths/workdir were given), restrict the changelog to changes under
    // that directory via a "<cwd>/**/*" glob.
    let cwd = env::current_dir()?;
    let mut include_path = config.git.include_paths.clone();
    if let Ok(root) = repository.root_path() {
        if cwd.starts_with(&root) &&
            cwd != root &&
            args.repository.as_ref().is_none_or(Vec::is_empty) &&
            args.workdir.is_none() &&
            include_path.is_empty()
        {
            let path = cwd.join("**").join("*");
            if let Ok(stripped) = path.strip_prefix(root) {
                tracing::info!(
                    "Including changes from the current directory: {}",
                    cwd.display()
                );
                include_path = vec![Pattern::new(stripped.to_string_lossy().as_ref())?];
            }
        }
    }
    let include_path = (!include_path.is_empty()).then_some(include_path);
    let exclude_path =
        (!config.git.exclude_paths.is_empty()).then_some(config.git.exclude_paths.clone());
    let mut commits = repository.commits(
        commit_range.as_deref(),
        include_path,
        exclude_path,
        config.git.topo_order_commits,
    )?;
    if let Some(commit_limit_value) = config.git.limit_commits {
        commits.truncate(commit_limit_value);
    }
    // Releases accumulate here; a new (empty) entry is pushed whenever a
    // tagged commit closes the current one.
    let mut releases = vec![Release::default()];
    let mut tag_timestamp = None;
    if let Some(ref tag) = args.tag {
        // `--tag` attaches the given tag to the first listed commit
        // (presumably the newest — note the reversed iteration below),
        // unless that commit is already tagged.
        if let Some(commit_id) = commits.first().map(|c| c.id().to_string()) {
            match tags.get(&commit_id) {
                Some(tag) => {
                    tracing::warn!("There is already a tag ({}) for {}", tag.name, commit_id);
                    // Keep the existing commit time instead of "now".
                    tag_timestamp = Some(commits[0].time().seconds());
                }
                None => {
                    tags.insert(commit_id, repository.resolve_tag(tag));
                }
            }
        } else {
            // No commits in range: emit an empty release carrying the tag,
            // timestamped with the current time.
            releases[0].version = Some(tag.clone());
            releases[0].timestamp = Some(
                SystemTime::now()
                    .duration_since(UNIX_EPOCH)?
                    .as_secs()
                    .try_into()?,
            );
        }
    }
    let mut previous_release = Release::default();
    let mut first_processed_tag = None;
    let repository_path = repository.root_path()?.to_string_lossy().into_owned();
    // Walk commits in reverse (oldest-first) and split them into releases at
    // tag boundaries.
    for git_commit in commits.iter().rev() {
        let release = releases.last_mut().unwrap();
        let mut commit = Commit::from(git_commit);
        commit.statistics = repository.commit_statistics(git_commit)?;
        let commit_id = commit.id.clone();
        release.commits.push(commit);
        release.repository = Some(repository_path.clone());
        release.commit_id = Some(commit_id);
        if let Some(tag) = tags.get(release.commit_id.as_ref().unwrap()) {
            // This commit is tagged: finalize the current release.
            release.version = Some(tag.name.clone());
            release.message.clone_from(&tag.message);
            release.timestamp = if args.tag.as_deref() == Some(tag.name.as_str()) {
                // The tag injected via `--tag` gets "now", unless the commit
                // was already tagged (then its original time is reused).
                match tag_timestamp {
                    Some(timestamp) => Some(timestamp),
                    None => Some(
                        SystemTime::now()
                            .duration_since(UNIX_EPOCH)?
                            .as_secs()
                            .try_into()?,
                    ),
                }
            } else {
                Some(git_commit.time().seconds())
            };
            if first_processed_tag.is_none() {
                first_processed_tag = Some(tag);
            }
            // Link to the previous release; only one level of `previous` is
            // kept (the chain is truncated).
            previous_release.previous = None;
            release.previous = Some(Box::new(previous_release));
            previous_release = release.clone();
            releases.push(Release::default());
        }
    }
    debug_assert!(!releases.is_empty());
    if releases.len() > 1 {
        // Give the trailing (unreleased) entry its predecessor too.
        previous_release.previous = None;
        releases.last_mut().unwrap().previous = Some(Box::new(previous_release));
    }
    // Commits were gathered oldest-first; flip per release for `--sort newest`.
    if args.sort == Sort::Newest {
        for release in &mut releases {
            release.commits.reverse();
        }
    }
    // `--with-commit`: append synthetic commits to the unreleased entry.
    if let Some(custom_commits) = &args.with_commit {
        releases
            .last_mut()
            .unwrap()
            .commits
            .extend(custom_commits.iter().cloned().map(Commit::from));
    }
    // If the oldest processed release has no versioned predecessor (e.g. the
    // commit range started mid-history), attach the tag just before the
    // range so templates can still show a previous version.
    if releases[0]
        .previous
        .as_ref()
        .and_then(|p| p.version.as_ref())
        .is_none()
    {
        let first_tag = first_processed_tag
            .map(|tag| {
                tags.iter()
                    .enumerate()
                    .find(|(_, (_, v))| v.name == tag.name)
                    .and_then(|(i, _)| i.checked_sub(1))
                    .and_then(|i| tags.get_index(i))
            })
            .or_else(|| Some(tags.last()))
            .flatten();
        if let Some((commit_id, tag)) = first_tag {
            let previous_release = Release {
                commit_id: Some(commit_id.clone()),
                version: Some(tag.name.clone()),
                timestamp: Some(
                    repository
                        .find_commit(commit_id)
                        .map(|v| v.time().seconds())
                        .unwrap_or_default(),
                ),
                ..Default::default()
            };
            releases[0].previous = Some(Box::new(previous_release));
        }
    }
    for release in &mut releases {
        // Record the first..last commit span of each release (direction
        // depends on the sort order).
        if !release.commits.is_empty() {
            release.commit_range = Some(match args.sort {
                Sort::Oldest => Range::new(
                    release.commits.first().unwrap(),
                    release.commits.last().unwrap(),
                ),
                Sort::Newest => Range::new(
                    release.commits.last().unwrap(),
                    release.commits.first().unwrap(),
                ),
            });
        }
        if recurse_submodules {
            process_submodules(repository, release, config.git.topo_order_commits)?;
        }
    }
    // `--with-tag-message` overrides the message of the latest non-empty
    // release.
    if let Some(message) = &args.with_tag_message {
        if let Some(latest_release) = releases
            .iter_mut()
            .rfind(|release| !release.commits.is_empty())
        {
            latest_release.message = Some(message.to_owned());
        }
    }
    Ok(releases)
}
/// Runs `git-cliff` with the given arguments and returns the changelog,
/// applying no extra modification to it.
pub fn run<'a>(args: Opt) -> Result<Changelog<'a>> {
    // Delegate to the modifier-aware entry point with a no-op modifier.
    run_with_changelog_modifier(args, |_changelog| Ok(()))
}
/// Runs `git-cliff` with the given arguments, builds the changelog, and
/// applies `changelog_modifier` to it before returning.
///
/// Resolves the configuration (URL, built-in, file, manifest, parent
/// directory, or embedded default — in that priority order), merges CLI
/// arguments over it, then builds the changelog either from a saved context
/// (`--from-context`) or by processing the given repositories.
///
/// # Errors
///
/// Returns an error when the configuration cannot be resolved, argument
/// combinations are invalid, a repository cannot be opened, or changelog
/// construction fails.
pub fn run_with_changelog_modifier<'a>(
    mut args: Opt,
    changelog_modifier: impl FnOnce(&mut Changelog) -> Result<()>,
) -> Result<Changelog<'a>> {
    // Check whether the config argument names a built-in configuration.
    let builtin_config = BuiltinConfig::parse(args.config.to_string_lossy().to_string());
    // Rebase all path-like arguments onto the working directory, if given.
    if let Some(ref workdir) = args.workdir {
        args.config = workdir.join(args.config);
        match args.repository.as_mut() {
            Some(repository) => {
                repository
                    .iter_mut()
                    .for_each(|r| *r = workdir.join(r.clone()));
            }
            None => args.repository = Some(vec![workdir.clone()]),
        }
        if let Some(changelog) = args.prepend {
            args.prepend = Some(workdir.join(changelog));
        }
        // Restrict the changelog to changes under the working directory.
        // The `**/*` glob is required (consistent with the current-directory
        // narrowing in `process_repository`) so the pattern matches files
        // *inside* the directory; the bare directory path would match nothing.
        args.include_path = Some(vec![Pattern::new(
            workdir.join("**").join("*").to_string_lossy().as_ref(),
        )?]);
    }
    // Fall back to the user-level config path when the given one is absent.
    let mut path = args.config.clone();
    if !path.exists() {
        if let Some(config_path) = Config::retrieve_user_config_path() {
            path = config_path;
        }
    }
    // Resolve the configuration, in priority order: remote URL, built-in,
    // local file, manifest, nearest ancestor directory, embedded default.
    let mut config = if let Some(url) = &args.config_url {
        tracing::debug!("Using configuration file from: {url}");
        #[cfg(feature = "remote")]
        {
            reqwest::blocking::get(url.clone())?
                .error_for_status()?
                .text()?
                .parse()?
        }
        #[cfg(not(feature = "remote"))]
        unreachable!("This option is not available without the 'remote' build-time feature");
    } else if let Ok((config, name)) = builtin_config {
        tracing::info!("Using built-in configuration file: {name}");
        config
    } else if path.exists() {
        Config::load(&path)?
    } else if let Some(contents) = Config::read_from_manifest()? {
        contents.parse()?
    } else if let Some(discovered_path) = env::current_dir()?
        .ancestors()
        .find_map(Config::retrieve_project_config_path)
    {
        tracing::info!(
            "Using configuration from parent directory: {}",
            discovered_path.display()
        );
        Config::load(&discovered_path)?
    } else {
        #[allow(clippy::unnecessary_debug_formatting)]
        if !args.context {
            tracing::warn!(
                "{:?} is not found, using the default configuration",
                args.config
            );
        }
        EmbeddedConfig::parse()?
    };
    // From here on, CLI arguments take precedence over the configuration.
    let output = args.output.clone().or(config.changelog.output.clone());
    match args.strip {
        Some(Strip::Header) => {
            config.changelog.header = None;
        }
        Some(Strip::Footer) => {
            config.changelog.footer = None;
        }
        Some(Strip::All) => {
            config.changelog.header = None;
            config.changelog.footer = None;
        }
        None => {}
    }
    if args.prepend.is_some() {
        // Prepending a full footer would duplicate it in the target file.
        config.changelog.footer = None;
        if !(args.unreleased || args.latest || args.range.is_some()) {
            return Err(Error::ArgumentError(String::from(
                "'-u' or '-l' is not specified",
            )));
        }
    }
    if output.is_some() && args.prepend.is_some() && output.as_ref() == args.prepend.as_ref() {
        return Err(Error::ArgumentError(String::from(
            "'-o' and '-p' can only be used together if they point to different files",
        )));
    }
    if let Some(body) = args.body.clone() {
        config.changelog.body = body;
    }
    // `Oldest` is the CLI default, so only then defer to the config value.
    if args.sort == Sort::Oldest {
        args.sort = Sort::from_str(&config.git.sort_commits, true)
            .expect("Incorrect config value for 'sort_commits'");
    }
    if !args.topo_order {
        args.topo_order = config.git.topo_order;
    }
    if !args.use_branch_tags {
        args.use_branch_tags = config.git.use_branch_tags;
    }
    // Remote tokens from the CLI/environment override the configuration.
    if args.github_token.is_some() {
        config.remote.github.token.clone_from(&args.github_token);
    }
    if args.gitlab_token.is_some() {
        config.remote.gitlab.token.clone_from(&args.gitlab_token);
    }
    if args.gitea_token.is_some() {
        config.remote.gitea.token.clone_from(&args.gitea_token);
    }
    if args.bitbucket_token.is_some() {
        config
            .remote
            .bitbucket
            .token
            .clone_from(&args.bitbucket_token);
    }
    if args.azure_devops_token.is_some() {
        config
            .remote
            .azure_devops
            .token
            .clone_from(&args.azure_devops_token);
    }
    if args.offline {
        config.remote.offline = args.offline;
    }
    // Explicit remote repositories from the CLI override the configuration.
    if let Some(ref remote) = args.github_repo {
        config.remote.github.owner.clone_from(&remote.0.owner);
        config.remote.github.repo.clone_from(&remote.0.repo);
        config.remote.github.is_custom = true;
    }
    if let Some(ref remote) = args.gitlab_repo {
        config.remote.gitlab.owner.clone_from(&remote.0.owner);
        config.remote.gitlab.repo.clone_from(&remote.0.repo);
        config.remote.gitlab.is_custom = true;
    }
    if let Some(ref remote) = args.bitbucket_repo {
        config.remote.bitbucket.owner.clone_from(&remote.0.owner);
        config.remote.bitbucket.repo.clone_from(&remote.0.repo);
        config.remote.bitbucket.is_custom = true;
    }
    if let Some(ref remote) = args.gitea_repo {
        config.remote.gitea.owner.clone_from(&remote.0.owner);
        config.remote.gitea.repo.clone_from(&remote.0.repo);
        config.remote.gitea.is_custom = true;
    }
    if let Some(ref remote) = args.azure_devops_repo {
        config.remote.azure_devops.owner.clone_from(&remote.0.owner);
        config.remote.azure_devops.repo.clone_from(&remote.0.repo);
        config.remote.azure_devops.is_custom = true;
    }
    // `--no-exec` disables all external command execution.
    if args.no_exec {
        config
            .git
            .commit_preprocessors
            .iter_mut()
            .for_each(|v| v.replace_command = None);
        config
            .changelog
            .postprocessors
            .iter_mut()
            .for_each(|v| v.replace_command = None);
    }
    if args.skip_tags.is_some() {
        config.git.skip_tags.clone_from(&args.skip_tags);
    }
    // An empty skip pattern is treated as unset.
    config.git.skip_tags = config.git.skip_tags.filter(|r| !r.as_str().is_empty());
    if args.tag_pattern.is_some() {
        config.git.tag_pattern.clone_from(&args.tag_pattern);
    }
    if args.tag.is_some() {
        config.bump.initial_tag.clone_from(&args.tag);
    }
    if args.ignore_tags.is_some() {
        config.git.ignore_tags.clone_from(&args.ignore_tags);
    }
    if args.count_tags.is_some() {
        config.git.count_tags.clone_from(&args.count_tags);
    }
    if let Some(include_path) = &args.include_path {
        config
            .git
            .include_paths
            .extend(include_path.iter().cloned());
    }
    if let Some(exclude_path) = &args.exclude_path {
        config
            .git
            .exclude_paths
            .extend(exclude_path.iter().cloned());
    }
    if let Some(BumpOption::Specific(bump_type)) = args.bump {
        config.bump.bump_type = Some(bump_type);
    }
    // Build the changelog: either from a saved JSON context, or by
    // processing the repositories.
    let mut changelog: Changelog = if let Some(context_path) = args.from_context {
        // `-` means "read the context from stdin".
        let mut input: Box<dyn io::Read> = if context_path == Path::new("-") {
            Box::new(io::stdin())
        } else {
            Box::new(File::open(context_path)?)
        };
        let mut changelog = Changelog::from_context(&mut input, config)?;
        changelog.add_remote_context()?;
        changelog
    } else {
        let repositories: Vec<Repository> = if let Some(paths) = &args.repository {
            paths
                .iter()
                .map(|p| {
                    let abs_path = fs::canonicalize(p)?;
                    Repository::discover(abs_path)
                })
                .collect::<Result<Vec<_>>>()?
        } else {
            let cwd = env::current_dir()?;
            vec![Repository::discover(cwd)?]
        };
        let mut releases = Vec::<Release>::new();
        let mut commit_range = None;
        for repository in repositories {
            // Commits listed in the ignore file or `--skip-commit` are turned
            // into skipping commit parsers (prepended so they win).
            let mut skip_list = Vec::new();
            let ignore_file = repository.root_path()?.join(IGNORE_FILE);
            if ignore_file.exists() {
                let contents = fs::read_to_string(ignore_file)?;
                let commits = contents
                    .lines()
                    .filter(|v| !(v.starts_with('#') || v.trim().is_empty()))
                    .map(|v| String::from(v.trim()))
                    .collect::<Vec<String>>();
                skip_list.extend(commits);
            }
            if let Some(ref skip_commit) = args.skip_commit {
                skip_list.extend(skip_commit.clone());
            }
            for sha1 in skip_list {
                config.git.commit_parsers.insert(0, CommitParser {
                    sha: Some(sha1.clone()),
                    skip: Some(true),
                    ..Default::default()
                });
            }
            commit_range = determine_commit_range(&args, &config, &repository)?;
            // The repository is leaked to obtain the `'static` reference
            // required by `process_repository`.
            releases.extend(process_repository(
                Box::leak(Box::new(repository)),
                &mut config,
                &args,
            )?);
        }
        Changelog::new(releases, config, commit_range.as_deref())?
    };
    changelog_modifier(&mut changelog)?;
    Ok(changelog)
}
/// Writes the result of a run to the given writer: the bumped version (for
/// `--bumped-version`), the raw context (for `--context`), or the rendered
/// changelog, honoring `--prepend` and `--output`.
///
/// # Errors
///
/// Returns an error when version bumping fails, the bumped version violates
/// the tag pattern, or any file/write operation fails.
pub fn write_changelog<W: io::Write>(
    args: &Opt,
    mut changelog: Changelog<'_>,
    mut out: W,
) -> Result<()> {
    // Output target: CLI flag takes precedence over the configuration.
    let output = args
        .output
        .clone()
        .or(changelog.config.changelog.output.clone());
    if args.bump.is_some() || args.bumped_version {
        // Version of the most recent release, falling back to its
        // predecessor's version.
        let current_version = changelog.releases.first().and_then(|release| {
            release.version.clone().or_else(|| {
                release
                    .previous
                    .as_ref()
                    .and_then(|previous| previous.version.clone())
            })
        });
        let next_version = if let Some(next_version) = changelog.bump_version()? {
            if current_version.as_ref() == Some(&next_version) {
                tracing::warn!(
                    "The next version is the same as the current version, there is nothing to bump"
                );
            }
            next_version
        } else if let Some(last_version) =
            changelog.releases.first().cloned().and_then(|v| v.version)
        {
            // Nothing to bump; keep reporting the existing version.
            tracing::warn!("There is nothing to bump");
            last_version
        } else if changelog.releases.is_empty() {
            // No releases at all: start from the configured initial tag.
            changelog.config.bump.get_initial_tag()
        } else {
            return Ok(());
        };
        // The bumped version must still satisfy the tag pattern.
        if let Some(tag_pattern) = &changelog.config.git.tag_pattern {
            if !tag_pattern.is_match(&next_version) {
                return Err(Error::ChangelogError(format!(
                    "Next version ({next_version}) does not match the tag pattern: {tag_pattern}",
                )));
            }
        }
        if args.bumped_version {
            // Print only the version. When an output file is configured,
            // `out` presumably points at it, so the version goes to stdout
            // instead. NOTE(review): this checks the *configured* output,
            // not the merged `output` above — confirm that is intended.
            if changelog.config.changelog.output.is_none() {
                writeln!(out, "{next_version}")?;
            } else {
                writeln!(io::stdout(), "{next_version}")?;
            }
            return Ok(());
        }
    }
    if args.context {
        // `--context`: dump the changelog context instead of rendering.
        changelog.write_context(&mut out)?;
        return Ok(());
    }
    if let Some(path) = &args.prepend {
        // Prepend the freshly generated changelog to the existing file.
        let changelog_before = fs::read_to_string(path)?;
        let mut out = io::BufWriter::new(File::create(path)?);
        changelog.prepend(changelog_before, &mut out)?;
    }
    // Render to `out` unless prepending was the only requested output.
    if output.is_some() || args.prepend.is_none() {
        changelog.generate(&mut out)?;
    }
    Ok(())
}