use std::fmt::Write;
use std::ops::Range;
use std::path::{Path, PathBuf};
use std::process::Stdio;
use std::sync::OnceLock;
use std::time::{SystemTime, UNIX_EPOCH};
use annotate_snippets::{AnnotationKind, Level, Renderer, Snippet, renderer::DecorStyle};
use anyhow::{Context, Result};
use futures::{StreamExt, TryStreamExt};
use itertools::Itertools;
use lazy_regex::regex;
use owo_colors::OwoColorize;
use prek_consts::PRE_COMMIT_HOOKS_YAML;
use prek_consts::env_vars::EnvVars;
use rustc_hash::FxHashMap;
use rustc_hash::FxHashSet;
use semver::Version;
use toml_edit::DocumentMut;
use tracing::{debug, trace, warn};
use crate::cli::ExitStatus;
use crate::cli::reporter::AutoUpdateReporter;
use crate::cli::run::Selectors;
use crate::config::{Repo, looks_like_sha};
use crate::fs::{CWD, Simplified};
use crate::printer::Printer;
use crate::run::CONCURRENCY;
use crate::store::Store;
use crate::workspace::{Project, Workspace};
use crate::yaml::serialize_yaml_scalar;
use crate::{config, git};
/// A resolved `rev:` value for a remote repo, plus the frozen-comment ref
/// when the rev is pinned to a commit SHA.
#[derive(Default, Clone)]
struct Revision {
    // The value written to `rev:` in the config file.
    rev: String,
    // The human-readable ref recorded in a `# frozen: <ref>` comment;
    // set only when `rev` is a pinned commit.
    frozen: Option<String>,
}

/// One occurrence of a (repo, rev, hook-ids) combination inside a
/// project's config file.
struct RepoUsage<'a> {
    project: &'a Project,
    // Total number of remote repos in the project (sizes the revision slots).
    remote_count: usize,
    // Position of this repo among the project's remote repos.
    remote_index: usize,
    // 1-based line number of the corresponding `rev:` line.
    rev_line_number: usize,
    // Value parsed from an existing `# frozen: <ref>` comment, if any.
    current_frozen: Option<String>,
    // Location of that comment in the file, for diagnostics.
    current_frozen_site: Option<FrozenCommentSite>,
}

/// A distinct (current rev, required hook ids) combination for one repo
/// URL, together with every config location that uses it.
struct RepoTarget<'a> {
    repo: &'a str,
    current_rev: &'a str,
    // Sorted, deduplicated hook ids that must still exist at the new rev.
    required_hook_ids: Vec<&'a str>,
    usages: Vec<RepoUsage<'a>>,
}

/// All targets sharing a repo URL; the repo is fetched once per source.
struct RepoSource<'a> {
    repo: &'a str,
    targets: Vec<RepoTarget<'a>>,
}

/// What to do about a `# frozen:` comment that no longer matches the
/// pinned commit.
enum FrozenMismatchAction {
    // Rewrite the comment to reference this tag instead.
    ReplaceWith(String),
    // Drop the comment entirely.
    Remove,
    // Leave the file untouched; warn only.
    NoReplacement,
}
/// Whether the pinned commit exists in the freshly fetched clone.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum CommitPresence {
    Present,
    Absent,
    // Could not be determined (e.g. git without `--no-lazy-fetch`).
    Unknown,
}

/// Why a `# frozen:` comment is considered out of sync with its rev.
enum FrozenMismatchReason {
    // The frozen ref still resolves, but to a different commit.
    ResolvesToDifferentCommit,
    // The frozen ref no longer resolves at all.
    Unresolvable,
}

/// A `# frozen:` comment that disagrees with the pinned commit SHA, plus
/// the corrective action selected for it.
struct FrozenMismatch<'a> {
    project: &'a Project,
    remote_size: usize,
    remote_index: usize,
    rev_line_number: usize,
    current_frozen: String,
    // Where the comment lives; used when rendering the warning snippet.
    frozen_site: Option<FrozenCommentSite>,
    reason: FrozenMismatchReason,
    current_rev_presence: CommitPresence,
    action: FrozenMismatchAction,
}

/// Source location of a `# frozen: <ref>` comment.
#[derive(Clone)]
struct FrozenCommentSite {
    // 1-based line number in the config file.
    line_number: usize,
    // Full text of that line.
    source_line: String,
    // Byte range of the frozen ref within the line.
    span: Range<usize>,
}

/// The frozen comment (if any) attached to one `rev:` line.
#[derive(Clone)]
struct FrozenRef {
    line_number: usize,
    current_frozen: Option<String>,
    site: Option<FrozenCommentSite>,
}

/// A tag with its creation timestamp and the commit it points at
/// (annotated tags peeled to their target commit).
struct TagTimestamp {
    tag: String,
    timestamp: u64,
    commit: String,
}

/// Successful evaluation of one repo target.
struct ResolvedRepoUpdate<'a> {
    revision: Revision,
    frozen_mismatches: Vec<FrozenMismatch<'a>>,
}

/// Evaluation outcome (success or error) for one repo target.
struct RepoUpdate<'a> {
    target: &'a RepoTarget<'a>,
    result: Result<ResolvedRepoUpdate<'a>>,
}
/// Per-project list of new revisions, indexed by remote-repo position;
/// `None` means "leave this repo's `rev:` line untouched".
type ProjectUpdates<'a> = FxHashMap<&'a Project, Vec<Option<Revision>>>;

/// Aggregate outcome of `apply_repo_updates`.
struct ApplyRepoUpdatesResult {
    // At least one repo failed to resolve or validate.
    failure: bool,
    // At least one rev or frozen comment changed (or would change).
    has_updates: bool,
}

/// What happened for one repo occurrence, for display purposes.
enum DisplayEventKind {
    Update { current: Revision, next: Revision },
    FrozenUpdate { current: String, next: String },
    FrozenRemove { current: String },
    UpToDate,
    Failure { error: String },
}

/// Which output stream an event is printed to.
#[derive(Clone, Copy, Eq, PartialEq)]
enum DisplayStream {
    Stdout,
    Stderr,
}

/// One renderable outcome line, sortable by project and repo position.
struct DisplayEvent<'a> {
    stream: DisplayStream,
    project: &'a Project,
    repo: &'a str,
    remote_index: usize,
    line_number: usize,
    kind: DisplayEventKind,
}

/// A frozen-comment mismatch warning, sortable like a display event.
struct FrozenWarningEvent<'a> {
    project: &'a Project,
    repo: &'a str,
    current_rev: &'a str,
    remote_index: usize,
    mismatch: &'a FrozenMismatch<'a>,
}

/// Count of display events per (config file, repo URL); used to decide
/// whether output lines need a `line N:` prefix to disambiguate.
type RepoOccurrences<'a> = FxHashMap<(&'a Path, &'a str), usize>;
/// Entry point for the auto-update subcommand: discovers the workspace,
/// resolves the newest suitable revision for every remote hook repo
/// (concurrently, bounded by `jobs`), reports outcomes, and — unless
/// `dry_run` — rewrites the affected config files.
///
/// Returns `ExitStatus::Failure` when any repo failed to update, or when
/// `check` is set and updates would be made.
#[expect(clippy::fn_params_excessive_bools)]
pub(crate) async fn auto_update(
    store: &Store,
    config: Option<PathBuf>,
    filter_repos: Vec<String>,
    bleeding_edge: bool,
    freeze: bool,
    jobs: usize,
    dry_run: bool,
    check: bool,
    cooldown_days: u8,
    printer: Printer,
) -> Result<ExitStatus> {
    let workspace_root = Workspace::find_root(config.as_deref(), &CWD)?;
    let selectors = Selectors::default();
    let workspace = Workspace::discover(store, workspace_root, config, Some(&selectors), true)?;
    let repo_sources = collect_repo_sources(&workspace)?;
    // `jobs == 0` means "use the default concurrency"; cap at the number of
    // repos actually being processed, but always run at least one task.
    let jobs = if jobs == 0 { *CONCURRENCY } else { jobs };
    let jobs = jobs
        .min(if filter_repos.is_empty() {
            repo_sources.len()
        } else {
            filter_repos.len()
        })
        .max(1);
    let reporter = AutoUpdateReporter::new(printer);
    // Honor repo filters; an empty filter list selects every source.
    let sources = repo_sources.iter().filter(|repo_source| {
        filter_repos.is_empty() || filter_repos.iter().any(|repo| repo == repo_source.repo)
    });
    // Evaluate each repo source concurrently; each yields one RepoUpdate
    // per target, flattened into a single list.
    let outcomes: Vec<RepoUpdate<'_>> = futures::stream::iter(sources)
        .map(async |repo_source| {
            let progress = reporter.on_update_start(repo_source.repo);
            let result =
                evaluate_repo_source(repo_source, bleeding_edge, freeze, cooldown_days).await;
            reporter.on_update_complete(progress);
            result
        })
        .buffer_unordered(jobs)
        .try_collect::<Vec<_>>()
        .await?
        .into_iter()
        .flatten()
        .collect();
    reporter.on_complete();
    warn_frozen_mismatches(&outcomes, printer)?;
    #[expect(clippy::mutable_key_type)]
    let mut project_updates: ProjectUpdates<'_> = FxHashMap::default();
    let apply_result = apply_repo_updates(outcomes, dry_run, printer, &mut project_updates)?;
    if !dry_run {
        // Rewrite each project file that has at least one new revision.
        for (project, revisions) in project_updates {
            if revisions.iter().any(Option::is_some) {
                write_new_config(project.config_file(), &revisions).await?;
            }
        }
    }
    if apply_result.failure || (check && apply_result.has_updates) {
        return Ok(ExitStatus::Failure);
    }
    Ok(ExitStatus::Success)
}
/// Groups every remote repo across all workspace projects by repo URL,
/// and within a URL by its (current rev, required hook ids) combination,
/// so each distinct combination is evaluated exactly once.
///
/// Also pairs each occurrence with the `rev:` line metadata (line number
/// and any `# frozen:` comment) scanned from the raw config file text.
///
/// # Errors
/// Fails when the textual `rev:` scan disagrees with the parsed config's
/// remote-repo count, or when a config file cannot be read.
fn collect_repo_sources(workspace: &Workspace) -> Result<Vec<RepoSource<'_>>> {
    let mut repo_sources: FxHashMap<&str, FxHashMap<(&str, Vec<&str>), RepoTarget<'_>>> =
        FxHashMap::default();
    for project in workspace.projects() {
        let remote_count = project
            .config()
            .repos
            .iter()
            .filter(|repo| matches!(repo, Repo::Remote(_)))
            .count();
        let frozen_refs = read_frozen_refs(project.config_file()).with_context(|| {
            format!(
                "Failed to read frozen references from `{}`",
                project.config_file().user_display()
            )
        })?;
        // The line-oriented scan must agree with the parsed config,
        // otherwise later rewrites would touch the wrong lines.
        if frozen_refs.len() != remote_count {
            anyhow::bail!(
                "Found {} remote repos in `{}` but {} `rev:` entries while checking frozen refs",
                remote_count,
                project.config_file().user_display(),
                frozen_refs.len()
            );
        }
        let mut remote_index = 0;
        for repo in &project.config().repos {
            let Repo::Remote(remote_repo) = repo else {
                continue;
            };
            // Hook ids are sorted and deduplicated so they form a stable
            // part of the grouping key.
            let mut required_hook_ids = remote_repo
                .hooks
                .iter()
                .map(|hook| hook.id.as_str())
                .collect::<Vec<_>>();
            required_hook_ids.sort_unstable();
            required_hook_ids.dedup();
            let target = repo_sources
                .entry(remote_repo.repo.as_str())
                .or_default()
                .entry((remote_repo.rev.as_str(), required_hook_ids.clone()))
                .or_insert_with(|| RepoTarget {
                    repo: remote_repo.repo.as_str(),
                    current_rev: remote_repo.rev.as_str(),
                    required_hook_ids,
                    usages: Vec::new(),
                });
            target.usages.push(RepoUsage {
                project,
                remote_count,
                remote_index,
                rev_line_number: frozen_refs[remote_index].line_number,
                current_frozen: frozen_refs[remote_index].current_frozen.clone(),
                current_frozen_site: frozen_refs[remote_index].site.clone(),
            });
            remote_index += 1;
        }
    }
    Ok(repo_sources
        .into_iter()
        .map(|(repo, targets)| {
            // Sort targets deterministically for stable output ordering.
            let mut targets = targets.into_values().collect::<Vec<_>>();
            targets.sort_by(|a, b| {
                a.current_rev
                    .cmp(b.current_rev)
                    .then_with(|| a.required_hook_ids.cmp(&b.required_hook_ids))
            });
            RepoSource { repo, targets }
        })
        .collect())
}
fn warn_frozen_mismatches(updates: &[RepoUpdate<'_>], printer: Printer) -> Result<()> {
let mut warnings = Vec::new();
for update in updates {
let Ok(resolved) = &update.result else {
continue;
};
for mismatch in &resolved.frozen_mismatches {
warnings.push(FrozenWarningEvent {
project: mismatch.project,
repo: update.target.repo,
current_rev: update.target.current_rev,
remote_index: mismatch.remote_index,
mismatch,
});
}
}
warnings.sort_by(|a, b| {
a.project
.idx()
.cmp(&b.project.idx())
.then_with(|| a.remote_index.cmp(&b.remote_index))
});
for warning in warnings {
write!(
printer.stderr(),
"{}",
render_frozen_mismatch_warning(warning.repo, warning.current_rev, warning.mismatch)
)?;
}
Ok(())
}
/// Verb used in update messages, phrased conditionally for `--dry-run`.
fn update_verb(dry_run: bool) -> &'static str {
    match dry_run {
        true => "would update",
        false => "updating",
    }
}
/// Verb used in removal messages, phrased conditionally for `--dry-run`.
fn remove_verb(dry_run: bool) -> &'static str {
    match dry_run {
        true => "would remove",
        false => "removing",
    }
}
/// Renders a revision as a backquoted cyan string, followed by a dimmed
/// "(frozen: …)" note when a frozen ref accompanies it.
fn format_revision(rev: &str, frozen: Option<&str>) -> String {
    let rendered_rev = format!("`{}`", rev.cyan());
    if let Some(frozen) = frozen {
        let note = format!("(frozen: {frozen})").dimmed().to_string();
        format!("{rendered_rev} {note}")
    } else {
        rendered_rev
    }
}
/// Formats one display event as a colored, human-readable line (without
/// the repo/project header or indentation).
fn format_display_event(kind: &DisplayEventKind, dry_run: bool) -> String {
    match kind {
        DisplayEventKind::Update { current, next } => format!(
            "{} {} -> {}",
            format!("{} rev", update_verb(dry_run)).green(),
            // Fix: this argument was corrupted to `¤t.rev` (mojibake of
            // the `&curren;` HTML entity swallowing `&current`); restore
            // the intended `&current.rev`.
            format_revision(&current.rev, current.frozen.as_deref()),
            format_revision(&next.rev, next.frozen.as_deref())
        ),
        DisplayEventKind::FrozenUpdate { current, next } => format!(
            "{} `{}` -> `{}`",
            format!("{} frozen comment", update_verb(dry_run)).green(),
            current.cyan(),
            next.cyan()
        ),
        DisplayEventKind::FrozenRemove { current } => format!(
            "{} `{}`",
            format!("{} frozen comment", remove_verb(dry_run)).yellow(),
            current.cyan()
        ),
        DisplayEventKind::UpToDate => "already up to date".dimmed().to_string(),
        DisplayEventKind::Failure { error } => {
            format!("{} {error}", "update failed:".red())
        }
    }
}
/// Renders grouped outcome lines: an optional project header (only when
/// more than one config file contributed events), then a repo header,
/// then one line per event. Stdout and stderr events are rendered as
/// separate groups, each sorted by project and repo position.
fn write_display_events(
    events: &mut [DisplayEvent<'_>],
    repo_occurrences: &RepoOccurrences<'_>,
    dry_run: bool,
    printer: Printer,
) -> Result<()> {
    if events.is_empty() {
        return Ok(());
    }
    // Config-file order: by project, then by repo position within it.
    events.sort_by(|a, b| {
        a.project
            .idx()
            .cmp(&b.project.idx())
            .then_with(|| a.remote_index.cmp(&b.remote_index))
    });
    for stream in [DisplayStream::Stdout, DisplayStream::Stderr] {
        let stream_events = events
            .iter()
            .filter(|event| event.stream == stream)
            .collect::<Vec<_>>();
        if stream_events.is_empty() {
            continue;
        }
        // Project headers are only worth printing when events span more
        // than one config file.
        let show_project_headers = stream_events
            .iter()
            .map(|event| event.project.config_file())
            .unique()
            .count()
            > 1;
        // Track the headers already emitted so each is printed once, with
        // a blank line between consecutive groups.
        let mut current_project: Option<&Path> = None;
        let mut current_repo: Option<(&Path, &str)> = None;
        let mut output = String::new();
        for event in stream_events {
            let project = event.project.config_file();
            if show_project_headers && current_project != Some(project) {
                if current_project.is_some() {
                    writeln!(output)?;
                }
                writeln!(
                    output,
                    "{}",
                    format!("{}", project.user_display()).yellow().bold()
                )?;
                current_project = Some(project);
                current_repo = None;
            }
            let repo_key = (project, event.repo);
            if current_repo != Some(repo_key) {
                if current_repo.is_some() {
                    writeln!(output)?;
                }
                let indent = if show_project_headers { "  " } else { "" };
                writeln!(output, "{}{}", indent, event.repo.cyan().bold())?;
                current_repo = Some(repo_key);
            }
            let indent = if show_project_headers { "    " } else { "  " };
            // Disambiguate with `line N:` only when the same repo URL
            // appears multiple times in this config file.
            let line_prefix = if repo_occurrences[&repo_key] > 1 {
                format!("{} ", format!("line {}:", event.line_number).dimmed())
            } else {
                String::new()
            };
            writeln!(
                output,
                "{}{}{}",
                indent,
                line_prefix,
                format_display_event(&event.kind, dry_run)
            )?;
        }
        match stream {
            DisplayStream::Stdout => write!(printer.stdout(), "{output}")?,
            DisplayStream::Stderr => write!(printer.stderr(), "{output}")?,
        }
    }
    Ok(())
}
/// Turns per-target evaluation results into display events and records
/// the new revisions per project (via `project_updates`) so the caller
/// can rewrite config files.
///
/// Precedence per target: a rev change wins over frozen-comment fixes
/// (the new rev rewrites the whole line anyway); otherwise frozen-comment
/// actions are applied; a target with neither is reported up to date.
#[expect(clippy::mutable_key_type)]
fn apply_repo_updates<'a>(
    updates: Vec<RepoUpdate<'a>>,
    dry_run: bool,
    printer: Printer,
    project_updates: &mut ProjectUpdates<'a>,
) -> Result<ApplyRepoUpdatesResult> {
    let mut failure = false;
    let mut has_updates = false;
    let mut display_events = Vec::new();
    for update in updates {
        match update.result {
            Ok(resolved) => {
                let is_changed = update.target.current_rev != resolved.revision.rev;
                // Only Replace/Remove actions count as updates;
                // NoReplacement is warn-only.
                let has_frozen_updates = resolved.frozen_mismatches.iter().any(|mismatch| {
                    !matches!(mismatch.action, FrozenMismatchAction::NoReplacement)
                });
                let has_frozen_notice = !resolved.frozen_mismatches.is_empty();
                has_updates |= is_changed || has_frozen_updates;
                if is_changed {
                    // New rev: every usage of this target gets the same
                    // replacement revision.
                    for usage in &update.target.usages {
                        display_events.push(DisplayEvent {
                            stream: DisplayStream::Stdout,
                            project: usage.project,
                            repo: update.target.repo,
                            remote_index: usage.remote_index,
                            line_number: usage.rev_line_number,
                            kind: DisplayEventKind::Update {
                                current: Revision {
                                    rev: update.target.current_rev.to_string(),
                                    frozen: usage.current_frozen.clone(),
                                },
                                next: resolved.revision.clone(),
                            },
                        });
                        record_project_revision(
                            project_updates,
                            usage.project,
                            usage.remote_count,
                            usage.remote_index,
                            resolved.revision.clone(),
                        );
                    }
                } else {
                    // Rev unchanged: apply frozen-comment corrections only.
                    for mismatch in &resolved.frozen_mismatches {
                        match &mismatch.action {
                            FrozenMismatchAction::ReplaceWith(replacement) => {
                                display_events.push(DisplayEvent {
                                    stream: DisplayStream::Stdout,
                                    project: mismatch.project,
                                    repo: update.target.repo,
                                    remote_index: mismatch.remote_index,
                                    line_number: mismatch.rev_line_number,
                                    kind: DisplayEventKind::FrozenUpdate {
                                        current: mismatch.current_frozen.clone(),
                                        next: replacement.clone(),
                                    },
                                });
                                record_project_revision(
                                    project_updates,
                                    mismatch.project,
                                    mismatch.remote_size,
                                    mismatch.remote_index,
                                    Revision {
                                        rev: update.target.current_rev.to_string(),
                                        frozen: Some(replacement.clone()),
                                    },
                                );
                            }
                            FrozenMismatchAction::Remove => {
                                display_events.push(DisplayEvent {
                                    stream: DisplayStream::Stdout,
                                    project: mismatch.project,
                                    repo: update.target.repo,
                                    remote_index: mismatch.remote_index,
                                    line_number: mismatch.rev_line_number,
                                    kind: DisplayEventKind::FrozenRemove {
                                        current: mismatch.current_frozen.clone(),
                                    },
                                });
                                record_project_revision(
                                    project_updates,
                                    mismatch.project,
                                    mismatch.remote_size,
                                    mismatch.remote_index,
                                    Revision {
                                        rev: update.target.current_rev.to_string(),
                                        frozen: None,
                                    },
                                );
                            }
                            FrozenMismatchAction::NoReplacement => {}
                        }
                    }
                }
                if !is_changed && !has_frozen_notice {
                    for usage in &update.target.usages {
                        display_events.push(DisplayEvent {
                            stream: DisplayStream::Stdout,
                            project: usage.project,
                            repo: update.target.repo,
                            remote_index: usage.remote_index,
                            line_number: usage.rev_line_number,
                            kind: DisplayEventKind::UpToDate,
                        });
                    }
                }
            }
            Err(e) => {
                // A failed target is reported (on stderr) once per usage.
                failure = true;
                let error = e.to_string();
                for usage in &update.target.usages {
                    display_events.push(DisplayEvent {
                        stream: DisplayStream::Stderr,
                        project: usage.project,
                        repo: update.target.repo,
                        remote_index: usage.remote_index,
                        line_number: usage.rev_line_number,
                        kind: DisplayEventKind::Failure {
                            error: error.clone(),
                        },
                    });
                }
            }
        }
    }
    // Count events per (config file, repo) so rendering can decide when
    // `line N:` prefixes are needed.
    let repo_occurrences =
        display_events
            .iter()
            .fold(RepoOccurrences::default(), |mut counts, event| {
                *counts
                    .entry((event.project.config_file(), event.repo))
                    .or_default() += 1;
                counts
            });
    write_display_events(&mut display_events, &repo_occurrences, dry_run, printer)?;
    Ok(ApplyRepoUpdatesResult {
        failure,
        has_updates,
    })
}
/// Records the new revision for one remote-repo slot of a project,
/// lazily allocating the slot vector (one entry per remote repo, all
/// `None`) on first use.
#[expect(clippy::mutable_key_type)]
fn record_project_revision<'a>(
    project_updates: &mut ProjectUpdates<'a>,
    project: &'a Project,
    remote_size: usize,
    remote_index: usize,
    revision: Revision,
) {
    project_updates
        .entry(project)
        .or_insert_with(|| vec![None; remote_size])[remote_index] = Some(revision);
}
/// For a target whose `rev` is a pinned 40-character commit SHA, checks
/// every associated `# frozen:` comment against the freshly fetched repo
/// and reports those that no longer resolve to that commit, together
/// with the corrective action (replace with a tag that points at the
/// pinned commit, drop the comment, or warn only).
async fn collect_frozen_mismatches<'a>(
    repo_path: &Path,
    target: &'a RepoTarget<'a>,
    tag_timestamps: &[TagTimestamp],
) -> Result<Vec<FrozenMismatch<'a>>> {
    // Frozen comments only accompany a full commit SHA; anything else has
    // nothing to verify.
    if !(target.current_rev.len() == 40 && looks_like_sha(target.current_rev)) {
        return Ok(Vec::new());
    }
    let frozen_refs_to_check = target
        .usages
        .iter()
        .filter_map(|usage| usage.current_frozen.as_deref())
        .collect::<FxHashSet<_>>();
    if frozen_refs_to_check.is_empty() {
        return Ok(Vec::new());
    }
    let current_rev_presence = is_commit_present(repo_path, target.current_rev).await?;
    let rev_tags = get_tags_pointing_at_revision(tag_timestamps, target.current_rev);
    // Resolve each distinct frozen ref once; `None` marks an unresolvable
    // ref rather than failing the whole collection.
    let mut resolved_frozen_refs = FxHashMap::default();
    for frozen_ref in frozen_refs_to_check {
        let resolved = resolve_revision_to_commit(repo_path, frozen_ref).await.ok();
        resolved_frozen_refs.insert(frozen_ref, resolved);
    }
    Ok(target
        .usages
        .iter()
        .filter_map(|usage| {
            let current_frozen = usage.current_frozen.as_deref()?;
            let frozen_commit = resolved_frozen_refs.get(current_frozen).cloned().flatten();
            let reason = match frozen_commit.as_deref() {
                // Still in sync: nothing to report for this usage.
                Some(frozen_commit) if frozen_commit.eq_ignore_ascii_case(target.current_rev) => {
                    return None;
                }
                Some(_) => FrozenMismatchReason::ResolvesToDifferentCommit,
                None => FrozenMismatchReason::Unresolvable,
            };
            // Prefer rewriting the comment to a tag that points at the
            // pinned commit; otherwise remove it only when we positively
            // know the commit exists.
            let action = match select_best_tag(&rev_tags, current_frozen, true) {
                Some(replacement) => FrozenMismatchAction::ReplaceWith(replacement.to_string()),
                None => match current_rev_presence {
                    CommitPresence::Present => FrozenMismatchAction::Remove,
                    CommitPresence::Absent | CommitPresence::Unknown => {
                        FrozenMismatchAction::NoReplacement
                    }
                },
            };
            Some(FrozenMismatch {
                project: usage.project,
                remote_size: usage.remote_count,
                remote_index: usage.remote_index,
                rev_line_number: usage.rev_line_number,
                current_frozen: current_frozen.to_string(),
                frozen_site: usage.current_frozen_site.clone(),
                reason,
                current_rev_presence,
                action,
            })
        })
        .collect())
}
/// Clones a repo source into a temporary directory, fetches tags, and
/// evaluates each of its targets sequentially against that clone.
///
/// A clone/fetch failure is fanned out as the same error to every target
/// (so each usage is reported) instead of aborting the whole run.
async fn evaluate_repo_source<'a>(
    repo_source: &'a RepoSource<'a>,
    bleeding_edge: bool,
    freeze: bool,
    cooldown_days: u8,
) -> Result<Vec<RepoUpdate<'a>>> {
    let tmp_dir = tempfile::tempdir()?;
    let repo_path = tmp_dir.path();
    let result = async {
        trace!(
            "Cloning repository `{}` to `{}`",
            repo_source.repo,
            repo_path.display()
        );
        setup_and_fetch_repo(repo_source.repo, repo_path).await?;
        let metadata = list_tag_metadata(repo_path).await?;
        anyhow::Ok(metadata)
    }
    .await;
    let tag_timestamps = match result {
        Ok(metadata) => metadata,
        Err(e) => {
            // `{:#}` includes the anyhow context chain in one line.
            let error = format!("{e:#}");
            return Ok(repo_source
                .targets
                .iter()
                .map(|target| RepoUpdate {
                    target,
                    result: Err(anyhow::anyhow!(error.clone())),
                })
                .collect());
        }
    };
    let mut updates = Vec::with_capacity(repo_source.targets.len());
    for target in &repo_source.targets {
        let result = evaluate_repo_target(
            repo_path,
            target,
            bleeding_edge,
            freeze,
            cooldown_days,
            &tag_timestamps,
        )
        .await;
        updates.push(RepoUpdate { target, result });
    }
    Ok(updates)
}
/// Determines the revision one target should be updated to, plus any
/// frozen-comment mismatches to fix alongside it.
///
/// With `freeze`, a symbolic revision is pinned to its exact commit and
/// the original ref is kept as the frozen comment. The chosen revision is
/// validated against the repo's hook manifest before being returned.
async fn evaluate_repo_target<'a>(
    repo_path: &Path,
    target: &'a RepoTarget<'a>,
    bleeding_edge: bool,
    freeze: bool,
    cooldown_days: u8,
    tag_timestamps: &[TagTimestamp],
) -> Result<ResolvedRepoUpdate<'a>> {
    // Frozen-comment analysis is best-effort: a failure here downgrades to
    // a warning rather than failing the target.
    let frozen_mismatches = match collect_frozen_mismatches(repo_path, target, tag_timestamps).await
    {
        Ok(mismatches) => mismatches,
        Err(e) => {
            warn!(
                "Failed to collect frozen comment context for repo `{}`: {e}",
                target.repo
            );
            Vec::new()
        }
    };
    let rev = select_update_revision(
        repo_path,
        target.current_rev,
        bleeding_edge,
        cooldown_days,
        tag_timestamps,
    )
    .await?;
    // No candidate revision: keep the current rev (frozen mismatches may
    // still be reported/fixed).
    let Some(rev) = rev else {
        debug!("No suitable revision found for repo `{}`", target.repo);
        return Ok(ResolvedRepoUpdate {
            revision: Revision {
                rev: target.current_rev.to_string(),
                frozen: None,
            },
            frozen_mismatches,
        });
    };
    let (rev, frozen) = if freeze {
        let exact = resolve_revision_to_commit(repo_path, &rev).await?;
        if rev.eq_ignore_ascii_case(&exact) {
            // Already an exact commit id; nothing to freeze.
            (rev, None)
        } else {
            debug!("Freezing revision `{rev}` to `{exact}`");
            (exact, Some(rev))
        }
    } else {
        (rev, None)
    };
    checkout_and_validate_manifest(repo_path, &rev, &target.required_hook_ids).await?;
    Ok(ResolvedRepoUpdate {
        revision: Revision { rev, frozen },
        frozen_mismatches,
    })
}
/// Initializes an empty repo at `repo_path` pointing at `repo_url` and
/// fetches `HEAD` plus all tags, blob-less for speed.
///
/// NOTE(review): the fetch's exit status is returned by `status()` but
/// not inspected, so a failed fetch does not error here — presumably it
/// surfaces later when revisions fail to resolve; confirm.
async fn setup_and_fetch_repo(repo_url: &str, repo_path: &Path) -> Result<()> {
    git::init_repo(repo_url, repo_path).await?;
    git::git_cmd("git fetch")?
        .arg("fetch")
        .arg("origin")
        .arg("HEAD")
        .arg("--quiet")
        // Blob-less partial clone: only commit/tree metadata is needed to
        // enumerate tags and revisions.
        .arg("--filter=blob:none")
        .arg("--tags")
        .current_dir(repo_path)
        .remove_git_envs()
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()
        .await?;
    Ok(())
}
/// Resolves `rev` to the commit it ultimately points at, peeling
/// annotated tags via the `rev^{}` suffix.
///
/// # Errors
/// Fails when the revision does not resolve (`check(true)` turns a
/// non-zero git exit into an error).
async fn resolve_revision_to_commit(repo_path: &Path, rev: &str) -> Result<String> {
    let output = git::git_cmd("git rev-parse")?
        .arg("rev-parse")
        // `^{}` dereferences tag objects down to the underlying commit.
        .arg(format!("{rev}^{{}}"))
        .check(true)
        .current_dir(repo_path)
        .remove_git_envs()
        .output()
        .await?;
    Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
}
/// Checks whether `commit` exists in the local (partial) clone without
/// triggering an on-demand fetch.
///
/// Uses git's global `--no-lazy-fetch` flag; whether the installed git
/// supports it is probed once and cached process-wide. When unsupported,
/// presence cannot be determined safely and `Unknown` is returned.
async fn is_commit_present(repo_path: &Path, commit: &str) -> Result<CommitPresence> {
    static GIT_SUPPORTS_NO_LAZY_FETCH: OnceLock<bool> = OnceLock::new();
    // Known-unsupported: skip the probe entirely.
    if matches!(GIT_SUPPORTS_NO_LAZY_FETCH.get(), Some(false)) {
        return Ok(CommitPresence::Unknown);
    }
    let output = git::git_cmd("git cat-file")?
        // Global flag, placed before the subcommand.
        .arg("--no-lazy-fetch")
        .arg("cat-file")
        // `-e` exits zero iff the object exists.
        .arg("-e")
        .arg(format!("{commit}^{{commit}}"))
        // Force English error text so the "unknown option" check below
        // works regardless of locale.
        .env(EnvVars::LC_ALL, "C")
        .check(false)
        .current_dir(repo_path)
        .remove_git_envs()
        .stdout(Stdio::null())
        .output()
        .await?;
    if output.status.success() {
        let _ = GIT_SUPPORTS_NO_LAZY_FETCH.set(true);
        return Ok(CommitPresence::Present);
    }
    // Distinguish "git too old for --no-lazy-fetch" from "object absent".
    if no_lazy_fetch_unsupported(&output.stderr) {
        let _ = GIT_SUPPORTS_NO_LAZY_FETCH.set(false);
        return Ok(CommitPresence::Unknown);
    }
    let _ = GIT_SUPPORTS_NO_LAZY_FETCH.set(true);
    Ok(CommitPresence::Absent)
}
/// Returns true when git's stderr indicates the installed version does
/// not recognize the `--no-lazy-fetch` global option.
fn no_lazy_fetch_unsupported(stderr: &[u8]) -> bool {
    let text = String::from_utf8_lossy(stderr);
    ["--no-lazy-fetch", "unknown option"]
        .iter()
        .all(|needle| text.contains(needle))
}
/// Collects the names of all tags whose (peeled) commit equals `rev`,
/// compared case-insensitively.
fn get_tags_pointing_at_revision<'a>(
    tag_timestamps: &'a [TagTimestamp],
    rev: &str,
) -> Vec<&'a str> {
    let mut matching = Vec::new();
    for entry in tag_timestamps {
        if entry.commit.eq_ignore_ascii_case(rev) {
            matching.push(entry.tag.as_str());
        }
    }
    matching
}
/// Renders a frozen-comment mismatch as an annotated source snippet
/// (rustc-style warning) pointing at the frozen ref in the config file,
/// with an optional note explaining the chosen action.
fn render_frozen_mismatch_warning(
    repo: &str,
    current_rev: &str,
    mismatch: &FrozenMismatch<'_>,
) -> String {
    let label = match mismatch.reason {
        FrozenMismatchReason::ResolvesToDifferentCommit => {
            format!(
                "`{}` resolves to a different commit",
                mismatch.current_frozen
            )
        }
        FrozenMismatchReason::Unresolvable => {
            format!("`{}` could not be resolved", mismatch.current_frozen)
        }
    };
    let details = match &mismatch.action {
        FrozenMismatchAction::ReplaceWith(replacement) => Some(format!(
            "pinned commit `{current_rev}` is referenced by `{replacement}`"
        )),
        FrozenMismatchAction::Remove => Some(format!(
            "no tag points at the pinned commit `{current_rev}`"
        )),
        FrozenMismatchAction::NoReplacement
            if matches!(mismatch.current_rev_presence, CommitPresence::Absent) =>
        {
            Some(format!(
                "pinned commit `{current_rev}` is not present in the repo"
            ))
        }
        // Presence unknown: no extra note can be stated confidently.
        FrozenMismatchAction::NoReplacement => None,
    };
    let title = format!(
        "[{repo}] frozen ref `{}` does not match `{current_rev}`",
        mismatch.current_frozen
    );
    // Invariant: a mismatch is only built from a usage that had a frozen
    // comment, and `parse_frozen_ref` sets the site whenever it sets the
    // ref, so the site is always present here.
    let site = mismatch
        .frozen_site
        .as_ref()
        .expect("frozen comment site must exist when rendering a frozen mismatch warning");
    let mut report = Level::WARNING.primary_title(title).element(
        Snippet::source(&site.source_line)
            .line_start(site.line_number)
            .path(mismatch.project.config_file().user_display().to_string())
            .annotation(AnnotationKind::Primary.span(site.span.clone()).label(label)),
    );
    if let Some(details) = details {
        report = report.element(Level::NOTE.message(details));
    }
    let renderer = Renderer::styled().decor_style(DecorStyle::Ascii);
    format!("{}\n", renderer.render(&[report]))
}
/// Extracts a `# frozen: <ref>` comment from one `rev:` line, recording
/// both the ref and its exact location for later diagnostics. Lines
/// without such a comment yield an empty `FrozenRef`.
fn parse_frozen_ref(line: &str, line_number: usize) -> FrozenRef {
    // Group 1 captures the ref itself: everything after `frozen:` up to
    // whitespace or another `#`.
    let frozen = regex!(r#"#\s*frozen:\s*([^\s#]+)"#)
        .captures(line)
        .and_then(|captures| captures.get(1));
    match frozen {
        None => FrozenRef {
            line_number,
            current_frozen: None,
            site: None,
        },
        Some(found) => FrozenRef {
            line_number,
            current_frozen: Some(found.as_str().to_string()),
            site: Some(FrozenCommentSite {
                line_number,
                source_line: line.to_string(),
                span: found.start()..found.end(),
            }),
        },
    }
}
/// Scans a config file's raw text for its `rev` lines (TOML `rev = ...`
/// or YAML `rev: ...`, chosen by file extension) and parses any
/// `# frozen:` comment on each, preserving file order.
fn read_frozen_refs(path: &Path) -> Result<Vec<FrozenRef>> {
    let content = fs_err::read_to_string(path)?;
    match path.extension() {
        Some(ext) if ext.eq_ignore_ascii_case("toml") => Ok(content
            .lines()
            .enumerate()
            // TOML form: a line starting (after whitespace) with `rev =`.
            .filter(|(_, line)| regex!(r#"^\s*rev\s*="#).is_match(line))
            .map(|(index, line)| parse_frozen_ref(line, index + 1))
            .collect()),
        _ => {
            // YAML form: an indented `rev:` key with a non-empty scalar;
            // the trailing group swallows any comment.
            let rev_regex = regex!(r#"^\s+rev:\s*['"]?[^\s#]+(?P<comment>.*)$"#);
            Ok(content
                .lines()
                .enumerate()
                .filter_map(|(index, line)| {
                    rev_regex
                        .captures(line)
                        .map(|_| parse_frozen_ref(line, index + 1))
                })
                .collect())
        }
    }
}
/// Resolves `FETCH_HEAD` for bleeding-edge mode: prefer the tag name when
/// one points exactly at `FETCH_HEAD`, otherwise fall back to the raw
/// commit id.
async fn resolve_bleeding_edge(repo_path: &Path) -> Result<Option<String>> {
    let output = git::git_cmd("git describe")?
        .arg("describe")
        .arg("FETCH_HEAD")
        .arg("--tags")
        // Only accept an exact tag match; no `tag-N-gSHA` descriptions.
        .arg("--exact-match")
        .check(false)
        .current_dir(repo_path)
        .remove_git_envs()
        .output()
        .await?;
    let rev = if output.status.success() {
        String::from_utf8_lossy(&output.stdout).trim().to_string()
    } else {
        debug!("No matching tag for `FETCH_HEAD`, using rev-parse instead");
        let output = git::git_cmd("git rev-parse")?
            .arg("rev-parse")
            .arg("FETCH_HEAD")
            .check(true)
            .current_dir(repo_path)
            .remove_git_envs()
            .output()
            .await?;
        String::from_utf8_lossy(&output.stdout).trim().to_string()
    };
    debug!("Resolved `FETCH_HEAD` to `{rev}`");
    Ok(Some(rev))
}
async fn list_tag_metadata(repo: &Path) -> Result<Vec<TagTimestamp>> {
let output = git::git_cmd("git for-each-ref")?
.arg("for-each-ref")
.arg("--sort=-creatordate")
.arg("--format=%(refname:lstrip=2)\t%(creatordate:unix)\t%(objectname)\t%(*objectname)")
.arg("refs/tags")
.check(true)
.current_dir(repo)
.remove_git_envs()
.output()
.await?;
let mut tags: Vec<TagTimestamp> = String::from_utf8_lossy(&output.stdout)
.lines()
.filter_map(|line| {
let mut parts = line.split('\t');
let tag = parts.next()?.trim_ascii();
let ts_str = parts.next()?.trim_ascii();
let object = parts.next()?.trim_ascii();
let peeled = parts.next().unwrap_or_default().trim_ascii();
let ts: u64 = ts_str.parse().ok()?;
let commit = if peeled.is_empty() { object } else { peeled };
Some(TagTimestamp {
tag: tag.to_string(),
timestamp: ts,
commit: commit.to_string(),
})
})
.collect();
tags.sort_by(|tag_a, tag_b| {
tag_b.timestamp.cmp(&tag_a.timestamp).then_with(|| {
let ver_a = Version::parse(tag_a.tag.strip_prefix('v').unwrap_or(&tag_a.tag));
let ver_b = Version::parse(tag_b.tag.strip_prefix('v').unwrap_or(&tag_b.tag));
match (ver_a, ver_b) {
(Ok(a), Ok(b)) => b.cmp(&a).then_with(|| tag_a.tag.cmp(&tag_b.tag)),
(Ok(_), Err(_)) => std::cmp::Ordering::Less,
(Err(_), Ok(_)) => std::cmp::Ordering::Greater,
(Err(_), Err(_)) => tag_a.tag.cmp(&tag_b.tag),
}
})
});
Ok(tags)
}
/// Picks the revision a repo should update to, or `None` when no tag
/// satisfies the cooldown window.
///
/// With `bleeding_edge` the result is whatever `FETCH_HEAD` resolves to.
/// Otherwise the newest tag created at least `cooldown_days` days ago is
/// chosen (`cooldown_days == 0` disables the cooldown).
async fn select_update_revision(
    repo_path: &Path,
    current_rev: &str,
    bleeding_edge: bool,
    cooldown_days: u8,
    tag_timestamps: &[TagTimestamp],
) -> Result<Option<String>> {
    if bleeding_edge {
        return resolve_bleeding_edge(repo_path).await;
    }
    let cutoff_secs = u64::from(cooldown_days) * 86400;
    let now = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs();
    let cutoff = now.saturating_sub(cutoff_secs);
    // `tag_timestamps` is sorted newest-first, so search with a reversed
    // comparator; `left` is the index of the first tag at or before the
    // cutoff (the insertion point on `Err`).
    let left = match tag_timestamps.binary_search_by(|tag| tag.timestamp.cmp(&cutoff).reverse()) {
        Ok(i) | Err(i) => i,
    };
    let Some(target_tag) = tag_timestamps.get(left) else {
        trace!("No tags meet cooldown cutoff {cutoff_secs}s");
        return Ok(None);
    };
    debug!(
        "Using tag `{}` cutoff timestamp {}",
        target_tag.tag, target_tag.timestamp
    );
    // Several tags may point at the same commit; prefer the one closest
    // in spelling to the currently configured rev.
    let tags = get_tags_pointing_at_revision(tag_timestamps, &target_tag.commit);
    let best = select_best_tag(&tags, current_rev, false)
        .unwrap_or(target_tag.tag.as_str())
        .to_string();
    debug!(
        "Using best candidate tag `{best}` for revision `{}`",
        target_tag.tag
    );
    Ok(Some(best))
}
fn compare_tag_versions_desc(tag_a: &str, tag_b: &str) -> std::cmp::Ordering {
let version_a = Version::parse(tag_a.strip_prefix('v').unwrap_or(tag_a));
let version_b = Version::parse(tag_b.strip_prefix('v').unwrap_or(tag_b));
match (version_a, version_b) {
(Ok(a), Ok(b)) => b.cmp(&a),
(Ok(_), Err(_)) => std::cmp::Ordering::Less,
(Err(_), Ok(_)) => std::cmp::Ordering::Greater,
(Err(_), Err(_)) => std::cmp::Ordering::Equal,
}
}
/// Picks the best tag from `tags` relative to `current_ref`: prefer
/// "version-like" tags (containing a `.`), then minimize edit distance to
/// the current ref, then the highest version, then the lexicographically
/// smallest name. Without any version-like tag, other tags are only
/// considered when `allow_non_version_like` is set.
fn select_best_tag<'a>(
    tags: &[&'a str],
    current_ref: &str,
    allow_non_version_like: bool,
) -> Option<&'a str> {
    let version_like = tags
        .iter()
        .filter(|tag| tag.contains('.'))
        .copied()
        .collect::<Vec<_>>();
    let candidates = if !version_like.is_empty() {
        version_like
    } else if allow_non_version_like {
        tags.to_vec()
    } else {
        return None;
    };
    // `min_by` returns the first of equally-minimal elements, matching
    // the stable-sort-then-first approach.
    candidates.into_iter().min_by(|tag_a, tag_b| {
        levenshtein::levenshtein(tag_a, current_ref)
            .cmp(&levenshtein::levenshtein(tag_b, current_ref))
            .then_with(|| compare_tag_versions_desc(tag_a, tag_b))
            .then_with(|| tag_a.cmp(tag_b))
    })
}
/// Checks out only the hooks manifest (`.pre-commit-hooks.yaml`) at `rev`
/// and verifies that every currently-required hook id is still defined
/// there.
///
/// # Errors
/// Fails when the manifest cannot be read or when any required hook is
/// missing at `rev`.
async fn checkout_and_validate_manifest(
    repo_path: &Path,
    rev: &str,
    required_hook_ids: &[&str],
) -> Result<()> {
    // NOTE(review): presumably probes the file with `git show` first to
    // work around a Windows-specific checkout quirk — confirm; the exit
    // status is not inspected here.
    if cfg!(windows) {
        git::git_cmd("git show")?
            .arg("show")
            .arg(format!("{rev}:{PRE_COMMIT_HOOKS_YAML}"))
            .current_dir(repo_path)
            .remove_git_envs()
            .stdout(Stdio::null())
            .stderr(Stdio::null())
            .status()
            .await?;
    }
    // Checkout status is likewise unchecked; a failed checkout surfaces
    // when `read_manifest` cannot read the file below.
    git::git_cmd("git checkout")?
        .arg("checkout")
        .arg("--quiet")
        .arg(rev)
        .arg("--")
        .arg(PRE_COMMIT_HOOKS_YAML)
        .current_dir(repo_path)
        .remove_git_envs()
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()
        .await?;
    let manifest = config::read_manifest(&repo_path.join(PRE_COMMIT_HOOKS_YAML))?;
    let new_hook_ids = manifest
        .hooks
        .into_iter()
        .map(|h| h.id)
        .collect::<FxHashSet<_>>();
    let hooks_missing = required_hook_ids
        .iter()
        .filter(|hook_id| !new_hook_ids.contains(**hook_id))
        .collect::<Vec<_>>();
    if !hooks_missing.is_empty() {
        anyhow::bail!(
            "Cannot update to rev `{}`, hook{} {} missing: {}",
            rev,
            if hooks_missing.len() > 1 { "s" } else { "" },
            if hooks_missing.len() > 1 { "are" } else { "is" },
            hooks_missing.into_iter().join(", ")
        );
    }
    Ok(())
}
/// Rewrites the config file at `path` with the given per-repo revisions,
/// dispatching on the file extension to a TOML or YAML renderer.
async fn write_new_config(path: &Path, revisions: &[Option<Revision>]) -> Result<()> {
    let content = fs_err::tokio::read_to_string(path).await?;
    let is_toml = path
        .extension()
        .is_some_and(|ext| ext.eq_ignore_ascii_case("toml"));
    let new_content = if is_toml {
        render_updated_toml_config(path, &content, revisions)?
    } else {
        render_updated_yaml_config(path, &content, revisions)?
    };
    fs_err::tokio::write(path, new_content)
        .await
        .with_context(|| {
            format!(
                "Failed to write updated config file `{}`",
                path.user_display()
            )
        })?;
    Ok(())
}
/// Produces the updated TOML config text: parses with `toml_edit` (which
/// preserves formatting and comments), then rewrites each remote repo's
/// `rev` value in order, managing the `# frozen:` suffix comment.
///
/// # Errors
/// Fails when the `[[repos]]` array is missing, a remote repo lacks
/// `rev`, or the remote-repo count no longer matches `revisions`.
fn render_updated_toml_config(
    path: &Path,
    content: &str,
    revisions: &[Option<Revision>],
) -> Result<String> {
    let mut doc = content.parse::<DocumentMut>()?;
    let Some(repos) = doc
        .get_mut("repos")
        .and_then(|item| item.as_array_of_tables_mut())
    else {
        anyhow::bail!("Missing `[[repos]]` array in `{}`", path.user_display());
    };
    // Collect remote repo tables in file order, skipping built-in repo
    // kinds that have no `rev`.
    let mut remote_repos = Vec::new();
    for table in repos.iter_mut() {
        let repo_value = table
            .get("repo")
            .and_then(|item| item.as_value())
            .and_then(|value| value.as_str())
            .unwrap_or_default();
        if matches!(repo_value, "local" | "meta" | "builtin") {
            continue;
        }
        if !table.contains_key("rev") {
            anyhow::bail!(
                "Found remote repo without `rev` in `{}`",
                path.user_display()
            );
        }
        remote_repos.push(table);
    }
    // Guard against the file having changed since it was analyzed.
    if remote_repos.len() != revisions.len() {
        anyhow::bail!(
            "Found {} remote repos in `{}` but expected {}, file content may have changed",
            remote_repos.len(),
            path.user_display(),
            revisions.len()
        );
    }
    for (table, revision) in remote_repos.into_iter().zip_eq(revisions) {
        // `None` means this repo's rev is unchanged.
        let Some(revision) = revision else {
            continue;
        };
        let Some(value) = table.get_mut("rev").and_then(|item| item.as_value_mut()) else {
            continue;
        };
        // Preserve any trailing comment that is NOT a frozen comment;
        // stale `# frozen:` comments are dropped.
        let suffix = value
            .decor()
            .suffix()
            .and_then(|s| s.as_str())
            .filter(|s| !s.trim_start().starts_with("# frozen:"))
            .map(str::to_string);
        *value = toml_edit::Value::from(revision.rev.clone());
        if let Some(frozen) = &revision.frozen {
            value.decor_mut().set_suffix(format!(" # frozen: {frozen}"));
        } else if let Some(suffix) = suffix {
            value.decor_mut().set_suffix(suffix);
        }
    }
    Ok(doc.to_string())
}
/// Produces the updated YAML config text by rewriting each `rev:` line in
/// place, preserving indentation, spacing, quoting style, line endings,
/// and any non-frozen trailing comment.
///
/// # Errors
/// Fails when the number of `rev:` lines no longer matches `revisions`
/// or a matched line cannot be re-captured.
fn render_updated_yaml_config(
    path: &Path,
    content: &str,
    revisions: &[Option<Revision>],
) -> Result<String> {
    // `split_inclusive` keeps each line's own newline so the original
    // line endings survive the rewrite.
    let mut lines = content
        .split_inclusive('\n')
        .map(ToString::to_string)
        .collect::<Vec<_>>();
    // Capture groups: (1) indent, (2) space after the colon, (3) opening
    // quote (if any), (4) rev value, (5) trailing text/comment, (6) EOL.
    let rev_regex = regex!(r#"^(\s+)rev:(\s*)(['"]?)([^\s#]+)(.*)(\r?\n)$"#);
    let rev_lines = lines
        .iter()
        .enumerate()
        .filter_map(|(line_no, line)| {
            if rev_regex.is_match(line) {
                Some(line_no)
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    // Guard against the file having changed since it was analyzed.
    if rev_lines.len() != revisions.len() {
        anyhow::bail!(
            "Found {} `rev:` lines in `{}` but expected {}, file content may have changed",
            rev_lines.len(),
            path.user_display(),
            revisions.len()
        );
    }
    for (line_no, revision) in rev_lines.iter().zip_eq(revisions) {
        // `None` means this repo's rev is unchanged.
        let Some(revision) = revision else {
            continue;
        };
        let caps = rev_regex
            .captures(&lines[*line_no])
            .context("Failed to capture rev line")?;
        // Re-serialize the rev with the same quoting style it had.
        let new_rev = serialize_yaml_scalar(&revision.rev, &caps[3])?;
        // A new frozen comment replaces the trailing text; a stale frozen
        // comment is dropped; any other trailing comment is preserved.
        let comment = if let Some(frozen) = &revision.frozen {
            format!(" # frozen: {frozen}")
        } else if caps[5].trim_start().starts_with("# frozen:") {
            String::new()
        } else {
            caps[5].to_string()
        };
        lines[*line_no] = format!(
            "{}rev:{}{}{}{}",
            &caps[1], &caps[2], new_rev, comment, &caps[6]
        );
    }
    Ok(lines.join(""))
}
#[cfg(test)]
mod tests {
use super::*;
use crate::process::Cmd;
// NOTE(review): shadows the identical import at the top of the file; harmless
// but redundant inside this module.
use std::time::{SystemTime, UNIX_EPOCH};
// Create a throwaway git repository in a tempdir: `git init`, a test
// identity, one empty unsigned initial commit, and the default branch
// renamed to `trunk`. The caller keeps the TempDir alive for the repo path.
async fn setup_test_repo() -> tempfile::TempDir {
let tmp = tempfile::tempdir().unwrap();
let repo = tmp.path();
git::git_cmd("git init")
.unwrap()
.arg("init")
.current_dir(repo)
.remove_git_envs()
.output()
.await
.unwrap();
git::git_cmd("git config")
.unwrap()
.args(["config", "user.email", "test@test.com"])
.current_dir(repo)
.remove_git_envs()
.output()
.await
.unwrap();
git::git_cmd("git config")
.unwrap()
.args(["config", "user.name", "Test"])
.current_dir(repo)
.remove_git_envs()
.output()
.await
.unwrap();
// `-c commit.gpgsign=false` keeps the test independent of the host's
// signing configuration.
git::git_cmd("git commit")
.unwrap()
.args([
"-c",
"commit.gpgsign=false",
"commit",
"--allow-empty",
"-m",
"initial",
])
.current_dir(repo)
.remove_git_envs()
.output()
.await
.unwrap();
// NOTE(review): the summary label says "git checkout" but the command run
// is `git branch -M trunk`; consider relabeling for clearer error output.
git::git_cmd("git checkout")
.unwrap()
.args(["branch", "-M", "trunk"])
.current_dir(repo)
.remove_git_envs()
.output()
.await
.unwrap();
tmp
}
// Build a git command rooted at `dir` with commit and tag signing disabled,
// so tests never depend on the host's GPG setup.
fn git_cmd(dir: impl AsRef<Path>, summary: &str) -> Cmd {
let mut cmd = git::git_cmd(summary).unwrap();
cmd.current_dir(dir)
.args(["-c", "commit.gpgsign=false"])
.args(["-c", "tag.gpgsign=false"]);
cmd
}
// Create an empty commit with the given message at the current time.
async fn create_commit(repo: &Path, message: &str) {
git_cmd(repo, "git commit")
.args(["commit", "--allow-empty", "-m", message])
.remove_git_envs()
.output()
.await
.unwrap();
}
// Create an empty commit whose author and committer dates are `days_ago`
// days in the past, for exercising cooldown/age-based selection logic.
async fn create_backdated_commit(repo: &Path, message: &str, days_ago: u64) {
let timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs()
- (days_ago * 86400);
// Git env date format: "<unix-seconds> <tz-offset>".
let date_str = format!("{timestamp} +0000");
git_cmd(repo, "git commit")
.args(["commit", "--allow-empty", "-m", message])
.env("GIT_AUTHOR_DATE", &date_str)
.env("GIT_COMMITTER_DATE", &date_str)
.remove_git_envs()
.output()
.await
.unwrap();
}
// Create a lightweight tag at HEAD; its effective timestamp is the tagged
// commit's committer date.
async fn create_lightweight_tag(repo: &Path, tag: &str) {
git_cmd(repo, "git tag")
.arg("tag")
.arg(tag)
.remove_git_envs()
.output()
.await
.unwrap();
}
// Create an annotated tag at HEAD with a tagger date `days_ago` days in the
// past (GIT_COMMITTER_DATE sets the tagger date for annotated tags).
async fn create_annotated_tag(repo: &Path, tag: &str, days_ago: u64) {
let timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs()
- (days_ago * 86400);
let date_str = format!("{timestamp} +0000");
git_cmd(repo, "git tag")
.arg("tag")
.arg(tag)
.arg("-m")
.arg(tag)
.env("GIT_AUTHOR_DATE", &date_str)
.env("GIT_COMMITTER_DATE", &date_str)
.remove_git_envs()
.output()
.await
.unwrap();
}
// Unix timestamp `days_ago` days before now.
// NOTE(review): appears unused within this module as written — confirm it
// is still needed or remove to avoid a dead_code warning in test builds.
fn get_backdated_timestamp(days_ago: u64) -> u64 {
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs();
now - (days_ago * 86400)
}
// Tags come back newest-first; tags on the same commit share its hash.
#[tokio::test]
async fn test_list_tag_metadata() {
let tmp = setup_test_repo().await;
let repo = tmp.path();
create_backdated_commit(repo, "old", 5).await;
create_lightweight_tag(repo, "v0.1.0").await;
create_backdated_commit(repo, "new", 2).await;
create_lightweight_tag(repo, "v0.2.0").await;
create_annotated_tag(repo, "alias-v0.2.0", 0).await;
let timestamps = list_tag_metadata(repo).await.unwrap();
assert_eq!(timestamps.len(), 3);
assert_eq!(timestamps[0].tag, "alias-v0.2.0");
assert_eq!(timestamps[1].tag, "v0.2.0");
assert_eq!(timestamps[2].tag, "v0.1.0");
// Lightweight and annotated tag on the same commit resolve to one commit.
assert_eq!(timestamps[0].commit, timestamps[1].commit);
}
// When FETCH_HEAD points at a tagged commit, the tag name is preferred over
// the raw SHA.
#[tokio::test]
async fn test_resolve_bleeding_edge_prefers_exact_tag() {
let tmp = setup_test_repo().await;
let repo = tmp.path();
create_commit(repo, "tagged").await;
create_lightweight_tag(repo, "v1.2.3").await;
// Fetch from self to populate FETCH_HEAD, which resolve_bleeding_edge reads.
git::git_cmd("git fetch")
.unwrap()
.args(["fetch", ".", "HEAD"])
.current_dir(repo)
.remove_git_envs()
.output()
.await
.unwrap();
let rev = resolve_bleeding_edge(repo).await.unwrap();
assert_eq!(rev, Some("v1.2.3".to_string()));
}
// With no tag at the fetched commit, the resolver falls back to the SHA.
#[tokio::test]
async fn test_resolve_bleeding_edge_falls_back_to_rev_parse() {
let tmp = setup_test_repo().await;
let repo = tmp.path();
create_commit(repo, "untagged").await;
git::git_cmd("git fetch")
.unwrap()
.args(["fetch", ".", "HEAD"])
.current_dir(repo)
.remove_git_envs()
.output()
.await
.unwrap();
let rev = resolve_bleeding_edge(repo).await.unwrap();
let head = git::git_cmd("git rev-parse")
.unwrap()
.args(["rev-parse", "HEAD"])
.current_dir(repo)
.remove_git_envs()
.output()
.await
.unwrap()
.stdout;
let head = String::from_utf8_lossy(&head).trim().to_string();
assert_eq!(rev, Some(head));
}
// A 3-day cooldown excludes the 1-day-old v2.0.0, so the 5-day-old
// release-candidate tag on the older commit is selected instead.
#[tokio::test]
async fn test_select_update_revision_uses_cooldown_bucket() {
let tmp = setup_test_repo().await;
let repo = tmp.path();
create_backdated_commit(repo, "candidate", 5).await;
create_lightweight_tag(repo, "v2.0.0-rc1").await;
create_lightweight_tag(repo, "totally-different").await;
create_backdated_commit(repo, "latest", 1).await;
create_lightweight_tag(repo, "v2.0.0").await;
let tag_timestamps = list_tag_metadata(repo).await.unwrap();
let rev = select_update_revision(repo, "v2.0.0", false, 3, &tag_timestamps)
.await
.unwrap();
assert_eq!(rev, Some("v2.0.0-rc1".to_string()));
}
// If every tag is younger than the cooldown window, no update is proposed.
#[tokio::test]
async fn test_select_update_revision_returns_none_when_all_tags_too_new() {
let tmp = setup_test_repo().await;
let repo = tmp.path();
create_backdated_commit(repo, "recent-1", 2).await;
create_lightweight_tag(repo, "v1.0.0").await;
create_backdated_commit(repo, "recent-2", 1).await;
create_lightweight_tag(repo, "v1.1.0").await;
let tag_timestamps = list_tag_metadata(repo).await.unwrap();
let rev = select_update_revision(repo, "v1.1.0", false, 5, &tag_timestamps)
.await
.unwrap();
assert_eq!(rev, None);
}
// With a 5-day cooldown, only the 10-day-old v1.0.0 is eligible; newer
// buckets (4 and 1 days old) are ignored.
#[tokio::test]
async fn test_select_update_revision_picks_oldest_eligible_bucket() {
let tmp = setup_test_repo().await;
let repo = tmp.path();
create_backdated_commit(repo, "oldest", 10).await;
create_lightweight_tag(repo, "v1.0.0").await;
create_backdated_commit(repo, "mid", 4).await;
create_lightweight_tag(repo, "v1.1.0").await;
create_backdated_commit(repo, "newest", 1).await;
create_lightweight_tag(repo, "v1.2.0").await;
let tag_timestamps = list_tag_metadata(repo).await.unwrap();
let rev = select_update_revision(repo, "v1.2.0", false, 5, &tag_timestamps)
.await
.unwrap();
assert_eq!(rev, Some("v1.0.0".to_string()));
}
// Among tags on the same eligible commit, a version-like tag beats a
// non-version tag even when the current rev is the non-version one.
#[tokio::test]
async fn test_select_update_revision_prefers_version_like_tags() {
let tmp = setup_test_repo().await;
let repo = tmp.path();
create_backdated_commit(repo, "eligible", 2).await;
create_lightweight_tag(repo, "moving-tag").await;
create_lightweight_tag(repo, "v1.0.0").await;
let tag_timestamps = list_tag_metadata(repo).await.unwrap();
let rev = select_update_revision(repo, "moving-tag", false, 1, &tag_timestamps)
.await
.unwrap();
assert_eq!(rev, Some("v1.0.0".to_string()));
}
// When several version-like tags share a commit, the one whose string is
// closest to the current rev ("v1.2.3") wins — here "v1.2.0".
#[tokio::test]
async fn test_select_update_revision_picks_closest_version_string() {
let tmp = setup_test_repo().await;
let repo = tmp.path();
create_backdated_commit(repo, "eligible", 3).await;
create_lightweight_tag(repo, "v1.2.0").await;
create_lightweight_tag(repo, "foo-1.2.0").await;
create_lightweight_tag(repo, "v2.0.0").await;
let tag_timestamps = list_tag_metadata(repo).await.unwrap();
let rev = select_update_revision(repo, "v1.2.3", false, 1, &tag_timestamps)
.await
.unwrap();
assert_eq!(rev, Some("v1.2.0".to_string()));
}
// Equal-timestamp semver tags are ordered by version, highest first, so the
// output is deterministic regardless of creation order.
#[tokio::test]
async fn test_list_tag_metadata_stable_order_for_equal_timestamps() {
let tmp = setup_test_repo().await;
let repo = tmp.path();
create_backdated_commit(repo, "release", 5).await;
create_lightweight_tag(repo, "v1.0.0").await;
create_lightweight_tag(repo, "v1.0.3").await;
create_lightweight_tag(repo, "v1.0.5").await;
create_lightweight_tag(repo, "v1.0.2").await;
let timestamps = list_tag_metadata(repo).await.unwrap();
let tags: Vec<&str> = timestamps.iter().map(|tag| tag.tag.as_str()).collect();
assert_eq!(tags, vec!["v1.0.5", "v1.0.3", "v1.0.2", "v1.0.0"]);
}
// Equal-timestamp non-semver tags fall back to a deterministic
// (lexicographic) order.
#[tokio::test]
async fn test_list_tag_metadata_deterministic_order_for_equal_timestamp_non_semver() {
let tmp = setup_test_repo().await;
let repo = tmp.path();
create_backdated_commit(repo, "release", 5).await;
create_lightweight_tag(repo, "beta").await;
create_lightweight_tag(repo, "alpha").await;
create_lightweight_tag(repo, "gamma").await;
let timestamps = list_tag_metadata(repo).await.unwrap();
let tags: Vec<&str> = timestamps.iter().map(|tag| tag.tag.as_str()).collect();
assert_eq!(tags, vec!["alpha", "beta", "gamma"]);
}
// Only the specific "unknown option: --no-lazy-fetch" stderr marks an old
// git; other fatal output must not be misclassified.
#[test]
fn test_no_lazy_fetch_unsupported() {
assert!(no_lazy_fetch_unsupported(
b"unknown option: --no-lazy-fetch\n"
));
assert!(!no_lazy_fetch_unsupported(
b"fatal: Not a valid object name 1234567890abcdef1234567890abcdef12345678^{commit}\n"
));
}
}