use std::ops::Range;
use std::path::{Path, PathBuf};
use anyhow::{Context, Result};
use futures::{StreamExt, TryStreamExt};
use rustc_hash::FxHashMap;
use crate::cli::ExitStatus;
use crate::cli::auto_update::config::write_new_config;
use crate::cli::auto_update::display::{apply_repo_updates, warn_frozen_mismatches};
use crate::cli::auto_update::source::{collect_repo_sources, evaluate_repo_source};
use crate::cli::reporter::AutoUpdateReporter;
use crate::cli::run::Selectors;
use crate::config::GlobPatterns;
use crate::fs::CWD;
use crate::printer::Printer;
use crate::run::CONCURRENCY;
use crate::store::Store;
use crate::workspace::{Project, Workspace};
mod config;
mod display;
mod repository;
mod source;
/// A repository revision: the `rev` value plus the optionally recorded
/// frozen commit that accompanies it.
#[derive(Default, Clone)]
struct Revision {
    rev: String,
    // Commit hash this rev is frozen to, when one is recorded (presumably in
    // a trailing comment — see `FrozenCommentSite`).
    frozen: Option<String>,
}
/// One occurrence of a repository inside a single project's config file.
struct RepoUsage<'a> {
    project: &'a Project,
    // How many remotes the project declares, and which of them this usage is.
    remote_count: usize,
    remote_index: usize,
    // Config-file line holding the `rev` entry for this usage.
    rev_line_number: usize,
    // Frozen commit currently recorded for this usage, and where it appears.
    current_frozen: Option<String>,
    current_frozen_site: Option<FrozenCommentSite>,
}
/// All usages of a repository that currently share the same `rev` value.
struct RepoTarget<'a> {
    repo: &'a str,
    current_rev: &'a str,
    // Hook ids the workspace requires from this repo; consumed by the
    // evaluation logic in the `source` submodule.
    required_hook_ids: Vec<&'a str>,
    usages: Vec<RepoUsage<'a>>,
}
/// A repository URL together with every rev-grouped target that uses it.
struct RepoSource<'a> {
    repo: &'a str,
    targets: Vec<RepoTarget<'a>>,
}
/// What to do about a frozen comment that no longer matches its rev.
enum FrozenMismatchAction {
    // Rewrite the frozen comment with the given commit.
    ReplaceWith(String),
    // Delete the stale frozen comment.
    Remove,
    // Leave the mismatch alone; no replacement could be determined.
    NoReplacement,
}
/// Whether a commit is known to exist in the repository.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum CommitPresence {
    Present,
    Absent,
    // Presence could not be determined.
    Unknown,
}
/// Why a recorded frozen commit is considered inconsistent with its rev.
enum FrozenMismatchReason {
    // The rev resolves, but to a commit other than the recorded one.
    ResolvesToDifferentCommit,
    // The rev could not be resolved at all.
    Unresolvable,
}
/// A detected inconsistency between a config entry's rev and its recorded
/// frozen commit, plus the action chosen to resolve it.
struct FrozenMismatch<'a> {
    project: &'a Project,
    // NOTE(review): named `remote_size` here but `remote_count` in
    // `RepoUsage` — appears to carry the same remote-count value; consider
    // unifying the names once all usages are in view.
    remote_size: usize,
    remote_index: usize,
    rev_line_number: usize,
    current_frozen: String,
    frozen_site: Option<FrozenCommentSite>,
    reason: FrozenMismatchReason,
    // Whether the current rev itself exists as a commit.
    current_rev_presence: CommitPresence,
    action: FrozenMismatchAction,
}
/// Location of a frozen-commit comment within a config-file line.
#[derive(Clone)]
struct FrozenCommentSite {
    line_number: usize,
    // Full text of the source line containing the comment.
    source_line: String,
    // Span of the frozen value within `source_line` — TODO confirm whether it
    // covers the commit hash only or the whole comment.
    span: Range<usize>,
}
/// A parsed reference to a frozen commit found alongside a `rev` entry.
#[derive(Clone)]
struct FrozenRef {
    line_number: usize,
    current_frozen: Option<String>,
    site: Option<FrozenCommentSite>,
}
/// A git tag together with its timestamp and the commit it points at.
#[derive(Clone)]
struct TagTimestamp {
    tag: String,
    // Used as an ordering/cooldown key; unit (likely seconds since epoch) is
    // not established in this file — confirm against the `source` module.
    timestamp: u64,
    commit: String,
}
/// Include/exclude glob filters for tags, at global and per-repo scope.
struct TagFilters {
    global_include: GlobPatterns,
    global_exclude: GlobPatterns,
    // Keyed by repository URL. A per-repo include list replaces the global
    // include list for that repo (see `is_included`); excludes are additive.
    repo_include: FxHashMap<String, GlobPatterns>,
    repo_exclude: FxHashMap<String, GlobPatterns>,
}
impl TagFilters {
    /// Builds the combined global/per-repo tag filter set from raw CLI values.
    ///
    /// Fails when any glob pattern is invalid or a repo-scoped value is not of
    /// the form `<repo>=<pattern>`.
    fn new(
        include_tag: Vec<String>,
        exclude_tag: Vec<String>,
        repo_include_tag: Vec<String>,
        repo_exclude_tag: Vec<String>,
    ) -> Result<Self> {
        let global_include =
            GlobPatterns::new(include_tag).context("Invalid --include-tag pattern")?;
        let global_exclude =
            GlobPatterns::new(exclude_tag).context("Invalid --exclude-tag pattern")?;
        let repo_include = build_repo_tag_patterns(repo_include_tag, "--repo-include-tag")?;
        let repo_exclude = build_repo_tag_patterns(repo_exclude_tag, "--repo-exclude-tag")?;
        Ok(Self {
            global_include,
            global_exclude,
            repo_include,
            repo_exclude,
        })
    }
    /// Returns the tags that pass the include filter and are not excluded,
    /// preserving their input order.
    fn filter<'a>(&self, repo: &str, tags: &'a [TagTimestamp]) -> Vec<&'a TagTimestamp> {
        let mut kept = Vec::new();
        for entry in tags {
            if self.is_included(repo, &entry.tag) && !self.is_excluded(repo, &entry.tag) {
                kept.push(entry);
            }
        }
        kept
    }
    /// A repo-specific include list, when present, fully replaces the global
    /// one for that repo; an empty list includes everything.
    fn is_included(&self, repo: &str, tag: &str) -> bool {
        match self.repo_include.get(repo) {
            Some(patterns) => patterns.is_empty() || patterns.is_match(tag),
            None => self.global_include.is_empty() || self.global_include.is_match(tag),
        }
    }
    /// Excludes are additive: matching either the global or the repo-specific
    /// exclude list rejects the tag.
    fn is_excluded(&self, repo: &str, tag: &str) -> bool {
        if self.global_exclude.is_match(tag) {
            return true;
        }
        match self.repo_exclude.get(repo) {
            Some(patterns) => patterns.is_match(tag),
            None => false,
        }
    }
}
fn build_repo_tag_patterns(
values: Vec<String>,
option: &str,
) -> Result<FxHashMap<String, GlobPatterns>> {
let mut patterns_by_repo: FxHashMap<String, Vec<String>> = FxHashMap::default();
for value in values {
let (repo, pattern) = value.rsplit_once('=').ok_or_else(|| {
anyhow::anyhow!("Invalid {option} value `{value}`: expected `<repo>=<pattern>`")
})?;
if repo.is_empty() || pattern.is_empty() {
anyhow::bail!("Invalid {option} value `{value}`: expected `<repo>=<pattern>`");
}
patterns_by_repo
.entry(repo.to_string())
.or_default()
.push(pattern.to_string());
}
patterns_by_repo
.into_iter()
.map(|(repo, patterns)| {
Ok((
repo,
GlobPatterns::new(patterns).with_context(|| format!("Invalid {option} pattern"))?,
))
})
.collect()
}
/// Successful evaluation of a `RepoTarget`: the revision to record and any
/// frozen-comment mismatches discovered along the way.
struct ResolvedRepoUpdate<'a> {
    revision: Revision,
    frozen_mismatches: Vec<FrozenMismatch<'a>>,
}
/// Outcome of evaluating one target — either a resolved update or the error
/// that prevented it.
struct RepoUpdate<'a> {
    target: &'a RepoTarget<'a>,
    result: Result<ResolvedRepoUpdate<'a>>,
}
/// Hashable key identifying a project by its config-file path.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
struct ProjectUpdateKey<'a> {
    config_file: &'a Path,
}
impl<'a> ProjectUpdateKey<'a> {
    /// Path of the config file this key identifies.
    fn config_file(self) -> &'a Path {
        self.config_file
    }
}
impl<'a> From<&'a Project> for ProjectUpdateKey<'a> {
    fn from(project: &'a Project) -> Self {
        Self {
            config_file: project.config_file(),
        }
    }
}
// Per-project revisions to write back. A `None` slot appears to mean "leave
// this entry unchanged" — `auto_update` only writes a config when at least
// one slot is `Some`.
type ProjectUpdates<'a> = FxHashMap<ProjectUpdateKey<'a>, Vec<Option<Revision>>>;
/// Aggregate outcome of applying all repo updates, used to pick the exit code.
struct ApplyRepoUpdatesResult {
    // At least one update failed.
    failure: bool,
    // At least one update was available (drives `--exit-code` behavior).
    has_updates: bool,
}
/// What happened for one repo usage, for rendering in the output.
enum DisplayEventKind {
    Update { current: Revision, next: Revision },
    FrozenUpdate { current: String, next: String },
    FrozenRemove { current: String },
    UpToDate { current: Revision },
    Failure { error: String },
}
/// Which output stream an event should be printed to.
#[derive(Clone, Copy, Eq, PartialEq)]
enum DisplayStream {
    Stdout,
    Stderr,
}
/// One renderable update event, tied back to its config-file location.
struct DisplayEvent<'a> {
    stream: DisplayStream,
    project: &'a Project,
    repo: &'a str,
    remote_index: usize,
    line_number: usize,
    kind: DisplayEventKind,
}
/// Context needed to warn about one frozen-comment mismatch.
struct FrozenWarningEvent<'a> {
    project: &'a Project,
    repo: &'a str,
    current_rev: &'a str,
    remote_index: usize,
    mismatch: &'a FrozenMismatch<'a>,
}
// Occurrence count of a repo URL within a given config file.
type RepoOccurrences<'a> = FxHashMap<(&'a Path, &'a str), usize>;
/// Entry point for the auto-update command: resolves the newest acceptable
/// revision for every selected hook repository in the workspace and rewrites
/// the projects' config files in place.
///
/// Returns `ExitStatus::Failure` when any repo failed to update, or — when
/// `exit_code` is set — when any update was available. With `dry_run`,
/// changes are reported but no config file is written.
#[expect(clippy::fn_params_excessive_bools)]
pub(crate) async fn auto_update(
    store: &Store,
    config: Option<PathBuf>,
    filter_repos: Vec<String>,
    exclude_repos: Vec<String>,
    include_tag: Vec<String>,
    exclude_tag: Vec<String>,
    repo_include_tag: Vec<String>,
    repo_exclude_tag: Vec<String>,
    verbose: bool,
    bleeding_edge: bool,
    freeze: bool,
    jobs: usize,
    dry_run: bool,
    exit_code: bool,
    cooldown_days: u8,
    printer: Printer,
) -> Result<ExitStatus> {
    // Validate all tag filter patterns up front, before any repo evaluation.
    let tag_filters =
        TagFilters::new(include_tag, exclude_tag, repo_include_tag, repo_exclude_tag)?;
    let workspace_root = Workspace::find_root(config.as_deref(), &CWD)?;
    let selectors = Selectors::default();
    let workspace = Workspace::discover(store, workspace_root, config, Some(&selectors), true)?;
    // `jobs == 0` means "use the default concurrency".
    let jobs = if jobs == 0 { *CONCURRENCY } else { jobs };
    let reporter = AutoUpdateReporter::new(printer);
    let repo_sources = collect_repo_sources(&workspace)?;
    // An empty `filter_repos` selects every repo; excludes always apply.
    let sources = repo_sources.iter().filter(|repo_source| {
        (filter_repos.is_empty() || filter_repos.iter().any(|repo| repo == repo_source.repo))
            && !exclude_repos.iter().any(|repo| repo == repo_source.repo)
    });
    // Evaluate repos concurrently (up to `jobs` in flight). Each evaluation
    // yields a collection of `RepoUpdate`s (one per rev-grouped target),
    // flattened into a single list; the first `Err` aborts the whole run.
    let outcomes: Vec<RepoUpdate<'_>> = futures::stream::iter(sources)
        .map(async |repo_source| {
            let progress = reporter.on_update_start(repo_source.repo);
            let result = evaluate_repo_source(
                repo_source,
                bleeding_edge,
                freeze,
                cooldown_days,
                &tag_filters,
            )
            .await;
            reporter.on_update_complete(progress);
            result
        })
        .buffer_unordered(jobs)
        .try_collect::<Vec<_>>()
        .await?
        .into_iter()
        .flatten()
        .collect();
    reporter.on_complete();
    // Surface frozen-comment mismatches before the per-repo update output.
    warn_frozen_mismatches(&outcomes, printer)?;
    let mut project_updates: ProjectUpdates<'_> = FxHashMap::default();
    let apply_result =
        apply_repo_updates(outcomes, verbose, dry_run, printer, &mut project_updates)?;
    if !dry_run {
        // Only rewrite config files that have at least one changed revision.
        for (project, revisions) in project_updates {
            if revisions.iter().any(Option::is_some) {
                write_new_config(project.config_file(), &revisions).await?;
            }
        }
    }
    if apply_result.failure || (exit_code && apply_result.has_updates) {
        return Ok(ExitStatus::Failure);
    }
    Ok(ExitStatus::Success)
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Builds a `TagTimestamp` whose only meaningful field is the tag name.
    fn make_tag(name: &str) -> TagTimestamp {
        TagTimestamp {
            tag: name.to_string(),
            timestamp: 0,
            commit: String::new(),
        }
    }
    /// Applies `filters` and returns the surviving tag names, in order.
    fn surviving(filters: &TagFilters, repo: &str, tags: &[TagTimestamp]) -> Vec<String> {
        let kept = filters.filter(repo, tags);
        kept.into_iter().map(|entry| entry.tag.clone()).collect()
    }
    #[test]
    fn tag_filters_keep_all_tags_without_filters() {
        let filters = TagFilters::new(Vec::new(), Vec::new(), Vec::new(), Vec::new()).unwrap();
        let tags = [make_tag("v1.0.0"), make_tag("nightly")];
        let kept = surviving(&filters, "https://example.com/repo", &tags);
        assert_eq!(kept, vec!["v1.0.0", "nightly"]);
    }
    #[test]
    fn tag_filters_repo_include_overrides_global_include() {
        let filters = TagFilters::new(
            vec!["v1.*".to_string()],
            Vec::new(),
            vec!["https://example.com/repo=v*.1.0".to_string()],
            Vec::new(),
        )
        .unwrap();
        let tags = [make_tag("v1.0.0"), make_tag("v1.1.0"), make_tag("v2.1.0")];
        // The repo-specific include list replaces the global one for this repo…
        assert_eq!(
            surviving(&filters, "https://example.com/repo", &tags),
            vec!["v1.1.0", "v2.1.0"]
        );
        // …while other repos still use the global include list.
        assert_eq!(
            surviving(&filters, "https://example.com/other", &tags),
            vec!["v1.0.0", "v1.1.0"]
        );
    }
    #[test]
    fn tag_filters_apply_excludes_after_includes() {
        let filters = TagFilters::new(
            vec!["v*".to_string()],
            vec!["*-rc*".to_string()],
            Vec::new(),
            vec!["https://example.com/repo=v2.*".to_string()],
        )
        .unwrap();
        let tags = [
            make_tag("v1.0.0"),
            make_tag("v2.0.0"),
            make_tag("v3.0.0-rc1"),
            make_tag("nightly"),
        ];
        // Global and repo-specific excludes both apply for the matching repo.
        assert_eq!(
            surviving(&filters, "https://example.com/repo", &tags),
            vec!["v1.0.0"]
        );
        assert_eq!(
            surviving(&filters, "https://example.com/other", &tags),
            vec!["v1.0.0", "v2.0.0"]
        );
    }
    #[test]
    fn tag_filters_reject_invalid_repo_filter_values() {
        let result = TagFilters::new(
            Vec::new(),
            Vec::new(),
            vec!["https://example.com/repo".to_string()],
            Vec::new(),
        );
        match result {
            Err(err) => assert!(
                err.to_string().contains("expected `<repo>=<pattern>`"),
                "{err:#}"
            ),
            Ok(_) => panic!("expected invalid repo tag filter to fail"),
        }
    }
}