use crate::cli::StatsCommand;
use ktstr::cli;
/// Dispatch the `stats` subcommand tree.
///
/// `None` prints the default stats report; each `Some(..)` variant forwards
/// to the matching `ktstr::cli` entry point. Errors are rendered with the
/// alternate (`{:#}`) format — which prints the full error chain for
/// anyhow-style errors — into a `String` for the caller to surface.
///
/// On a non-zero comparison exit code this function terminates the process
/// via `std::process::exit` rather than returning.
pub(crate) fn run_stats(command: &Option<StatsCommand>) -> Result<(), String> {
    // Shared adapter for the simple report-producing subcommands:
    // print the report on success, stringify the error chain on failure.
    fn emit<E: std::fmt::Display>(result: Result<String, E>) -> Result<(), String> {
        result.map(|s| print!("{s}")).map_err(|e| format!("{e:#}"))
    }
    match command {
        None => {
            if let Some(output) = cli::print_stats_report() {
                print!("{output}");
            }
            Ok(())
        }
        Some(StatsCommand::List) => cli::list_runs().map_err(|e| format!("{e:#}")),
        Some(StatsCommand::ListMetrics { json }) => emit(cli::list_metrics(*json)),
        Some(StatsCommand::ListValues { json, dir }) => {
            emit(cli::list_values(*json, dir.as_deref()))
        }
        Some(StatsCommand::ShowHost { run, dir }) => {
            emit(cli::show_run_host(run, dir.as_deref()))
        }
        Some(StatsCommand::ExplainSidecar { run, dir, json }) => {
            emit(cli::explain_sidecar(run, dir.as_deref(), *json))
        }
        Some(StatsCommand::Compare {
            filter,
            threshold,
            policy,
            dir,
            kernel,
            project_commit,
            kernel_commit,
            run_source,
            scheduler,
            topology,
            work_type,
            a_kernel,
            a_project_commit,
            a_kernel_commit,
            a_run_source,
            a_scheduler,
            a_topology,
            a_work_type,
            b_kernel,
            b_project_commit,
            b_kernel_commit,
            b_run_source,
            b_scheduler,
            b_topology,
            b_work_type,
            no_average,
        }) => {
            // --threshold and --policy are mutually exclusive at the clap
            // level; resolve whichever was supplied into a ComparisonPolicy.
            let resolved_policy = match (threshold, policy.as_ref()) {
                (Some(t), None) => {
                    let p = cli::ComparisonPolicy::uniform(*t);
                    p.validate().map_err(|e| format!("{e:#}"))?;
                    p
                }
                (None, Some(path)) => {
                    cli::ComparisonPolicy::load_json(path).map_err(|e| format!("{e:#}"))?
                }
                (None, None) => cli::ComparisonPolicy::default(),
                (Some(_), Some(_)) => {
                    unreachable!(
                        "clap `conflicts_with` on --threshold / --policy \
                         must enforce mutual exclusion at parse time",
                    );
                }
            };
            // Best-effort repo discovery: the project repo from the current
            // working directory, the kernel repo from the ktstr kernel env
            // (preferring a recovered local source tree when one exists).
            // Either may be absent; commit specs then pass through as
            // literal filters inside resolve_commit_specs.
            let project_repo = std::env::current_dir()
                .ok()
                .and_then(|cwd| gix::discover(cwd).ok());
            let kernel_repo = ktstr::ktstr_kernel_env()
                .map(std::path::PathBuf::from)
                .and_then(|p| {
                    let target = ktstr::cache::recover_local_source_tree(&p).unwrap_or(p);
                    gix::open(target).ok()
                });
            // Expand every user-supplied commit spec (revspecs, ranges,
            // literals) into concrete short hashes per side.
            let project_commit =
                resolve_commit_specs(project_repo.as_ref(), project_commit, "project-commit");
            let kernel_commit =
                resolve_commit_specs(kernel_repo.as_ref(), kernel_commit, "kernel-commit");
            let a_project_commit =
                resolve_commit_specs(project_repo.as_ref(), a_project_commit, "a-project-commit");
            let a_kernel_commit =
                resolve_commit_specs(kernel_repo.as_ref(), a_kernel_commit, "a-kernel-commit");
            let b_project_commit =
                resolve_commit_specs(project_repo.as_ref(), b_project_commit, "b-project-commit");
            let b_kernel_commit =
                resolve_commit_specs(kernel_repo.as_ref(), b_kernel_commit, "b-kernel-commit");
            let build = BuildCompareFilters {
                shared_kernel: kernel.clone(),
                shared_project_commit: project_commit,
                shared_kernel_commit: kernel_commit,
                shared_run_source: run_source.clone(),
                shared_scheduler: scheduler.clone(),
                shared_topology: topology.clone(),
                shared_work_type: work_type.clone(),
                a_kernel: a_kernel.clone(),
                a_project_commit,
                a_kernel_commit,
                a_run_source: a_run_source.clone(),
                a_scheduler: a_scheduler.clone(),
                a_topology: a_topology.clone(),
                a_work_type: a_work_type.clone(),
                b_kernel: b_kernel.clone(),
                b_project_commit,
                b_kernel_commit,
                b_run_source: b_run_source.clone(),
                b_scheduler: b_scheduler.clone(),
                b_topology: b_topology.clone(),
                b_work_type: b_work_type.clone(),
            };
            let (filter_a, filter_b) = build.build();
            let exit = cli::compare_partitions(
                &filter_a,
                &filter_b,
                filter.as_deref(),
                &resolved_policy,
                dir.as_deref(),
                *no_average,
            )
            .map_err(|e| format!("{e:#}"))?;
            // Propagate a non-zero comparison verdict as the process exit
            // code (e.g. regressions found) rather than as an Err.
            if exit != 0 {
                std::process::exit(exit);
            }
            Ok(())
        }
    }
}
/// Heuristic: does `input` look like a bare git hash prefix (optionally
/// carrying a `-dirty` suffix) rather than a revspec?
///
/// Accepts 7 to 40 ASCII hex digits (any case), with or without a trailing
/// literal `-dirty` marker. Branch names, `HEAD~1`-style revspecs, ranges,
/// and too-short or too-long strings are all rejected.
pub(crate) fn looks_like_literal_hash(input: &str) -> bool {
    // Peel off the optional `-dirty` marker before validating the core.
    let core = match input.strip_suffix("-dirty") {
        Some(stripped) => stripped,
        None => input,
    };
    (7..=40).contains(&core.len()) && core.bytes().all(|b| b.is_ascii_hexdigit())
}
/// Resolve each raw commit-spec string in `raw` into one or more 7-char
/// short hashes against `repo`, falling back to the literal input when
/// resolution is impossible or the revspec form is unsupported.
///
/// Per-entry behavior:
/// - Plain revspecs (`HEAD`, branch, tag, `<oid>^!`) resolve to the short
///   hex of the single target commit.
/// - Ranges (`a..b`) expand to every commit reachable from `b` but hidden
///   by `a`, one short hash each (range entries never get a `-dirty`
///   suffix; only `format_short` applies it).
/// - Exclude / Merge / IncludeOnlyParents revspec forms, and inputs that do
///   not parse at all, pass through unchanged; a warning is printed unless
///   the input already looks like a literal hash.
///
/// When the worktree is dirty, a spec that resolves to the HEAD commit gets
/// `-dirty` appended to its short hash. With no repo, every input is
/// returned as-is. `flag_name` is used only in warning messages.
pub(crate) fn resolve_commit_specs(
    repo: Option<&gix::Repository>,
    raw: &[String],
    flag_name: &str,
) -> Vec<String> {
    let Some(repo) = repo else {
        // No repository to resolve against: treat every input as a literal.
        return raw.to_vec();
    };
    // HEAD id and worktree dirtiness are computed once up front; dirtiness
    // is only checked when a HEAD id exists (unborn HEAD → not dirty).
    let head_oid: Option<gix::ObjectId> = repo.head_id().ok().map(|id| id.detach());
    let head_dirty: bool = head_oid
        .as_ref()
        .and_then(|_| ktstr::test_support::repo_is_dirty(repo))
        .unwrap_or(false);
    // 7-char short hex; the `-dirty` suffix applies only to the commit that
    // IS the current HEAD — historical commits stay unsuffixed.
    let format_short = |id: gix::ObjectId| -> String {
        let short = id.to_hex_with_len(7).to_string();
        if head_dirty && head_oid == Some(id) {
            format!("{short}-dirty")
        } else {
            short
        }
    };
    let mut out: Vec<String> = Vec::with_capacity(raw.len());
    for input in raw {
        match repo.rev_parse(input.as_str()) {
            Ok(spec) => match spec.detach() {
                // Single-commit forms: Include (`HEAD`, tag, hash) and
                // ExcludeParents (`<oid>^!`) both name exactly one commit.
                gix::revision::plumbing::Spec::Include(id)
                | gix::revision::plumbing::Spec::ExcludeParents(id) => {
                    out.push(format_short(id));
                }
                // Range form: walk commits reachable from `to`, hiding
                // everything reachable from `from` (i.e. `from..to`).
                gix::revision::plumbing::Spec::Range { from, to } => {
                    match repo.rev_walk([to]).with_hidden([from]).all() {
                        Ok(walk) => {
                            for info in walk.flatten() {
                                out.push(info.id.to_hex_with_len(7).to_string());
                            }
                        }
                        Err(err) => {
                            eprintln!(
                                "cargo ktstr: --{flag_name} range '{input}' could \
                                 not be expanded: {err}; using as literal filter",
                            );
                            out.push(input.clone());
                        }
                    }
                }
                // Revspec forms we have no sensible expansion for: warn and
                // keep the raw string so it can still act as a text filter.
                gix::revision::plumbing::Spec::Exclude(_)
                | gix::revision::plumbing::Spec::Merge { .. }
                | gix::revision::plumbing::Spec::IncludeOnlyParents(_) => {
                    eprintln!(
                        "cargo ktstr: --{flag_name} '{input}' uses an unsupported \
                         revspec form (Exclude/Merge/IncludeOnlyParents); using \
                         as literal filter",
                    );
                    out.push(input.clone());
                }
            },
            Err(_) => {
                // Unresolvable input: stay silent for things that already
                // look like hashes (e.g. `abc1234-dirty`, prefixes of
                // commits not in this repo), warn about everything else.
                if !looks_like_literal_hash(input) {
                    eprintln!(
                        "cargo ktstr: --{flag_name} '{input}' did not resolve as \
                         a revspec; using as literal filter",
                    );
                }
                out.push(input.clone());
            }
        }
    }
    out
}
/// Raw `stats compare` filter inputs, grouped as shared values (applying to
/// both comparison sides) plus per-side A/B overrides.
///
/// For every dimension, [`BuildCompareFilters::build`] uses the per-side
/// list when it is non-empty and falls back to the shared list otherwise.
/// An empty `Vec` throughout means "no filtering on that dimension".
#[derive(Debug, Clone, Default)]
pub(crate) struct BuildCompareFilters {
    // Shared filters: the default for a side whose own override is empty.
    pub(crate) shared_kernel: Vec<String>,
    pub(crate) shared_project_commit: Vec<String>,
    pub(crate) shared_kernel_commit: Vec<String>,
    pub(crate) shared_run_source: Vec<String>,
    pub(crate) shared_scheduler: Vec<String>,
    pub(crate) shared_topology: Vec<String>,
    pub(crate) shared_work_type: Vec<String>,
    // A-side overrides (each wins over its shared counterpart when non-empty).
    pub(crate) a_kernel: Vec<String>,
    pub(crate) a_project_commit: Vec<String>,
    pub(crate) a_kernel_commit: Vec<String>,
    pub(crate) a_run_source: Vec<String>,
    pub(crate) a_scheduler: Vec<String>,
    pub(crate) a_topology: Vec<String>,
    pub(crate) a_work_type: Vec<String>,
    // B-side overrides (same precedence rule as the A side).
    pub(crate) b_kernel: Vec<String>,
    pub(crate) b_project_commit: Vec<String>,
    pub(crate) b_kernel_commit: Vec<String>,
    pub(crate) b_run_source: Vec<String>,
    pub(crate) b_scheduler: Vec<String>,
    pub(crate) b_topology: Vec<String>,
    pub(crate) b_work_type: Vec<String>,
}
impl BuildCompareFilters {
    /// Materialize the A-side and B-side row filters.
    ///
    /// For each dimension a side's own override is used when non-empty;
    /// otherwise that side inherits the shared list.
    pub(crate) fn build(&self) -> (ktstr::cli::RowFilter, ktstr::cli::RowFilter) {
        // Per-side value wins; empty means "fall back to shared".
        fn choose(per_side: &[String], shared: &[String]) -> Vec<String> {
            if per_side.is_empty() {
                shared.to_vec()
            } else {
                per_side.to_vec()
            }
        }
        let side_a = ktstr::cli::RowFilter {
            kernels: choose(&self.a_kernel, &self.shared_kernel),
            project_commits: choose(&self.a_project_commit, &self.shared_project_commit),
            kernel_commits: choose(&self.a_kernel_commit, &self.shared_kernel_commit),
            run_sources: choose(&self.a_run_source, &self.shared_run_source),
            schedulers: choose(&self.a_scheduler, &self.shared_scheduler),
            topologies: choose(&self.a_topology, &self.shared_topology),
            work_types: choose(&self.a_work_type, &self.shared_work_type),
        };
        let side_b = ktstr::cli::RowFilter {
            kernels: choose(&self.b_kernel, &self.shared_kernel),
            project_commits: choose(&self.b_project_commit, &self.shared_project_commit),
            kernel_commits: choose(&self.b_kernel_commit, &self.shared_kernel_commit),
            run_sources: choose(&self.b_run_source, &self.shared_run_source),
            schedulers: choose(&self.b_scheduler, &self.shared_scheduler),
            topologies: choose(&self.b_topology, &self.shared_topology),
            work_types: choose(&self.b_work_type, &self.shared_work_type),
        };
        (side_a, side_b)
    }
}
#[cfg(test)]
mod tests {
    //! Unit tests for the compare-filter builder, commit-spec resolution,
    //! and the literal-hash heuristic. Repo-backed tests construct
    //! throwaway repositories in a tempdir via `init_repo_with_chain`.
    use super::*;
    #[test]
    fn build_compare_filters_empty_yields_default_default() {
        let b = BuildCompareFilters::default();
        let (fa, fb) = b.build();
        assert!(fa.kernels.is_empty());
        assert!(fa.project_commits.is_empty());
        assert!(fa.kernel_commits.is_empty());
        assert!(fa.run_sources.is_empty());
        assert!(fa.schedulers.is_empty());
        assert!(fa.topologies.is_empty());
        assert!(fa.work_types.is_empty());
        assert_eq!(fa.kernels, fb.kernels);
        assert_eq!(fa.project_commits, fb.project_commits);
        assert_eq!(fa.kernel_commits, fb.kernel_commits);
        assert_eq!(fa.run_sources, fb.run_sources);
        assert_eq!(fa.schedulers, fb.schedulers);
        assert_eq!(fa.topologies, fb.topologies);
        assert_eq!(fa.work_types, fb.work_types);
    }
    #[test]
    fn build_compare_filters_per_side_kernel_commit_overrides_shared() {
        let b = BuildCompareFilters {
            shared_kernel_commit: vec!["abcdef1".to_string(), "fedcba2".to_string()],
            a_kernel_commit: vec!["111aaaa".to_string()],
            ..BuildCompareFilters::default()
        };
        let (fa, fb) = b.build();
        assert_eq!(
            fa.kernel_commits,
            vec!["111aaaa"],
            "A overrides shared kernel-commit",
        );
        assert_eq!(
            fb.kernel_commits,
            vec!["abcdef1", "fedcba2"],
            "B retains shared kernel-commit default",
        );
    }
    #[test]
    fn build_compare_filters_disjoint_per_side_kernel_commit_slices() {
        let b = BuildCompareFilters {
            a_kernel_commit: vec!["abcdef1".to_string()],
            b_kernel_commit: vec!["fedcba2".to_string()],
            ..BuildCompareFilters::default()
        };
        let (fa, fb) = b.build();
        assert_eq!(fa.kernel_commits, vec!["abcdef1"]);
        assert_eq!(fb.kernel_commits, vec!["fedcba2"]);
        let slicing = ktstr::cli::derive_slicing_dims(&fa, &fb);
        assert_eq!(
            slicing,
            vec![ktstr::cli::Dimension::KernelCommit],
            "differing per-side kernel-commit must derive as a single \
             KernelCommit slicing dim",
        );
    }
    #[test]
    fn build_compare_filters_shared_kernel_pins_both_sides() {
        let b = BuildCompareFilters {
            shared_kernel: vec!["6.14".to_string()],
            ..BuildCompareFilters::default()
        };
        let (fa, fb) = b.build();
        assert_eq!(fa.kernels, vec!["6.14"]);
        assert_eq!(fb.kernels, vec!["6.14"]);
    }
    #[test]
    fn build_compare_filters_per_side_overrides_shared_for_that_side_only() {
        let b = BuildCompareFilters {
            shared_kernel: vec!["6.14".to_string(), "6.15".to_string()],
            a_kernel: vec!["6.13".to_string()],
            ..BuildCompareFilters::default()
        };
        let (fa, fb) = b.build();
        assert_eq!(fa.kernels, vec!["6.13"], "A overrides shared");
        assert_eq!(fb.kernels, vec!["6.14", "6.15"], "B retains shared default",);
    }
    #[test]
    fn build_compare_filters_disjoint_per_side_kernel_yields_two_filters() {
        let b = BuildCompareFilters {
            a_kernel: vec!["6.14".to_string()],
            b_kernel: vec!["6.15".to_string()],
            ..BuildCompareFilters::default()
        };
        let (fa, fb) = b.build();
        assert_eq!(fa.kernels, vec!["6.14"]);
        assert_eq!(fb.kernels, vec!["6.15"]);
    }
    #[test]
    fn build_compare_filters_per_side_scheduler_overrides_shared() {
        let b = BuildCompareFilters {
            shared_scheduler: vec!["scx_default".to_string()],
            a_scheduler: vec!["scx_alpha".to_string()],
            ..BuildCompareFilters::default()
        };
        let (fa, fb) = b.build();
        assert_eq!(
            fa.schedulers,
            vec!["scx_alpha".to_string()],
            "A overrides shared scheduler",
        );
        assert_eq!(
            fb.schedulers,
            vec!["scx_default".to_string()],
            "B retains shared scheduler when only --a-scheduler overrides",
        );
    }
    #[test]
    fn build_compare_filters_shared_pin_plus_per_side_slice() {
        let b = BuildCompareFilters {
            shared_kernel: vec!["6.14".to_string()],
            a_scheduler: vec!["scx_alpha".to_string()],
            b_scheduler: vec!["scx_beta".to_string()],
            ..BuildCompareFilters::default()
        };
        let (fa, fb) = b.build();
        assert_eq!(fa.kernels, vec!["6.14"]);
        assert_eq!(fb.kernels, vec!["6.14"]);
        assert_eq!(fa.schedulers, vec!["scx_alpha".to_string()]);
        assert_eq!(fb.schedulers, vec!["scx_beta".to_string()]);
        let slicing = ktstr::cli::derive_slicing_dims(&fa, &fb);
        assert_eq!(slicing, vec![ktstr::cli::Dimension::Scheduler]);
    }
    #[test]
    fn build_compare_filters_empty_run_sources_field_equal_on_both_sides() {
        let b = BuildCompareFilters::default();
        let (fa, fb) = b.build();
        assert!(
            fa.run_sources.is_empty(),
            "empty BuildCompareFilters must produce A-side filter with empty run_sources",
        );
        assert!(
            fb.run_sources.is_empty(),
            "empty BuildCompareFilters must produce B-side filter with empty run_sources",
        );
        assert_eq!(
            fa.run_sources, fb.run_sources,
            "both sides must agree on empty run_sources",
        );
    }
    #[test]
    fn build_compare_filters_disjoint_per_side_source_yields_two_filters() {
        let b = BuildCompareFilters {
            a_run_source: vec!["ci".to_string()],
            b_run_source: vec!["local".to_string()],
            ..BuildCompareFilters::default()
        };
        let (fa, fb) = b.build();
        assert_eq!(fa.run_sources, vec!["ci".to_string()]);
        assert_eq!(fb.run_sources, vec!["local".to_string()]);
    }
    #[test]
    fn build_compare_filters_shared_source_pins_both_sides() {
        let b = BuildCompareFilters {
            shared_run_source: vec!["ci".to_string()],
            ..BuildCompareFilters::default()
        };
        let (fa, fb) = b.build();
        assert_eq!(fa.run_sources, vec!["ci".to_string()]);
        assert_eq!(fb.run_sources, vec!["ci".to_string()]);
    }
    #[test]
    fn build_compare_filters_per_side_source_overrides_shared_for_that_side_only() {
        let b = BuildCompareFilters {
            shared_run_source: vec!["local".to_string(), "archive".to_string()],
            a_run_source: vec!["ci".to_string()],
            ..BuildCompareFilters::default()
        };
        let (fa, fb) = b.build();
        assert_eq!(fa.run_sources, vec!["ci".to_string()], "A overrides shared");
        assert_eq!(
            fb.run_sources,
            vec!["local".to_string(), "archive".to_string()],
            "B retains shared default",
        );
    }
    /// Create a fresh repository at `dir` holding a linear chain of `n`
    /// commits, each rewriting `file.txt` to `v{i}\n`; returns the commit
    /// ids oldest-first (so `chain.last()` is HEAD).
    fn init_repo_with_chain(dir: &std::path::Path, n: usize) -> Vec<gix::ObjectId> {
        let mut repo = gix::init(dir).expect("gix::init");
        let _ = repo
            .committer_or_set_generic_fallback()
            .expect("committer fallback");
        {
            // Give the repo an author fallback too, so commits succeed in
            // environments with no git identity configured.
            use gix::config::tree::gitoxide;
            let mut cfg = gix::config::File::new(gix::config::file::Metadata::api());
            cfg.set_raw_value(&gitoxide::Author::NAME_FALLBACK, "ktstr-test")
                .expect("set author name fallback");
            cfg.set_raw_value(
                &gitoxide::Author::EMAIL_FALLBACK,
                "ktstr-test@example.invalid",
            )
            .expect("set author email fallback");
            let mut snap = repo.config_snapshot_mut();
            snap.append(cfg);
        }
        let mut chain: Vec<gix::ObjectId> = Vec::with_capacity(n);
        for i in 0..n {
            let blob_id: gix::ObjectId = repo
                .write_blob(format!("v{i}\n").as_bytes())
                .expect("write blob")
                .detach();
            let tree = gix::objs::Tree {
                entries: vec![gix::objs::tree::Entry {
                    mode: gix::objs::tree::EntryKind::Blob.into(),
                    filename: "file.txt".into(),
                    oid: blob_id,
                }],
            };
            let tree_id: gix::ObjectId = repo.write_object(&tree).expect("write tree").detach();
            // First commit has no parent; each later commit chains onto the
            // previous tip.
            let parents: Vec<gix::ObjectId> = chain.last().copied().into_iter().collect();
            let commit_id: gix::ObjectId = repo
                .commit("HEAD", format!("c{i}"), tree_id, parents)
                .expect("commit")
                .detach();
            chain.push(commit_id);
        }
        chain
    }
    #[test]
    fn resolve_commit_specs_no_repo_passes_through_literal() {
        let raw = vec![
            "abc1234".to_string(),
            "main".to_string(),
            "HEAD".to_string(),
        ];
        let out = resolve_commit_specs(None, &raw, "test");
        assert_eq!(out, raw, "no repo → every input lands as-is");
    }
    #[test]
    fn resolve_commit_specs_head_resolves_to_short_hash() {
        let tmp = tempfile::TempDir::new().unwrap();
        let chain = init_repo_with_chain(tmp.path(), 3);
        let head = *chain.last().unwrap();
        let repo = gix::open(tmp.path()).expect("gix::open");
        let raw = vec!["HEAD".to_string()];
        let out = resolve_commit_specs(Some(&repo), &raw, "test");
        assert_eq!(
            out,
            vec![head.to_hex_with_len(7).to_string()],
            "HEAD must resolve to the tip commit's 7-char short hex",
        );
    }
    #[test]
    fn resolve_commit_specs_head_tilde_resolves_to_parent() {
        let tmp = tempfile::TempDir::new().unwrap();
        let chain = init_repo_with_chain(tmp.path(), 3);
        let head_tilde_1 = chain[1];
        let repo = gix::open(tmp.path()).expect("gix::open");
        let raw = vec!["HEAD~1".to_string()];
        let out = resolve_commit_specs(Some(&repo), &raw, "test");
        assert_eq!(
            out,
            vec![head_tilde_1.to_hex_with_len(7).to_string()],
            "HEAD~1 must resolve to the parent commit's 7-char short hex",
        );
    }
    #[test]
    fn resolve_commit_specs_range_expands_inclusive_of_to() {
        let tmp = tempfile::TempDir::new().unwrap();
        let chain = init_repo_with_chain(tmp.path(), 3);
        let c0 = chain[0];
        let c1 = chain[1];
        let c2 = chain[2];
        let repo = gix::open(tmp.path()).expect("gix::open");
        let raw = vec![format!("{}..HEAD", c0.to_hex_with_len(40))];
        let out = resolve_commit_specs(Some(&repo), &raw, "test");
        assert!(
            out.contains(&c1.to_hex_with_len(7).to_string()),
            "range result must include c1 (the parent of HEAD); got {out:?}",
        );
        assert!(
            out.contains(&c2.to_hex_with_len(7).to_string()),
            "range result must include c2 (HEAD); got {out:?}",
        );
        assert!(
            !out.contains(&c0.to_hex_with_len(7).to_string()),
            "range result must NOT include c0 (the hidden side); got {out:?}",
        );
        assert_eq!(
            out.len(),
            2,
            "range c0..HEAD over 3-commit chain must yield exactly 2 commits; got {out:?}",
        );
    }
    #[test]
    fn resolve_commit_specs_unknown_hash_falls_through_to_literal() {
        let tmp = tempfile::TempDir::new().unwrap();
        init_repo_with_chain(tmp.path(), 1);
        let repo = gix::open(tmp.path()).expect("gix::open");
        let raw = vec!["zzzzzzz".to_string()];
        let out = resolve_commit_specs(Some(&repo), &raw, "test");
        assert_eq!(
            out,
            vec!["zzzzzzz".to_string()],
            "non-hex input must pass through as literal",
        );
    }
    #[test]
    fn resolve_commit_specs_dirty_suffix_falls_through_to_literal() {
        let tmp = tempfile::TempDir::new().unwrap();
        init_repo_with_chain(tmp.path(), 1);
        let repo = gix::open(tmp.path()).expect("gix::open");
        let raw = vec!["abc1234-dirty".to_string()];
        let out = resolve_commit_specs(Some(&repo), &raw, "test");
        assert_eq!(
            out,
            vec!["abc1234-dirty".to_string()],
            "-dirty-suffixed input must pass through as literal",
        );
    }
    #[test]
    fn resolve_commit_specs_empty_input_yields_empty_output() {
        let tmp = tempfile::TempDir::new().unwrap();
        init_repo_with_chain(tmp.path(), 1);
        let repo = gix::open(tmp.path()).expect("gix::open");
        let out = resolve_commit_specs(Some(&repo), &[], "test");
        assert!(out.is_empty(), "empty input must yield empty output");
    }
    #[test]
    fn resolve_commit_specs_mixed_inputs_resolve_per_entry() {
        let tmp = tempfile::TempDir::new().unwrap();
        let chain = init_repo_with_chain(tmp.path(), 1);
        let head = chain[0];
        let repo = gix::open(tmp.path()).expect("gix::open");
        let raw = vec!["HEAD".to_string(), "abc1234-dirty".to_string()];
        let out = resolve_commit_specs(Some(&repo), &raw, "test");
        assert_eq!(
            out,
            vec![
                head.to_hex_with_len(7).to_string(),
                "abc1234-dirty".to_string(),
            ],
            "HEAD resolves; -dirty input lands literal; order preserved",
        );
    }
    #[test]
    fn resolve_commit_specs_head_in_dirty_repo_appends_dirty_suffix() {
        let tmp = tempfile::TempDir::new().unwrap();
        let chain = init_repo_with_chain(tmp.path(), 1);
        let head = chain[0];
        let repo = gix::open(tmp.path()).expect("gix::open");
        // Write an index matching HEAD's tree, then dirty the worktree so
        // its content disagrees with the committed state.
        let head_tree = repo.head_tree().expect("head_tree").id;
        let mut idx = repo.index_from_tree(&head_tree).expect("index_from_tree");
        idx.write(gix::index::write::Options::default())
            .expect("write index");
        std::fs::write(tmp.path().join("file.txt"), b"modified\n").unwrap();
        let raw = vec!["HEAD".to_string()];
        let out = resolve_commit_specs(Some(&repo), &raw, "test");
        let expected_short = head.to_hex_with_len(7).to_string();
        assert_eq!(
            out,
            vec![format!("{expected_short}-dirty")],
            "HEAD in a dirty repo must resolve to <short>-dirty",
        );
    }
    #[test]
    fn resolve_commit_specs_non_head_does_not_get_dirty_suffix_in_dirty_repo() {
        let tmp = tempfile::TempDir::new().unwrap();
        let chain = init_repo_with_chain(tmp.path(), 2);
        let parent = chain[0];
        let repo = gix::open(tmp.path()).expect("gix::open");
        // Write an index matching HEAD's tree, then dirty the worktree so
        // its content disagrees with the committed state.
        let head_tree = repo.head_tree().expect("head_tree").id;
        let mut idx = repo.index_from_tree(&head_tree).expect("index_from_tree");
        idx.write(gix::index::write::Options::default())
            .expect("write index");
        std::fs::write(tmp.path().join("file.txt"), b"modified\n").unwrap();
        let raw = vec!["HEAD~1".to_string()];
        let out = resolve_commit_specs(Some(&repo), &raw, "test");
        assert_eq!(
            out,
            vec![parent.to_hex_with_len(7).to_string()],
            "HEAD~1 (historical commit) must NOT get -dirty suffix \
             even when worktree is dirty",
        );
    }
    #[test]
    fn resolve_commit_specs_exclude_parents_resolves_like_include() {
        let tmp = tempfile::TempDir::new().unwrap();
        let chain = init_repo_with_chain(tmp.path(), 2);
        let head = chain[1];
        let repo = gix::open(tmp.path()).expect("gix::open");
        let raw = vec![format!("{}^!", head.to_hex_with_len(40))];
        let out = resolve_commit_specs(Some(&repo), &raw, "test");
        assert_eq!(
            out,
            vec![head.to_hex_with_len(7).to_string()],
            "<oid>^! must resolve to the same 7-char short hex as <oid>",
        );
    }
    #[test]
    fn resolve_commit_specs_branch_name_resolves_to_tip() {
        let tmp = tempfile::TempDir::new().unwrap();
        let chain = init_repo_with_chain(tmp.path(), 2);
        let parent = chain[0];
        let mut repo = gix::open(tmp.path()).expect("gix::open");
        let _ = repo.committer_or_set_generic_fallback();
        repo.reference(
            "refs/heads/feature",
            parent,
            gix::refs::transaction::PreviousValue::MustNotExist,
            "create feature branch for test",
        )
        .expect("create branch");
        let raw = vec!["feature".to_string()];
        let out = resolve_commit_specs(Some(&repo), &raw, "test");
        assert_eq!(
            out,
            vec![parent.to_hex_with_len(7).to_string()],
            "branch name must resolve to its tip commit",
        );
    }
    #[test]
    fn resolve_commit_specs_tag_name_resolves_to_target() {
        let tmp = tempfile::TempDir::new().unwrap();
        let chain = init_repo_with_chain(tmp.path(), 2);
        let parent = chain[0];
        let mut repo = gix::open(tmp.path()).expect("gix::open");
        let _ = repo.committer_or_set_generic_fallback();
        repo.reference(
            "refs/tags/v0",
            parent,
            gix::refs::transaction::PreviousValue::MustNotExist,
            "create v0 tag for test",
        )
        .expect("create tag");
        let raw = vec!["v0".to_string()];
        let out = resolve_commit_specs(Some(&repo), &raw, "test");
        assert_eq!(
            out,
            vec![parent.to_hex_with_len(7).to_string()],
            "tag name must resolve to its target commit",
        );
    }
    #[test]
    fn resolve_commit_specs_empty_range_yields_no_entries() {
        let tmp = tempfile::TempDir::new().unwrap();
        init_repo_with_chain(tmp.path(), 2);
        let repo = gix::open(tmp.path()).expect("gix::open");
        let raw = vec!["HEAD..HEAD".to_string()];
        let out = resolve_commit_specs(Some(&repo), &raw, "test");
        assert!(
            out.is_empty(),
            "HEAD..HEAD must expand to zero commits; got {out:?}",
        );
    }
    #[test]
    fn resolve_commit_specs_valid_hex_nonexistent_prefix_falls_through_to_literal() {
        let tmp = tempfile::TempDir::new().unwrap();
        init_repo_with_chain(tmp.path(), 1);
        let repo = gix::open(tmp.path()).expect("gix::open");
        let raw = vec!["deadbee".to_string()];
        let out = resolve_commit_specs(Some(&repo), &raw, "test");
        assert_eq!(
            out,
            vec!["deadbee".to_string()],
            "valid-hex non-existent prefix must pass through as literal",
        );
    }
    #[test]
    fn looks_like_literal_hash_accepts_canonical_shapes() {
        assert!(looks_like_literal_hash("abc1234"));
        assert!(looks_like_literal_hash(
            "abcdef0123456789abcdef0123456789abcdef01"
        ));
        assert!(looks_like_literal_hash("abc1234-dirty"));
        assert!(looks_like_literal_hash(
            "abcdef0123456789abcdef0123456789abcdef01-dirty"
        ));
    }
    #[test]
    fn looks_like_literal_hash_rejects_revspec_shapes() {
        assert!(!looks_like_literal_hash("HEAD"));
        assert!(!looks_like_literal_hash("main"));
        assert!(!looks_like_literal_hash("HEAD~1"));
        assert!(!looks_like_literal_hash("HEAD~3..HEAD"));
        assert!(!looks_like_literal_hash("HEAD^"));
        assert!(!looks_like_literal_hash("abc123"));
        assert!(!looks_like_literal_hash(
            "abcdef0123456789abcdef0123456789abcdef0123"
        ));
        assert!(!looks_like_literal_hash(""));
        assert!(!looks_like_literal_hash("abc-dirty"));
    }
    #[test]
    fn looks_like_literal_hash_accepts_uppercase_and_mixed_case() {
        assert!(looks_like_literal_hash("ABC1234"));
        assert!(looks_like_literal_hash("AbC1234"));
        assert!(looks_like_literal_hash("ABC1234-dirty"));
        assert!(looks_like_literal_hash(
            "ABCDEF0123456789ABCDEF0123456789ABCDEF01"
        ));
    }
}