#![allow(clippy::too_many_lines)]
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::OnceLock;
use grex_core::git::gix_backend::file_url_from_path;
use grex_core::manifest;
use grex_core::sync::{self, SyncError, SyncOptions};
use grex_core::tree::error::TreeError;
use tokio_util::sync::CancellationToken;
/// Convenience wrapper around [`sync::run`] that supplies a fresh,
/// never-cancelled `CancellationToken` so individual tests don't have
/// to construct one.
fn run(
    pack_root: &std::path::Path,
    opts: &SyncOptions,
) -> Result<grex_core::sync::SyncReport, SyncError> {
    sync::run(pack_root, opts, &CancellationToken::new())
}
use grex_core::{ExecResult, StepKind};
use tempfile::TempDir;
/// Converts a slice of string literals into the owned `Vec<String>` shape
/// expected by `SyncOptions::with_only_patterns`.
fn only_patterns(patterns: &[&str]) -> Vec<String> {
    let mut owned = Vec::with_capacity(patterns.len());
    for pat in patterns {
        owned.push(String::from(*pat));
    }
    owned
}
/// Exports git author/committer identity environment variables exactly once
/// per process, so `git commit` succeeds even without a global git config.
fn init_git_identity() {
    static ONCE: OnceLock<()> = OnceLock::new();
    ONCE.get_or_init(|| {
        // Table-driven so author/committer stay visibly in sync.
        let identity = [
            ("GIT_AUTHOR_NAME", "grex-test"),
            ("GIT_AUTHOR_EMAIL", "test@grex.local"),
            ("GIT_COMMITTER_NAME", "grex-test"),
            ("GIT_COMMITTER_EMAIL", "test@grex.local"),
        ];
        for (key, value) in identity {
            std::env::set_var(key, value);
        }
    });
}
/// Runs `git <args>` in `cwd`, panicking with the captured stderr when the
/// binary is missing from PATH or the command exits non-zero.
fn run_git(cwd: &Path, args: &[&str]) {
    let output = std::process::Command::new("git")
        .current_dir(cwd)
        .args(args)
        .output()
        .expect("git on PATH");
    if !output.status.success() {
        panic!("git {:?} failed: {}", args, String::from_utf8_lossy(&output.stderr));
    }
}
/// Seeds a bare git repository named `<name>.git` under `tmp` whose HEAD
/// commit contains `.grex/pack.yaml` (the given `yaml`) plus `extra_files`
/// (workdir-relative path, contents). Returns the path to the bare repo,
/// suitable for building `file://` child URLs.
fn bare_with_manifest(tmp: &Path, name: &str, yaml: &str, extra_files: &[(&str, &str)]) -> PathBuf {
    init_git_identity();
    // Build a throwaway working copy first; it is cloned --bare at the end.
    let work = tmp.join(format!("seed-{name}-work"));
    fs::create_dir_all(work.join(".grex")).unwrap();
    for (rel, contents) in extra_files {
        let p = work.join(rel);
        if let Some(parent) = p.parent() {
            fs::create_dir_all(parent).unwrap();
        }
        fs::write(p, contents).unwrap();
    }
    run_git(&work, &["init", "-q", "-b", "main"]);
    // Repo-local identity as a belt-and-braces fallback to the env vars
    // exported by init_git_identity().
    run_git(&work, &["config", "user.email", "grex-test@example.com"]);
    run_git(&work, &["config", "user.name", "grex-test"]);
    fs::write(work.join(".grex/pack.yaml"), yaml).unwrap();
    run_git(&work, &["add", "-A"]);
    run_git(&work, &["commit", "-q", "-m", "seed"]);
    // Clone into a bare repo so tests can reference it by URL like a remote.
    let bare = tmp.join(format!("{name}.git"));
    run_git(tmp, &["clone", "-q", "--bare", work.to_str().unwrap(), bare.to_str().unwrap()]);
    bare
}
/// Writes `<dir>/.grex/pack.yaml` with the given manifest body, creating the
/// `.grex` directory (and any missing parents) on demand.
fn write_root(dir: &Path, yaml: &str) {
    let grex_dir = dir.join(".grex");
    fs::create_dir_all(&grex_dir).unwrap();
    fs::write(grex_dir.join("pack.yaml"), yaml).unwrap();
}
/// Paths for the canonical 3-level pack tree used by most tests:
/// root (meta) -> a (declarative), b (meta) -> c (declarative).
struct Fixture {
    /// Owns every path below; kept alive so the temp tree isn't deleted mid-test.
    _tmp: TempDir,
    /// Directory holding the root pack manifest (`<root>/.grex/pack.yaml`).
    root: PathBuf,
    /// Workspace where child packs get cloned; same directory as `root` here.
    workspace: PathBuf,
    /// Directory that pack `a`'s mkdir action creates on a wet run.
    a_target_dir: PathBuf,
    /// Pre-existing file that pack `a`'s symlink action points at.
    a_symlink_src: PathBuf,
    /// Symlink that pack `a`'s symlink action creates on a wet run.
    a_symlink_dst: PathBuf,
    /// Directory that pack `c`'s mkdir action creates on a wet run.
    c_target_dir: PathBuf,
}
/// Builds the canonical fixture: three seeded bare git repos (a, b, c) plus
/// a local on-disk root manifest wiring them into root -> {a, b -> c}.
/// Packs a and c are declarative (mkdir/symlink actions targeting `sink/`);
/// b is a pure meta pack.
fn build_fixture() -> Fixture {
    let tmp = TempDir::new().unwrap();
    let tmp_path = tmp.path().to_path_buf();
    // `sink` collects the declarative actions' side-effect targets so tests
    // can assert on them without digging into pack workspaces.
    let sink = tmp_path.join("sink");
    fs::create_dir_all(&sink).unwrap();
    let a_target_dir = sink.join("a-made");
    let a_symlink_src = sink.join("src-for-a");
    fs::write(&a_symlink_src, b"src").unwrap();
    let a_symlink_dst = sink.join("dst-for-a");
    let c_target_dir = sink.join("c-made");
    // Backslashes are normalized to '/' so the YAML stays valid on Windows.
    let c_yaml = format!(
        "schema_version: \"1\"\nname: c\ntype: declarative\nactions:\n - mkdir:\n path: {}\n",
        c_target_dir.to_string_lossy().replace('\\', "/"),
    );
    let c_bare = bare_with_manifest(&tmp_path, "c", &c_yaml, &[]);
    let c_url = file_url_from_path(&c_bare);
    // b is a meta pack whose only child is c.
    let b_yaml = format!(
        "schema_version: \"1\"\nname: b\ntype: meta\nchildren:\n - url: {c_url}\n path: c\n",
    );
    let b_bare = bare_with_manifest(&tmp_path, "b", &b_yaml, &[]);
    let b_url = file_url_from_path(&b_bare);
    // a is declarative with two actions: a mkdir plus a file symlink.
    let a_yaml = format!(
        concat!(
            "schema_version: \"1\"\nname: a\ntype: declarative\n",
            "actions:\n",
            " - mkdir:\n path: {mkdir_path}\n",
            " - symlink:\n src: {sym_src}\n dst: {sym_dst}\n kind: file\n",
        ),
        mkdir_path = a_target_dir.to_string_lossy().replace('\\', "/"),
        sym_src = a_symlink_src.to_string_lossy().replace('\\', "/"),
        sym_dst = a_symlink_dst.to_string_lossy().replace('\\', "/"),
    );
    // The extra file gives pack `a` a non-manifest tree entry to carry.
    let a_bare = bare_with_manifest(&tmp_path, "a", &a_yaml, &[("files/keep.txt", "keep")]);
    let a_url = file_url_from_path(&a_bare);
    // The root pack lives directly on disk (not in git) and points at a and b.
    let root_dir = tmp_path.join("root");
    let root_yaml = format!(
        concat!(
            "schema_version: \"1\"\nname: root\ntype: meta\n",
            "children:\n",
            " - url: {a_url}\n path: a\n",
            " - url: {b_url}\n path: b\n",
        ),
        a_url = a_url,
        b_url = b_url,
    );
    write_root(&root_dir, &root_yaml);
    // Children are cloned directly under the root directory.
    let workspace = root_dir.clone();
    Fixture {
        _tmp: tmp,
        root: root_dir,
        workspace,
        a_target_dir,
        a_symlink_src,
        a_symlink_dst,
        c_target_dir,
    }
}
/// Builds the SyncOptions most tests share: validation enabled, dry-run
/// configurable, workspace pinned to the fixture directory.
fn options(dry_run: bool, workspace: PathBuf) -> SyncOptions {
    let base = SyncOptions::new().with_dry_run(dry_run);
    base.with_validate(true).with_workspace(Some(workspace))
}
/// Dry-run over the full 3-level tree: every step is planned, nothing is
/// written to disk.
#[test]
fn e2e_dry_run_3_level_tree() {
    let f = build_fixture();
    let report = run(&f.root, &options(true, f.workspace.clone())).expect("dry run succeeds");
    // Graph shape: 4 nodes and one Child edge per parent->child link.
    assert_eq!(report.graph.nodes().len(), 4, "expect root + a + b + c");
    let child_edges = report
        .graph
        .edges()
        .iter()
        .filter(|e| matches!(e.kind, grex_core::EdgeKind::Child))
        .count();
    assert_eq!(child_edges, 3, "3 Child edges: root→a, root→b, b→c");
    assert!(report.halted.is_none());
    // 5 steps total: 3 declarative actions (a: mkdir + symlink, c: mkdir)
    // planned as WouldPerformChange, plus 2 meta packs reported as NoOp.
    assert_eq!(report.steps.len(), 5);
    let declarative_steps = report
        .steps
        .iter()
        .filter(|s| matches!(s.exec_step.result, ExecResult::WouldPerformChange))
        .count();
    assert_eq!(declarative_steps, 3, "3 declarative actions plan: {:?}", report.steps);
    let meta_steps =
        report.steps.iter().filter(|s| matches!(s.exec_step.result, ExecResult::NoOp)).count();
    assert_eq!(meta_steps, 2, "2 meta synthesis NoOp steps: {:?}", report.steps);
    // Dry-run must leave the filesystem untouched.
    assert!(!f.a_target_dir.exists(), "dry-run mkdir must not create dir");
    assert!(!f.a_symlink_dst.exists(), "dry-run symlink must not create link");
    assert!(!f.c_target_dir.exists(), "dry-run mkdir must not create dir");
}
/// Wet run over the 3-level tree: declarative actions execute and events
/// are appended to the JSONL log.
#[test]
fn e2e_wet_run_3_level_tree() {
    let f = build_fixture();
    let report = run(&f.root, &options(false, f.workspace.clone())).expect("wet run succeeds");
    assert!(report.halted.is_none(), "halted: {:?}", report.halted);
    assert_eq!(report.steps.len(), 5);
    // The mkdir actions of packs a and c must have materialized on disk.
    assert!(f.a_target_dir.is_dir(), "a mkdir should have produced dir");
    assert!(f.c_target_dir.is_dir(), "c mkdir should have produced dir");
    // symlink_metadata (not metadata) inspects the link itself, not its target.
    assert!(
        fs::symlink_metadata(&f.a_symlink_dst).map(|m| m.file_type().is_symlink()).unwrap_or(false),
        "symlink destination should be a symlink"
    );
    // Each executed action should have appended at least one event.
    let log = f.root.join(".grex/events.jsonl");
    let events = manifest::read_all(&log).expect("log readable");
    assert!(events.len() >= 3, "expected >=3 events, got {}", events.len());
    // Sanity: the symlink source the fixture created is still present.
    let sym_src_ok = f.a_symlink_src.exists();
    assert!(sym_src_ok, "fixture sym src should exist");
}
/// A pack that lists itself as a child must abort the sync with a
/// CycleDetected error whose chain actually closes a loop.
#[test]
fn e2e_cycle_aborts() {
    let tmp = TempDir::new().unwrap();
    let tmp_path = tmp.path();
    init_git_identity();
    // Seed a plain meta pack first so the bare clone has an initial commit.
    let cyc_work = tmp_path.join("cyc-work");
    fs::create_dir_all(cyc_work.join(".grex")).unwrap();
    run_git(&cyc_work, &["init", "-q", "-b", "main"]);
    run_git(&cyc_work, &["config", "user.email", "g@g"]);
    run_git(&cyc_work, &["config", "user.name", "g"]);
    fs::write(cyc_work.join(".grex/pack.yaml"), "schema_version: \"1\"\nname: cyc\ntype: meta\n")
        .unwrap();
    run_git(&cyc_work, &["add", "-A"]);
    run_git(&cyc_work, &["commit", "-q", "-m", "seed"]);
    let cyc_bare = tmp_path.join("cyc.git");
    run_git(
        tmp_path,
        &["clone", "-q", "--bare", cyc_work.to_str().unwrap(), cyc_bare.to_str().unwrap()],
    );
    // Rewrite the manifest so the pack lists its own bare URL as a child,
    // then push the self-referencing commit back to the bare repo.
    let cyc_url = file_url_from_path(&cyc_bare);
    let self_yaml = format!(
        "schema_version: \"1\"\nname: cyc\ntype: meta\nchildren:\n - url: {cyc_url}\n path: cyc\n",
    );
    fs::write(cyc_work.join(".grex/pack.yaml"), &self_yaml).unwrap();
    run_git(&cyc_work, &["add", "-A"]);
    run_git(&cyc_work, &["commit", "-q", "-m", "cycle"]);
    run_git(&cyc_work, &["push", "-q", cyc_bare.to_str().unwrap(), "main"]);
    // Root points at the cyclic pack: root -> cyc -> cyc -> ...
    let root_dir = tmp_path.join("root");
    let root_yaml = format!(
        "schema_version: \"1\"\nname: root\ntype: meta\nchildren:\n - url: {cyc_url}\n path: cyc\n",
    );
    write_root(&root_dir, &root_yaml);
    let workspace = root_dir.clone();
    let err = run(&root_dir, &options(false, workspace)).unwrap_err();
    match err {
        SyncError::Tree(TreeError::CycleDetected { chain }) => {
            assert!(!chain.is_empty(), "cycle chain must be non-empty");
            // The reported chain must genuinely close a loop: its last
            // identity repeats one seen earlier (or equals the first).
            let first = chain.first().expect("chain non-empty");
            let last = chain.last().expect("chain non-empty");
            assert!(
                chain[..chain.len() - 1].contains(last) || first == last,
                "last element must repeat earlier in chain (got chain={chain:?})"
            );
            assert!(
                chain[0].starts_with("path:"),
                "B4: chain must start with root path: prefix, got {:?}",
                chain[0]
            );
            assert!(
                !chain.iter().any(|s| s.ends_with('@')),
                "B2: no identity should end with trailing @, got {chain:?}"
            );
        }
        other => panic!("expected SyncError::Tree(TreeError::CycleDetected), got {other:?}"),
    }
}
/// A root pack whose depends_on names something nothing provides must fail
/// validation, and the error text must mention the unresolved dependency.
#[test]
fn e2e_depends_on_unsatisfied() {
    let tmp = TempDir::new().unwrap();
    let root_dir = tmp.path().join("root");
    let manifest_yaml = String::from(
        "schema_version: \"1\"\nname: root\ntype: meta\ndepends_on:\n - zzz-missing\n",
    );
    write_root(&root_dir, &manifest_yaml);
    match run(&root_dir, &options(false, root_dir.clone())).unwrap_err() {
        SyncError::Validation { errors } => {
            let mentions_dep = errors.iter().any(|e| format!("{e}").contains("zzz-missing"));
            assert!(mentions_dep, "errors must mention unresolved dep: {errors:?}");
        }
        other => panic!("expected Validation, got {other:?}"),
    }
}
/// with_validate(false) must bypass dependency validation entirely, even
/// for a manifest that would otherwise fail.
#[test]
fn e2e_validation_skip_bypasses_checks() {
    let tmp = TempDir::new().unwrap();
    let tmp_path = tmp.path();
    let root_dir = tmp_path.join("root");
    // Same unsatisfiable depends_on that fails validation elsewhere; with
    // validation disabled the sync should plan normally.
    let root_yaml =
        "schema_version: \"1\"\nname: root\ntype: meta\ndepends_on:\n - zzz-missing\n".to_string();
    write_root(&root_dir, &root_yaml);
    let workspace = root_dir.clone();
    let opts =
        SyncOptions::new().with_dry_run(true).with_validate(false).with_workspace(Some(workspace));
    let report = run(&root_dir, &opts).expect("--no-validate must bypass");
    assert!(report.halted.is_none());
    // Only the root pack itself: one step, one graph node.
    assert_eq!(report.steps.len(), 1);
    assert_eq!(report.graph.nodes().len(), 1);
    // NOTE(review): no runtime effect — presumably just keeps the `StepKind`
    // import exercised by this test; TODO confirm and remove if unneeded.
    let _ = std::mem::size_of::<StepKind>();
}
/// An only-pattern of `b/c` must run pack c alone; pack a (and its symlink
/// action) is filtered out entirely.
#[test]
fn e2e_only_filter_by_pack_name_runs_just_one_pack() {
    let f = build_fixture();
    let opts = SyncOptions::new()
        .with_workspace(Some(f.workspace.clone()))
        .with_only_patterns(Some(only_patterns(&["b/c"])));
    let report = run(&f.root, &opts).expect("only-filter sync ok");
    assert!(report.halted.is_none(), "halted: {:?}", report.halted);
    assert!(f.c_target_dir.is_dir(), "c must have executed");
    assert!(!f.a_target_dir.exists(), "a must have been filtered out");
    assert!(!f.a_symlink_dst.exists(), "a symlink must have been filtered out");
}
/// Only-patterns match workspace-relative paths, so an absolute path to
/// pack `a` must match nothing and produce zero steps.
#[test]
fn e2e_only_absolute_path_glob_does_not_match() {
    let f = build_fixture();
    let abs_a = f.workspace.join("a").display().to_string();
    // Normalize separators so the pattern shape is the same on Windows.
    let abs_a = abs_a.replace('\\', "/");
    let opts = SyncOptions::new()
        .with_workspace(Some(f.workspace.clone()))
        .with_only_patterns(Some(vec![abs_a]));
    let report = run(&f.root, &opts).expect("only-filter sync ok");
    assert!(report.halted.is_none(), "halted: {:?}", report.halted);
    assert_eq!(
        report.steps.len(),
        0,
        "absolute-path glob must not match any workspace-relative path: {:?}",
        report.steps
    );
    assert!(!f.a_target_dir.exists(), "a must NOT have executed via absolute glob");
    assert!(!f.c_target_dir.exists(), "c must NOT have executed");
}
/// Multiple only-patterns combine as OR: with `a` and `b/c` given, both
/// declarative packs execute.
#[test]
fn e2e_only_filter_multiple_patterns_or_combine() {
    let f = build_fixture();
    let opts = SyncOptions::new()
        .with_workspace(Some(f.workspace.clone()))
        .with_only_patterns(Some(only_patterns(&["a", "b/c"])));
    let report = run(&f.root, &opts).expect("multi-pattern only sync ok");
    assert!(report.halted.is_none(), "halted: {:?}", report.halted);
    assert!(f.a_target_dir.is_dir(), "a must have executed");
    assert!(f.c_target_dir.is_dir(), "c must have executed");
}
/// An only-pattern matching nothing must produce zero steps, no filesystem
/// side effects, and no pack entries in the lockfile.
#[test]
fn e2e_only_filter_non_matching_skips_everything() {
    let f = build_fixture();
    let opts = SyncOptions::new()
        .with_workspace(Some(f.workspace.clone()))
        .with_only_patterns(Some(only_patterns(&["zzz-no-match"])));
    let report = run(&f.root, &opts).expect("non-matching only sync ok");
    assert!(report.halted.is_none(), "halted: {:?}", report.halted);
    assert_eq!(report.steps.len(), 0, "zero steps when nothing matches");
    assert!(!f.a_target_dir.exists(), "a must not have executed");
    assert!(!f.c_target_dir.exists(), "c must not have executed");
    assert!(!f.a_symlink_dst.exists(), "symlink must not have been created");
    // The lockfile may legitimately be absent after a no-op run; when it
    // exists it must not record any pack entry.
    let lockfile = f.root.join(".grex/grex.lock.jsonl");
    if lockfile.exists() {
        let body = fs::read_to_string(&lockfile).unwrap();
        assert!(!body.contains("\"id\":"), "no pack entries when nothing matched: {body:?}");
    }
}
/// The bare pattern `a` matches pack a's workspace-relative path; pack c
/// is filtered out.
#[test]
fn e2e_only_filter_matches_workspace_relative_path() {
    let f = build_fixture();
    let opts = SyncOptions::new()
        .with_workspace(Some(f.workspace.clone()))
        .with_only_patterns(Some(only_patterns(&["a"])));
    let report = run(&f.root, &opts).expect("path-matched only sync ok");
    assert!(report.halted.is_none(), "halted: {:?}", report.halted);
    assert!(f.a_target_dir.is_dir(), "a must have executed via path match");
    assert!(!f.c_target_dir.exists(), "c must have been filtered out");
}
/// A filtered sync must not drop lock entries for packs it skipped:
/// run A (full) -> run B (only `a`) -> run C (full), where c still
/// short-circuits on run C because its entry survived run B.
#[test]
fn e2e_only_filter_preserves_prior_lock_entries_for_filtered_packs() {
    let f = build_fixture();
    let base = SyncOptions::new().with_workspace(Some(f.workspace.clone()));
    run(&f.root, &base).expect("sync A ok");
    let lockfile_path = f.root.join(".grex/grex.lock.jsonl");
    let body_a = fs::read_to_string(&lockfile_path).expect("lockfile exists");
    assert!(body_a.contains("\"id\":\"a\""), "pack a in lockfile after A: {body_a}");
    assert!(body_a.contains("\"id\":\"c\""), "pack c in lockfile after A: {body_a}");
    // Run B filters to `a` only; c's entry from run A must survive.
    let only_b = base.clone().with_only_patterns(Some(only_patterns(&["a"])));
    run(&f.root, &only_b).expect("sync B ok");
    let body_b = fs::read_to_string(&lockfile_path).expect("lockfile exists");
    assert!(body_b.contains("\"id\":\"c\""), "pack c preserved across filtered sync: {body_b}");
    // Run C is unfiltered; because c's lock entry survived, c must report
    // PackSkipped instead of re-executing.
    let r_c = run(&f.root, &base).expect("sync C ok");
    let c_skipped = r_c
        .steps
        .iter()
        .any(|s| s.pack == "c" && matches!(s.exec_step.details, StepKind::PackSkipped { .. }));
    assert!(
        c_skipped,
        "pack c must short-circuit on run C (prior lock entry preserved through filtered run B): {:?}",
        r_c.steps
    );
}
/// dry_run combined with force plans every step but must leave the lockfile
/// byte-identical and must never report a performed change.
#[test]
fn e2e_force_plus_dry_run_plans_but_does_not_write_lockfile() {
    let f = build_fixture();
    let base = SyncOptions::new().with_workspace(Some(f.workspace.clone()));
    // Warm-up wet run populates the lockfile snapshotted below.
    run(&f.root, &base).expect("warm-up ok");
    let lockfile_path = f.root.join(".grex/grex.lock.jsonl");
    let warm_body = fs::read_to_string(&lockfile_path).expect("warm lockfile");
    let dry_force = SyncOptions::new()
        .with_dry_run(true)
        .with_workspace(Some(f.workspace.clone()))
        .with_force(true);
    let report = run(&f.root, &dry_force).expect("dry+force sync ok");
    assert!(report.halted.is_none(), "halted: {:?}", report.halted);
    // A dry run may plan or no-op, but must never actually perform a change.
    for s in &report.steps {
        assert!(
            matches!(
                s.exec_step.result,
                ExecResult::WouldPerformChange | ExecResult::AlreadySatisfied | ExecResult::NoOp
            ),
            "dry-run must not emit PerformedChange: {s:?}"
        );
    }
    let post_body = fs::read_to_string(&lockfile_path).expect("lockfile still present");
    assert_eq!(warm_body, post_body, "dry-run + force must not rewrite lockfile");
}
/// After an empty commit bumps pack `a`'s HEAD, a second sync must persist
/// the new commit SHA into `a`'s lockfile entry.
#[test]
fn e2e_upsert_lock_entry_sha_refreshes_on_commit_sha_change() {
    let f = build_fixture();
    let base = SyncOptions::new().with_workspace(Some(f.workspace.clone()));
    run(&f.root, &base).expect("run 1 ok");
    let lockfile_path = f.root.join(".grex/grex.lock.jsonl");
    let sha_before = extract_pack_sha(&lockfile_path, "a");
    // Bump HEAD of the workspace clone of `a` with an empty commit so the
    // SHA changes without touching the worktree.
    let a_ws = f.workspace.join("a");
    run_git(&a_ws, &["config", "user.email", "grex-test@example.com"]);
    run_git(&a_ws, &["config", "user.name", "grex-test"]);
    run_git(&a_ws, &["commit", "--allow-empty", "-q", "-m", "head-bump"]);
    run(&f.root, &base).expect("run 2 ok");
    let sha_after = extract_pack_sha(&lockfile_path, "a");
    assert_ne!(
        sha_before, sha_after,
        "commit-SHA change must be persisted to lockfile (before={sha_before:?}, after={sha_after:?})"
    );
    assert!(
        sha_after.as_ref().is_some_and(|s| !s.is_empty()),
        "post-bump sha must be non-empty: {sha_after:?}"
    );
}
/// Scans a JSONL lockfile for lines whose `"id"` equals `pack_name` and
/// returns the `"sha"` value of the last such line (later entries win).
/// Returns `None` when the file is unreadable or no matching line carries
/// a sha field.
fn extract_pack_sha(lockfile: &Path, pack_name: &str) -> Option<String> {
    let body = fs::read_to_string(lockfile).ok()?;
    let id_tag = format!("\"id\":\"{pack_name}\"");
    body.lines()
        .filter(|line| line.contains(&id_tag))
        .filter_map(|line| {
            let after_key = line.split("\"sha\":\"").nth(1)?;
            let end = after_key.find('"')?;
            Some(after_key[..end].to_string())
        })
        .last()
}
/// A new commit SHA in pack `a`'s workspace must defeat the skip-on-hash
/// short-circuit: run 2 re-executes `a` instead of reporting PackSkipped.
#[test]
fn e2e_commit_sha_change_invalidates_skip() {
    let f = build_fixture();
    let opts = SyncOptions::new().with_workspace(Some(f.workspace.clone()));
    let r1 = run(&f.root, &opts).expect("first sync ok");
    assert!(r1.halted.is_none(), "halted: {:?}", r1.halted);
    let a_ws = f.workspace.join("a");
    assert!(a_ws.join(".git").exists(), "workspace clone of a present");
    // Empty commit changes HEAD without touching the worktree.
    run_git(&a_ws, &["config", "user.email", "grex-test@example.com"]);
    run_git(&a_ws, &["config", "user.name", "grex-test"]);
    run_git(&a_ws, &["commit", "--allow-empty", "-q", "-m", "head-bump"]);
    let r2 = run(&f.root, &opts).expect("second sync ok");
    assert!(r2.halted.is_none(), "halted: {:?}", r2.halted);
    // `a` must appear in the steps as executed, never as skipped.
    let a_skipped = r2
        .steps
        .iter()
        .any(|s| s.pack == "a" && matches!(s.exec_step.details, StepKind::PackSkipped { .. }));
    assert!(!a_skipped, "pack `a` with new commit SHA must re-execute, not skip: {:?}", r2.steps);
    let a_executed = r2
        .steps
        .iter()
        .any(|s| s.pack == "a" && !matches!(s.exec_step.details, StepKind::PackSkipped { .. }));
    assert!(a_executed, "pack `a` must have executed actions on run 2: {:?}", r2.steps);
}
/// With unchanged inputs a repeat sync short-circuits packs (PackSkipped);
/// with_force(true) must bypass that entirely — zero skips.
#[test]
fn e2e_force_bypasses_skip_on_hash() {
    let f = build_fixture();
    let base = SyncOptions::new().with_workspace(Some(f.workspace.clone()));
    run(&f.root, &base).expect("warm-up sync ok");
    // Second unforced run: nothing changed, so at least one pack must skip.
    let r_skip = run(&f.root, &base).expect("second sync ok");
    let skipped_count = r_skip
        .steps
        .iter()
        .filter(|s| matches!(s.exec_step.details, StepKind::PackSkipped { .. }))
        .count();
    assert!(
        skipped_count >= 1,
        "force=false + unchanged inputs must short-circuit at least one pack (got {}): {:?}",
        skipped_count,
        r_skip.steps
    );
    // Forced run: every pack re-executes, no skips allowed.
    let force_opts = base.clone().with_force(true);
    let r_force = run(&f.root, &force_opts).expect("forced sync ok");
    let forced_skips = r_force
        .steps
        .iter()
        .filter(|s| matches!(s.exec_step.details, StepKind::PackSkipped { .. }))
        .count();
    assert_eq!(
        forced_skips, 0,
        "--force must bypass skip-on-hash; got {} skipped steps: {:?}",
        forced_skips, r_force.steps
    );
}