use chrono::{Duration, TimeZone, Utc};
use grex_core::manifest::{
append_event, compact, fold, read_all, Event, PackId, PackState, SCHEMA_VERSION,
};
use grex_core::pack::{parse, PackManifest};
use grex_core::tree::build_graph;
use grex_core::{ClonedRepo, GitBackend, GitError, PackLoader, TreeError};
use proptest::prelude::*;
use std::collections::{BTreeMap, HashMap};
use std::path::{Path, PathBuf};
use tempfile::tempdir;
/// Deterministic test timestamp: a fixed 2026-04-19T10:00:00Z base shifted
/// by `n` seconds (negative offsets land before the base instant).
fn ts(n: i64) -> chrono::DateTime<Utc> {
    let base = Utc.with_ymd_and_hms(2026, 4, 19, 10, 0, 0).unwrap();
    base + Duration::seconds(n)
}
/// Strategy for pack ids: half the weight is on three fixed ids ("a"/"b"/"c")
/// so generated events frequently target the same pack; the remaining weight
/// produces short random lowercase identifiers.
fn arb_id() -> impl Strategy<Value = String> {
    prop_oneof![
        1 => Just(String::from("a")),
        1 => Just(String::from("b")),
        1 => Just(String::from("c")),
        3 => "[a-z]{1,8}".prop_map(String::from),
    ]
}
/// Strategy for second offsets fed to [`ts`]; includes negative values so
/// out-of-order timestamps are exercised.
fn arb_ts_offset() -> impl Strategy<Value = i64> {
    -500..5000i64
}
/// Strategy producing one of the four manifest event variants, all equally
/// likely. Ids come from [`arb_id`] and timestamps from [`arb_ts_offset`] so
/// distinct events often collide on the same pack.
fn arb_event() -> impl Strategy<Value = Event> {
    let add_events = (arb_id(), arb_ts_offset()).prop_map(|(id, offset)| Event::Add {
        ts: ts(offset),
        id: id.clone(),
        url: format!("u://{id}"),
        path: id,
        pack_type: "declarative".into(),
        schema_version: SCHEMA_VERSION.into(),
    });
    let update_events =
        (arb_id(), arb_ts_offset(), "[a-z]{1,5}").prop_map(|(id, offset, value)| Event::Update {
            ts: ts(offset),
            id,
            field: "ref".into(),
            value: serde_json::Value::String(value),
        });
    let rm_events =
        (arb_id(), arb_ts_offset()).prop_map(|(id, offset)| Event::Rm { ts: ts(offset), id });
    let sync_events =
        (arb_id(), arb_ts_offset(), "[a-f0-9]{6}").prop_map(|(id, offset, sha)| Event::Sync {
            ts: ts(offset),
            id,
            sha,
        });
    prop_oneof![add_events, update_events, rm_events, sync_events]
}
/// Simulates an incremental consumer: after every event, the current state is
/// re-serialized into a synthetic event log (via [`replay_events_from_state`]),
/// the new event is appended, and the whole log is folded again.
fn streamed_fold(events: &[Event]) -> HashMap<PackId, PackState> {
    events.iter().fold(HashMap::new(), |state, ev| {
        let mut log = replay_events_from_state(&state);
        log.push(ev.clone());
        fold(log)
    })
}
/// Reconstructs a minimal event log that would fold back into `state`:
/// one Add per pack, plus an Update when a ref is set and a Sync when a
/// last-sync sha is recorded. Packs are emitted in sorted-id order so the
/// output is deterministic.
fn replay_events_from_state(state: &HashMap<PackId, PackState>) -> Vec<Event> {
    let mut entries: Vec<(&PackId, &PackState)> = state.iter().collect();
    entries.sort_by(|(ida, _), (idb, _)| ida.cmp(idb));
    let mut events = Vec::with_capacity(entries.len() * 3);
    for (_, pack) in entries {
        events.push(Event::Add {
            ts: pack.added_at,
            id: pack.id.clone(),
            url: pack.url.clone(),
            path: pack.path.clone(),
            pack_type: pack.pack_type.clone(),
            schema_version: SCHEMA_VERSION.into(),
        });
        // NOTE(review): both synthetic Update and Sync reuse `updated_at`
        // as their timestamp, mirroring the original replay semantics.
        if let Some(ref_spec) = &pack.ref_spec {
            events.push(Event::Update {
                ts: pack.updated_at,
                id: pack.id.clone(),
                field: "ref".into(),
                value: serde_json::Value::String(ref_spec.clone()),
            });
        }
        if let Some(sha) = &pack.last_sync_sha {
            events.push(Event::Sync { ts: pack.updated_at, id: pack.id.clone(), sha: sha.clone() });
        }
    }
    events
}
proptest! {
    // Incremental (replay + one event) folding must agree with a single
    // batch fold over the entire log.
    #[test]
    fn streamed_fold_matches_batch_fold(events in prop::collection::vec(arb_event(), 0..40)) {
        let batch = fold(events.clone());
        let streamed = streamed_fold(&events);
        prop_assert_eq!(batch, streamed);
    }
    // Persisting events to disk and reading them back must not change the fold.
    #[test]
    fn fold_persist_then_read_matches_fold_direct(
        events in prop::collection::vec(arb_event(), 0..40),
    ) {
        let dir = tempdir().unwrap();
        let p = dir.path().join(".grex/events.jsonl");
        // Create the parent directory explicitly, matching
        // `compaction_preserves_fold` below, so this test does not depend on
        // `append_event` creating intermediate directories itself.
        std::fs::create_dir_all(p.parent().unwrap()).unwrap();
        for ev in &events {
            append_event(&p, ev).unwrap();
        }
        let direct = fold(events);
        let round_tripped = fold(read_all(&p).unwrap());
        prop_assert_eq!(direct, round_tripped);
    }
    // Compacting the on-disk log must be invisible to fold.
    #[test]
    fn compaction_preserves_fold(events in prop::collection::vec(arb_event(), 0..40)) {
        let dir = tempdir().unwrap();
        let p = dir.path().join(".grex/events.jsonl");
        std::fs::create_dir_all(p.parent().unwrap()).unwrap();
        for ev in &events {
            append_event(&p, ev).unwrap();
        }
        let before = fold(read_all(&p).unwrap());
        compact(&p).unwrap();
        let after = fold(read_all(&p).unwrap());
        prop_assert_eq!(before, after);
    }
    // An Update after a Rm must not resurrect the removed pack.
    #[test]
    fn rm_then_update_is_noop(n in 0i64..1000) {
        let events = vec![
            Event::Add {
                ts: ts(n),
                id: "a".into(),
                url: "u".into(),
                path: "a".into(),
                pack_type: "declarative".into(),
                schema_version: SCHEMA_VERSION.into(),
            },
            Event::Rm { ts: ts(n+1), id: "a".into() },
            Event::Update {
                ts: ts(n+2),
                id: "a".into(),
                field: "ref".into(),
                value: serde_json::json!("v"),
            },
        ];
        prop_assert!(fold(events).is_empty());
    }
    // Applying the same Update twice must yield the same state as once.
    #[test]
    fn update_is_idempotent(
        id in arb_id(),
        n in 0i64..1000,
        v in "[a-z]{1,8}",
    ) {
        let add = Event::Add {
            ts: ts(n),
            id: id.clone(),
            url: "u".into(),
            path: id.clone(),
            pack_type: "declarative".into(),
            schema_version: SCHEMA_VERSION.into(),
        };
        let upd = Event::Update {
            ts: ts(n + 1),
            id: id.clone(),
            field: "ref".into(),
            value: serde_json::Value::String(v),
        };
        let once = fold(vec![add.clone(), upd.clone()]);
        let twice = fold(vec![add, upd.clone(), upd]);
        prop_assert_eq!(once, twice);
    }
    // Once a pack is removed, any later Update/Sync for it is ignored.
    #[test]
    fn rm_is_absorbing(
        id in arb_id(),
        n in 0i64..1000,
        v in "[a-z]{1,8}",
        sha in "[a-f0-9]{6}",
    ) {
        let add = Event::Add {
            ts: ts(n),
            id: id.clone(),
            url: "u".into(),
            path: id.clone(),
            pack_type: "declarative".into(),
            schema_version: SCHEMA_VERSION.into(),
        };
        let rm = Event::Rm { ts: ts(n + 1), id: id.clone() };
        let baseline = fold(vec![add.clone(), rm.clone()]);
        let with_noise = fold(vec![
            add,
            rm,
            Event::Update {
                ts: ts(n + 2),
                id: id.clone(),
                field: "ref".into(),
                value: serde_json::Value::String(v),
            },
            Event::Sync { ts: ts(n + 3), id, sha },
        ]);
        prop_assert_eq!(baseline, with_noise);
    }
    // A re-Add after removal fully resets the pack: new url/path/type,
    // fresh timestamps, and no leftover ref or sync sha.
    #[test]
    fn add_rm_add_cycle(
        id in arb_id(),
        n in 0i64..500,
        gap in 1i64..500,
    ) {
        let t0 = ts(n);
        let t1 = ts(n + gap);
        let t2 = ts(n + 2 * gap);
        let events = vec![
            Event::Add {
                ts: t0,
                id: id.clone(),
                url: "u1".into(),
                path: "p1".into(),
                pack_type: "declarative".into(),
                schema_version: SCHEMA_VERSION.into(),
            },
            Event::Rm { ts: t1, id: id.clone() },
            Event::Add {
                ts: t2,
                id: id.clone(),
                url: "u2".into(),
                path: "p2".into(),
                pack_type: "imperative".into(),
                schema_version: SCHEMA_VERSION.into(),
            },
        ];
        let st = fold(events);
        let p = &st[&id];
        prop_assert_eq!(&p.url, "u2");
        prop_assert_eq!(&p.path, "p2");
        prop_assert_eq!(&p.pack_type, "imperative");
        prop_assert_eq!(p.added_at, t2);
        prop_assert_eq!(p.updated_at, t2);
        prop_assert_eq!(p.ref_spec.as_deref(), None);
        prop_assert_eq!(p.last_sync_sha.as_deref(), None);
    }
}
/// Fixed universe of pack ids used to wire random parent/child edges.
const PACK_IDS: &[&str] = &["a", "b", "c", "d", "e"];
/// Pack loader that serves manifests from memory instead of disk.
struct InMemPackLoader {
    /// Path identifying the synthetic root manifest.
    root_path: PathBuf,
    /// Child manifests keyed by pack id; matched against a path's file name.
    by_id: BTreeMap<String, PackManifest>,
    /// Manifest returned when `root_path` itself is requested.
    root: PackManifest,
}
impl PackLoader for InMemPackLoader {
    /// Resolves a manifest by path: the root path maps to the root manifest;
    /// any other path is looked up in `by_id` by its final path component.
    /// Returns `ManifestNotFound` for non-UTF-8 names or unknown ids.
    fn load(&self, path: &Path) -> Result<PackManifest, TreeError> {
        if path == self.root_path {
            Ok(self.root.clone())
        } else {
            let key = path
                .file_name()
                .and_then(|os| os.to_str())
                .ok_or_else(|| TreeError::ManifestNotFound(path.to_path_buf()))?;
            self.by_id
                .get(key)
                .cloned()
                .ok_or_else(|| TreeError::ManifestNotFound(path.to_path_buf()))
        }
    }
}
/// Git backend stub: every operation succeeds without touching the network.
struct NoopGit;
impl GitBackend for NoopGit {
    /// Identifier surfaced in diagnostics.
    fn name(&self) -> &'static str {
        "proptest-noop-git"
    }
    /// "Clones" by reporting the destination unchanged with an all-zero sha.
    fn clone(&self, _url: &str, dest: &Path, _ref: Option<&str>) -> Result<ClonedRepo, GitError> {
        let head_sha = "0".repeat(40);
        Ok(ClonedRepo { path: dest.to_path_buf(), head_sha })
    }
    /// Fetch is a no-op.
    fn fetch(&self, _repo: &Path) -> Result<(), GitError> {
        Ok(())
    }
    /// Checkout is a no-op.
    fn checkout(&self, _repo: &Path, _refspec: &str) -> Result<(), GitError> {
        Ok(())
    }
    /// Always reports the all-zero sha.
    fn head_sha(&self, _repo: &Path) -> Result<String, GitError> {
        Ok("0".repeat(40))
    }
}
/// Strategy yielding a distinct subset of [`PACK_IDS`] (at most `max_len`
/// entries) by pairing each id with an independently-generated keep flag.
fn arb_distinct_children(max_len: usize) -> impl Strategy<Value = Vec<String>> {
    prop::collection::vec(any::<bool>(), PACK_IDS.len()).prop_map(move |mask| {
        PACK_IDS
            .iter()
            .zip(mask)
            .filter(|(_, keep)| *keep)
            .map(|(id, _)| (*id).to_string())
            .take(max_len)
            .collect()
    })
}
/// Strategy producing a child-id list for every pack id, i.e. a random edge
/// set over the fixed [`PACK_IDS`] universe (cycles are possible on purpose).
fn arb_pack_defs() -> impl Strategy<Value = BTreeMap<String, Vec<String>>> {
    let per_pack: Vec<_> = PACK_IDS.iter().map(|_| arb_distinct_children(4)).collect();
    per_pack.prop_map(|children| {
        PACK_IDS.iter().map(|id| (*id).to_string()).zip(children).collect()
    })
}
/// Renders a minimal meta-pack manifest as YAML: fixed schema version and
/// type, the given `name`, and one `- url:` child entry per id in `kids`
/// (an explicit `children: []` when empty).
fn render_manifest_yaml(name: &str, kids: &[String]) -> String {
    use std::fmt::Write;
    let mut yaml = format!("schema_version: \"1\"\nname: {name}\ntype: meta\n");
    if kids.is_empty() {
        yaml.push_str("children: []\n");
    } else {
        yaml.push_str("children:\n");
        for k in kids {
            // writeln! appends in place instead of allocating a fresh String
            // per child; writing to a String is infallible.
            writeln!(yaml, "  - url: https://e.com/{k}.git").unwrap();
        }
    }
    yaml
}
proptest! {
    #![proptest_config(ProptestConfig {
        cases: 128,
        max_shrink_iters: 64,
        ..ProptestConfig::default()
    })]
    // Building a graph from randomly wired manifests must either succeed or
    // fail with a well-formed CycleDetected — never any third outcome.
    #[test]
    fn random_dag_dichotomy(
        defs in arb_pack_defs(),
        root_kids in arb_distinct_children(4),
    ) {
        // Parse every generated child manifest up front so generation bugs
        // fail loudly rather than surfacing as graph errors.
        let mut children = BTreeMap::new();
        for (id, kids) in &defs {
            let manifest = parse(&render_manifest_yaml(id, kids))
                .expect("generated child manifest must parse");
            children.insert(id.clone(), manifest);
        }
        let root_manifest = parse(&render_manifest_yaml("root", &root_kids))
            .expect("generated root manifest must parse");
        let dir = tempdir().unwrap();
        let root_path = dir.path().to_path_buf();
        let loader = InMemPackLoader {
            root_path: root_path.clone(),
            by_id: children,
            root: root_manifest,
        };
        let backend = NoopGit;
        match build_graph(&root_path, &backend, &loader, None) {
            Ok(_graph) => {}
            Err(TreeError::CycleDetected { chain }) => {
                // A reported cycle must actually close: the final element
                // reappears earlier in the chain.
                prop_assert!(!chain.is_empty(), "cycle chain must be non-empty");
                let last = chain.last().unwrap();
                let seen_before = chain.iter().take(chain.len() - 1).any(|s| s == last);
                prop_assert!(seen_before, "last element must recur earlier in chain: {chain:?}");
            }
            Err(other) => {
                prop_assert!(
                    false,
                    "third outcome from build_graph: {other:?} (expected Ok or CycleDetected)",
                );
            }
        }
    }
}
/// Re-adding a pack after removal must reset `added_at` to the second Add's
/// timestamp and pick up the new URL.
#[test]
fn add_rm_add_has_later_added_at() {
    let first_add = Event::Add {
        ts: ts(0),
        id: "a".into(),
        url: "u".into(),
        path: "a".into(),
        pack_type: "declarative".into(),
        schema_version: SCHEMA_VERSION.into(),
    };
    let removal = Event::Rm { ts: ts(1), id: "a".into() };
    let second_add = Event::Add {
        ts: ts(5),
        id: "a".into(),
        url: "u2".into(),
        path: "a".into(),
        pack_type: "declarative".into(),
        schema_version: SCHEMA_VERSION.into(),
    };
    let state = fold(vec![first_add, removal, second_add]);
    assert_eq!(state["a"].added_at, ts(5));
    assert_eq!(state["a"].url, "u2");
}