#![allow(clippy::future_not_send)]
use std::io::{self, BufRead, Write};
use std::path::{Path, PathBuf};
use panproto_vcs::{FsStore, ObjectId, Store};
use panproto_xrpc::NodeClient;
use rustc_hash::FxHashMap;
// Abstraction over the subset of the xrpc `NodeClient` API that push/fetch
// need, so that logic can be exercised with a substitute implementation.
#[allow(async_fn_in_trait)]
trait RemoteClient {
    /// Forwarded to `NodeClient::pull`: bring remote objects into `store`.
    async fn remote_pull(&self, store: &mut FsStore) -> Result<(), Box<dyn std::error::Error>>;
    /// Forwarded to `NodeClient::push`: send local objects from `store`.
    async fn remote_push(&self, store: &FsStore) -> Result<(), Box<dyn std::error::Error>>;
    /// Resolve `ref_name` on the remote; `Ok(None)` when it does not exist.
    async fn remote_get_ref(
        &self,
        ref_name: &str,
    ) -> Result<Option<ObjectId>, Box<dyn std::error::Error>>;
    /// Update `ref_name` on the remote to `new_target`.
    // NOTE(review): `old_target`, `protocol` and `commit_count` are forwarded
    // verbatim to the xrpc layer; their exact semantics live there (see
    // `cmd_push` for how they are supplied).
    async fn remote_set_ref(
        &self,
        ref_name: &str,
        old_target: Option<&ObjectId>,
        new_target: &ObjectId,
        protocol: &str,
        commit_count: u64,
    ) -> Result<(), Box<dyn std::error::Error>>;
}
// Production implementation: forward every trait method straight to the
// concrete xrpc client, boxing its error type into `Box<dyn Error>`.
impl RemoteClient for NodeClient {
    async fn remote_pull(&self, store: &mut FsStore) -> Result<(), Box<dyn std::error::Error>> {
        self.pull(store).await?;
        Ok(())
    }
    async fn remote_push(&self, store: &FsStore) -> Result<(), Box<dyn std::error::Error>> {
        self.push(store).await?;
        Ok(())
    }
    async fn remote_get_ref(
        &self,
        ref_name: &str,
    ) -> Result<Option<ObjectId>, Box<dyn std::error::Error>> {
        Ok(self.get_ref(ref_name).await?)
    }
    async fn remote_set_ref(
        &self,
        ref_name: &str,
        old_target: Option<&ObjectId>,
        new_target: &ObjectId,
        protocol: &str,
        commit_count: u64,
    ) -> Result<(), Box<dyn std::error::Error>> {
        self.set_ref(ref_name, old_target, new_target, protocol, commit_count)
            .await?;
        Ok(())
    }
}
/// CLI wrapper for the `warm` subcommand: import `revspec` into the warm
/// cache, report the new-commit count on stderr, and exit(1) on failure.
fn run_warm(revspec: &str) {
    let outcome = cmd_warm(revspec).map(|imported| {
        eprintln!("git-remote-panproto: warm cache imported {imported} new commit(s)");
    });
    if let Err(e) = outcome {
        eprintln!("git-remote-panproto: warm failed: {e}");
        std::process::exit(1);
    }
}
/// CLI wrapper for the `install-hooks` subcommand: install the post-commit
/// hook, report its path on stderr, and exit(1) on failure.
fn run_install_hooks() {
    let hook_path = cmd_install_hooks().unwrap_or_else(|e| {
        eprintln!("git-remote-panproto: install-hooks failed: {e}");
        std::process::exit(1);
    });
    eprintln!(
        "git-remote-panproto: installed post-commit hook at {}",
        hook_path.display()
    );
}
/// Entry point. Dispatches the `warm` / `install-hooks` maintenance
/// subcommands; otherwise runs as a git remote helper, invoked by git as
/// `git-remote-panproto <remote> <url>` speaking the helper protocol on stdio.
fn main() {
    let args: Vec<String> = std::env::args().collect();
    // Maintenance subcommands take priority over the remote-helper calling
    // convention.
    match args.get(1).map(String::as_str) {
        Some("warm") => {
            // Optional second argument selects the revspec; defaults to HEAD.
            run_warm(args.get(2).map_or("HEAD", String::as_str));
            return;
        }
        Some("install-hooks") => {
            run_install_hooks();
            return;
        }
        _ => {}
    }
    if args.len() < 3 {
        eprintln!("usage: git-remote-panproto <remote> <url>");
        eprintln!(" git-remote-panproto warm [<revspec>]");
        eprintln!(" git-remote-panproto install-hooks");
        std::process::exit(1);
    }
    let remote_name = &args[1];
    let url = &args[2];
    let client = match NodeClient::from_url(url) {
        Ok(c) => c,
        Err(e) => {
            eprintln!("error: {e}");
            std::process::exit(1);
        }
    };
    // Auth token: prefer the PANPROTO_* variables, fall back to the legacy
    // COSPAN_* names; run unauthenticated when none is set.
    let token = std::env::var("PANPROTO_PUSH_TOKEN")
        .or_else(|_| std::env::var("PANPROTO_TOKEN"))
        .or_else(|_| std::env::var("COSPAN_PUSH_TOKEN"))
        .or_else(|_| std::env::var("COSPAN_TOKEN"));
    let client = match token {
        Ok(token) => client.with_token(&token),
        Err(_) => client,
    };
    let rt = tokio::runtime::Runtime::new().unwrap_or_else(|e| {
        eprintln!("error creating tokio runtime: {e}");
        std::process::exit(1);
    });
    // git sets GIT_DIR when invoking a remote helper; fall back to ./.git for
    // manual invocations.
    let git_dir = std::env::var("GIT_DIR").unwrap_or_else(|_| ".git".to_owned());
    let local_git_repo = match git2::Repository::open(&git_dir) {
        Ok(r) => r,
        Err(e) => {
            eprintln!("error opening local git repo at {git_dir}: {e}");
            std::process::exit(1);
        }
    };
    // Per-remote object cache. Prefer the new panproto-cache location, but
    // keep using an already-initialised legacy cospan-cache when the new
    // location has no store yet.
    let panproto_cache = Path::new(&git_dir).join("panproto-cache").join(remote_name);
    let legacy_cache = Path::new(&git_dir).join("cospan-cache").join(remote_name);
    let cache_dir =
        if !panproto_cache.join(".panproto").is_dir() && legacy_cache.join(".panproto").is_dir() {
            legacy_cache
        } else {
            panproto_cache
        };
    // Shared warm cache (populated by `warm` / the post-commit hook), if any.
    let warm_dir = warm_cache_dir(Path::new(&git_dir));
    run_remote_helper(
        &rt,
        &client,
        &local_git_repo,
        &cache_dir,
        warm_dir.as_deref(),
    );
}
/// Speak the git remote-helper protocol on stdin/stdout.
///
/// Supported commands: `capabilities`, `list` / `list for-push`,
/// `fetch <sha> <ref>` and `push <refspec>`; a blank input line ends a command
/// batch. Protocol replies go to stdout, progress and errors to stderr.
fn run_remote_helper(
    rt: &tokio::runtime::Runtime,
    client: &NodeClient,
    local_git_repo: &git2::Repository,
    cache_dir: &Path,
    warm_dir: Option<&Path>,
) {
    let stdin = io::stdin();
    let stdout = io::stdout();
    let mut out = io::BufWriter::new(stdout.lock());
    for line in stdin.lock().lines() {
        let line = match line {
            Ok(l) => l,
            Err(e) => {
                eprintln!("error reading stdin: {e}");
                break;
            }
        };
        let line = line.trim();
        // Blank line terminates the current command batch; answer in kind.
        if line.is_empty() {
            let _ = writeln!(out);
            let _ = out.flush();
            continue;
        }
        if line == "capabilities" {
            let _ = writeln!(out, "fetch");
            let _ = writeln!(out, "push");
            let _ = writeln!(out);
            let _ = out.flush();
            continue;
        }
        if line == "list" || line == "list for-push" {
            match rt.block_on(cmd_list(client)) {
                Ok(refs) => {
                    // One "<value> <refname>" line per ref, then a blank line.
                    for (id, name) in &refs {
                        let _ = writeln!(out, "{id} {name}");
                    }
                    let _ = writeln!(out);
                    let _ = out.flush();
                }
                Err(e) => {
                    eprintln!("error listing refs: {e}");
                    break;
                }
            }
            continue;
        }
        if let Some(rest) = line.strip_prefix("fetch ") {
            // "fetch <sha> <ref>" — only the ref name is used here; a failed
            // fetch is reported on stderr but does not end the session.
            let parts: Vec<&str> = rest.splitn(2, ' ').collect();
            if parts.len() == 2 {
                match rt.block_on(cmd_fetch(client, parts[1], local_git_repo, cache_dir)) {
                    Ok(()) => {}
                    Err(e) => {
                        eprintln!("error fetching {}: {e}", parts[1]);
                    }
                }
            }
            continue;
        }
        if let Some(rest) = line.strip_prefix("push ") {
            // "push <refspec>" — the protocol requires a per-ref "ok <dst>"
            // or "error <dst> <reason>" status line on stdout.
            let dst = push_refspec_dst(rest);
            match rt.block_on(cmd_push(client, rest, local_git_repo, cache_dir, warm_dir)) {
                Ok(()) => {
                    let _ = writeln!(out, "ok {dst}");
                }
                Err(e) => {
                    eprintln!("git-remote-panproto: push {dst} failed: {e}");
                    let _ = writeln!(out, "error {dst} {e}");
                }
            }
            continue;
        }
        eprintln!("git-remote-panproto: unknown command: {line}");
    }
}
/// Build the `list` response: one `(value, refname)` pair per remote ref,
/// plus a final HEAD entry — either a `@refs/heads/<branch>` symref pointer
/// or the detached commit id.
async fn cmd_list(client: &NodeClient) -> Result<Vec<(String, String)>, panproto_xrpc::XrpcError> {
    let mut result: Vec<(String, String)> = client
        .list_refs()
        .await?
        .into_iter()
        .map(|(name, id)| (id.to_string(), name))
        .collect();
    let head_value = match client.get_head().await? {
        panproto_vcs::HeadState::Branch(branch) => format!("@refs/heads/{branch}"),
        panproto_vcs::HeadState::Detached(id) => id.to_string(),
    };
    result.push((head_value, "HEAD".to_owned()));
    Ok(result)
}
/// Handle one `fetch` command for `ref_name`: pull remote objects into the
/// per-remote cache store, then export the ref's history into the local git
/// repository. Progress is reported on stderr only.
async fn cmd_fetch<C: RemoteClient>(
    client: &C,
    ref_name: &str,
    git_repo: &git2::Repository,
    cache_dir: &Path,
) -> Result<(), Box<dyn std::error::Error>> {
    let mut store = open_or_init_cache(cache_dir)?;
    client.remote_pull(&mut store).await?;
    let stats = fetch_export_stage(&store, git_repo, cache_dir, ref_name)?;
    if stats.commits_exported > 0 {
        eprintln!(
            "git-remote-panproto: fetched {}/{} new commits for {ref_name}",
            stats.commits_exported, stats.commits_walked
        );
    }
    if let Some(oid) = stats.tip_git_oid {
        eprintln!("git-remote-panproto: {ref_name} tip = {oid}");
    }
    Ok(())
}
/// Outcome summary of one `fetch_export_stage` run.
#[derive(Debug, Default)]
struct FetchExportReport {
    // Commits newly written into the git repository on this run.
    commits_exported: usize,
    // Total commits reachable from the requested ref's tip.
    commits_walked: usize,
    // Git OID of the ref tip: the last freshly exported commit, or the OID
    // recovered from existing marks when nothing new was exported.
    tip_git_oid: Option<git2::Oid>,
}
/// Export the commits reachable from `ref_name` in `store` into `git_repo`,
/// using the marks file under `cache_dir` to skip already-exported commits.
///
/// Returns walked/exported counts plus the git OID of the ref tip (freshly
/// exported, or looked up from marks when the run was a no-op).
fn fetch_export_stage<S: Store>(
    store: &S,
    git_repo: &git2::Repository,
    cache_dir: &Path,
    ref_name: &str,
) -> Result<FetchExportReport, Box<dyn std::error::Error>> {
    let tip_id = store
        .get_ref(ref_name)?
        .ok_or_else(|| format!("ref {ref_name} not found in store"))?;
    let marks_path = marks_path(cache_dir);
    let git_marks = load_marks(&marks_path);
    // Invert the marks (git -> panproto) into panproto -> git. Stale entries
    // whose git commit no longer exists locally are dropped so those commits
    // get re-exported instead of being referenced as dangling parents.
    let mut panproto_to_git: FxHashMap<ObjectId, git2::Oid> = git_marks
        .iter()
        .filter_map(|(g, p)| {
            if git_repo.find_commit(*g).is_ok() {
                Some((*p, *g))
            } else {
                None
            }
        })
        .collect();
    // Parents-first order guarantees a commit's parents are already in
    // `panproto_to_git` by the time the commit itself is exported.
    let topo_order = topo_walk_from(store, tip_id)?;
    let commits_walked = topo_order.len();
    let mut new_marks: Vec<(git2::Oid, ObjectId)> = Vec::new();
    let mut last_exported: Option<git2::Oid> = None;
    for (panproto_id, _) in &topo_order {
        if panproto_to_git.contains_key(panproto_id) {
            continue;
        }
        let result =
            panproto_git::export_to_git(store, git_repo, *panproto_id, &panproto_to_git, None)?;
        panproto_to_git.insert(*panproto_id, result.git_oid);
        new_marks.push((result.git_oid, *panproto_id));
        last_exported = Some(result.git_oid);
    }
    if !new_marks.is_empty() {
        append_marks(&marks_path, &new_marks)?;
    }
    // The walk emits the tip last, so `last_exported` is the tip whenever
    // anything was exported; otherwise fall back to the existing mark.
    let tip_git_oid = last_exported.or_else(|| panproto_to_git.get(&tip_id).copied());
    Ok(FetchExportReport {
        commits_exported: new_marks.len(),
        commits_walked,
        tip_git_oid,
    })
}
// Explicit stack frame for the iterative walk in `topo_walk_from`: `Enter`
// schedules a commit's parents, `Emit` outputs the commit once its parents
// are done. The loaded commit rides along in a `Box` between the two phases.
enum TopoFrame {
    Enter(ObjectId),
    Emit(ObjectId, Box<panproto_vcs::CommitObject>),
}
/// Fetch `id` from `store` and require it to be a commit object; any other
/// object type is reported as an error naming the actual type.
fn load_commit<S: Store>(
    store: &S,
    id: ObjectId,
) -> Result<panproto_vcs::CommitObject, Box<dyn std::error::Error>> {
    match store.get(&id)? {
        panproto_vcs::Object::Commit(commit) => Ok(commit),
        unexpected => {
            let msg = format!(
                "topo_walk: expected commit at {id}, got {}",
                unexpected.type_name()
            );
            Err(msg.into())
        }
    }
}
/// Walk the commit DAG from `tip` and return every reachable commit in
/// parents-before-children order (`tip` emitted last), using an explicit
/// stack instead of recursion so deep histories cannot overflow the stack.
fn topo_walk_from<S: Store>(
    store: &S,
    tip: ObjectId,
) -> Result<Vec<(ObjectId, panproto_vcs::CommitObject)>, Box<dyn std::error::Error>> {
    use std::collections::HashSet;
    let mut result: Vec<(ObjectId, panproto_vcs::CommitObject)> = Vec::new();
    let mut visited: HashSet<ObjectId> = HashSet::default();
    let mut emitted: HashSet<ObjectId> = HashSet::default();
    let mut stack: Vec<TopoFrame> = vec![TopoFrame::Enter(tip)];
    while let Some(frame) = stack.pop() {
        match frame {
            TopoFrame::Enter(id) => {
                // First visit only; merges can reach a commit several times.
                if !visited.insert(id) {
                    continue;
                }
                let commit = load_commit(store, id)?;
                let parents: Vec<ObjectId> = commit.parents.clone();
                // Push Emit *below* the parents' Enter frames so all parents
                // are emitted before this commit.
                stack.push(TopoFrame::Emit(id, Box::new(commit)));
                // Reversed so the first parent is processed first (LIFO).
                for parent in parents.iter().rev() {
                    if !visited.contains(parent) {
                        stack.push(TopoFrame::Enter(*parent));
                    }
                }
            }
            TopoFrame::Emit(id, commit) => {
                // `visited` already dedups Enter frames; this guard keeps the
                // output duplicate-free regardless.
                if !emitted.insert(id) {
                    continue;
                }
                result.push((id, *commit));
            }
        }
    }
    Ok(result)
}
/// Handle one `push <refspec>` command: import `src` from git into the cache
/// store, push the resulting objects to the remote, then update the remote
/// ref `dst`, passing its previously observed target along.
async fn cmd_push<C: RemoteClient>(
    client: &C,
    refspec: &str,
    git_repo: &git2::Repository,
    cache_dir: &Path,
    warm_dir: Option<&Path>,
) -> Result<(), Box<dyn std::error::Error>> {
    // Fix: a force-push refspec carries a leading '+' ("+src:dst"). The '+'
    // is not part of the ref name, and leaving it on `src` makes the git
    // revspec lookup in the import stage fail. Strip it here, mirroring what
    // `push_refspec_dst` does for the status line.
    let refspec = refspec.trim_start_matches('+');
    let parts: Vec<&str> = refspec.splitn(2, ':').collect();
    let src = parts.first().copied().unwrap_or("HEAD");
    // A bare "ref" refspec pushes to the same name it came from.
    let dst = parts.get(1).copied().unwrap_or(src);
    let mut store = open_or_init_cache(cache_dir)?;
    let report = push_import_stage(&mut store, git_repo, cache_dir, src, dst, warm_dir)?;
    if report.new_commits > 0 {
        eprintln!(
            "git-remote-panproto: imported {} new commits ({} total)",
            report.new_commits, report.total_commits
        );
    }
    client.remote_push(&store).await?;
    // Read the remote's current target first; it is forwarded as `old_target`
    // (None when the ref does not exist yet) so the server can validate the
    // transition.
    let remote_target = client.remote_get_ref(dst).await?;
    client
        .remote_set_ref(
            dst,
            remote_target.as_ref(),
            &report.head_id,
            "project",
            u64::try_from(report.total_commits).unwrap_or(0),
        )
        .await?;
    Ok(())
}
/// Outcome summary of one `push_import_stage` run.
#[derive(Debug)]
struct PushImportReport {
    // Panproto id of the imported head commit (ObjectId::ZERO when the
    // import produced no head).
    head_id: ObjectId,
    // Commits imported on this run.
    new_commits: usize,
    // Previously known marks + warm-merged marks + this run's imports.
    total_commits: usize,
}
/// Import the commits reachable from git revspec `src` into `store`, update
/// the store ref `dst` to the imported head, and persist new git<->panproto
/// marks next to the cache.
///
/// When `warm_dir` is given, its objects and marks are merged in first so
/// commits already parsed into the warm cache are not re-imported here.
fn push_import_stage(
    store: &mut FsStore,
    git_repo: &git2::Repository,
    cache_dir: &Path,
    src: &str,
    dst: &str,
    warm_dir: Option<&Path>,
) -> Result<PushImportReport, Box<dyn std::error::Error>> {
    let marks_path = marks_path(cache_dir);
    let mut known = load_marks(&marks_path);
    let previously_known = known.len();
    // Seed `known` (and the destination store) from the warm cache, if any.
    let warm_marks_merged = if let Some(warm_dir) = warm_dir {
        copy_warm_into(store, &mut known, warm_dir)?
    } else {
        0
    };
    let import_result = panproto_git::import_git_repo_incremental(git_repo, store, src, &known)?;
    let mut to_append = import_result.oid_map.clone();
    // Persist any warm-contributed marks not yet in this cache's marks file,
    // so future runs do not depend on the warm cache still being present.
    if warm_marks_merged > 0 {
        let existing = load_marks(&marks_path);
        for (git_oid, panproto_id) in &known {
            if !existing.contains_key(git_oid) {
                to_append.push((*git_oid, *panproto_id));
            }
        }
    }
    if !to_append.is_empty() {
        append_marks(&marks_path, &to_append)?;
    }
    // NOTE(review): a ZERO head appears to signal "no head produced" — only
    // then is the ref left untouched; confirm against import_git_repo_incremental.
    if import_result.head_id != ObjectId::ZERO {
        store.set_ref(dst, import_result.head_id)?;
    }
    Ok(PushImportReport {
        head_id: import_result.head_id,
        new_commits: import_result.commit_count,
        total_commits: previously_known + warm_marks_merged + import_result.commit_count,
    })
}
/// Locate the shared warm cache under `git_dir`, returning it only when it
/// already contains an initialised `.panproto` store.
fn warm_cache_dir(git_dir: &Path) -> Option<PathBuf> {
    let candidate = git_dir.join("panproto-cache").join("warm");
    candidate.join(".panproto").is_dir().then_some(candidate)
}
/// Merge the warm cache at `warm_dir` into `dst_store` and `known`.
///
/// Copies every object the destination store is missing, then folds the warm
/// marks into `known`, returning how many mappings were genuinely new.
fn copy_warm_into(
    dst_store: &mut FsStore,
    known: &mut FxHashMap<git2::Oid, ObjectId>,
    warm_dir: &Path,
) -> Result<usize, Box<dyn std::error::Error>> {
    let warm_store = FsStore::open(warm_dir)?;
    for id in warm_store.list_objects()? {
        if dst_store.has(&id) {
            continue;
        }
        dst_store.put(&warm_store.get(&id)?)?;
    }
    let mut added = 0;
    for (git_oid, panproto_id) in load_marks(&marks_path(warm_dir)) {
        // insert returns None only when the key was absent, i.e. a new mark.
        if known.insert(git_oid, panproto_id).is_none() {
            added += 1;
        }
    }
    Ok(added)
}
/// Import `revspec` from the local git repo into the shared warm cache under
/// `$GIT_DIR/panproto-cache/warm`, recording new marks. Returns how many
/// commits were newly imported.
fn cmd_warm(revspec: &str) -> Result<usize, Box<dyn std::error::Error>> {
    let git_dir = std::env::var("GIT_DIR").unwrap_or_else(|_| ".git".to_owned());
    let git_repo = git2::Repository::open(&git_dir)?;
    let warm_dir = Path::new(&git_dir).join("panproto-cache").join("warm");
    let mut store = open_or_init_cache(&warm_dir)?;
    let marks_file = marks_path(&warm_dir);
    let known = load_marks(&marks_file);
    let result = panproto_git::import_git_repo_incremental(&git_repo, &mut store, revspec, &known)?;
    match result.oid_map.as_slice() {
        [] => {}
        new_marks => append_marks(&marks_file, new_marks)?,
    }
    Ok(result.commit_count)
}
// Sentinel lines delimiting the snippet `cmd_install_hooks` writes into the
// post-commit hook. Only the BEGIN sentinel is checked to make installation
// idempotent; the END sentinel just marks where the snippet stops.
const HOOK_SENTINEL: &str = "# BEGIN panproto-git-remote post-commit hook";
const HOOK_END_SENTINEL: &str = "# END panproto-git-remote post-commit hook";
/// Install (or splice into) the repository's `post-commit` hook so every
/// commit warms the panproto cache in the background.
///
/// Idempotent: if the BEGIN sentinel is already present the file is left
/// untouched. An existing hook is preserved — the snippet is inserted right
/// after its shebang line, or appended when there is no shebang. Returns the
/// hook file's path.
fn cmd_install_hooks() -> Result<PathBuf, Box<dyn std::error::Error>> {
    let git_dir = std::env::var("GIT_DIR").unwrap_or_else(|_| ".git".to_owned());
    let hooks_dir = Path::new(&git_dir).join("hooks");
    std::fs::create_dir_all(&hooks_dir)?;
    let hook_path = hooks_dir.join("post-commit");
    // `|| true` keeps a failed warm from failing the user's commit.
    let snippet = format!(
        "{HOOK_SENTINEL}\n\
         git-remote-panproto warm HEAD >/dev/null 2>&1 || true\n\
         {HOOK_END_SENTINEL}\n"
    );
    let new_contents = if hook_path.exists() {
        let existing = std::fs::read_to_string(&hook_path)?;
        if existing.contains(HOOK_SENTINEL) {
            return Ok(hook_path);
        }
        if existing.starts_with("#!") {
            // Keep the original shebang on line one; the snippet follows it,
            // then the remainder of the original script.
            let mut lines = existing.splitn(2, '\n');
            let shebang = lines.next().unwrap_or("#!/bin/sh");
            let rest = lines.next().unwrap_or("");
            format!("{shebang}\n{snippet}{rest}")
        } else {
            format!("{existing}\n{snippet}")
        }
    } else {
        format!("#!/bin/sh\n{snippet}")
    };
    std::fs::write(&hook_path, new_contents)?;
    // git only runs hooks that are executable.
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;
        let mut perms = std::fs::metadata(&hook_path)?.permissions();
        perms.set_mode(0o755);
        std::fs::set_permissions(&hook_path, perms)?;
    }
    Ok(hook_path)
}
/// Extract the destination ref from a push refspec (`[+]src:dst`, `:dst`, or
/// a bare `ref`), dropping any force-push `+` prefix and stray whitespace.
fn push_refspec_dst(refspec: &str) -> &str {
    let dst = match refspec.split_once(':') {
        Some((_, after_colon)) => after_colon,
        None => refspec,
    };
    dst.trim_start_matches('+').trim()
}
/// Location of the git<->panproto marks file for a given cache directory.
fn marks_path(cache_dir: &Path) -> PathBuf {
    let mut path = cache_dir.to_path_buf();
    path.push("git-marks.txt");
    path
}
/// Open the store at `cache_dir` when a `.panproto` directory already marks
/// it as initialised; otherwise create the directory and init a fresh store.
fn open_or_init_cache(cache_dir: &Path) -> Result<FsStore, Box<dyn std::error::Error>> {
    let store = if cache_dir.join(".panproto").is_dir() {
        FsStore::open(cache_dir)?
    } else {
        std::fs::create_dir_all(cache_dir)?;
        FsStore::init(cache_dir)?
    };
    Ok(store)
}
/// Parse the marks file into a git-OID -> panproto-id map.
///
/// Missing or unreadable files yield an empty map, and malformed lines are
/// skipped silently; when a git OID appears more than once the later entry
/// wins (collect inserts in file order).
fn load_marks(marks_path: &Path) -> FxHashMap<git2::Oid, ObjectId> {
    let content = std::fs::read_to_string(marks_path).unwrap_or_default();
    content
        .lines()
        .filter_map(|line| {
            let mut fields = line.split_whitespace();
            let git_hex = fields.next()?;
            let panproto_hex = fields.next()?;
            let git_oid = git2::Oid::from_str(git_hex).ok()?;
            let panproto_id = panproto_hex.parse::<ObjectId>().ok()?;
            Some((git_oid, panproto_id))
        })
        .collect()
}
fn append_marks(
marks_path: &Path,
entries: &[(git2::Oid, ObjectId)],
) -> Result<(), Box<dyn std::error::Error>> {
if let Some(parent) = marks_path.parent() {
std::fs::create_dir_all(parent)?;
}
let mut file = std::fs::OpenOptions::new()
.create(true)
.append(true)
.open(marks_path)?;
for (git_oid, panproto_id) in entries {
writeln!(file, "{git_oid} {panproto_id}")?;
}
Ok(())
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use std::path::Path;
use super::{
RemoteClient, append_marks, cmd_fetch, cmd_push, fetch_export_stage, load_marks,
marks_path, open_or_init_cache, push_import_stage, push_refspec_dst,
};
use panproto_vcs::{FsStore, MemStore, Object, ObjectId, Store};
use std::cell::RefCell;
fn fake_panproto_id(seed: u8) -> ObjectId {
ObjectId::from_bytes([seed; 32])
}
fn fake_git_oid(hex_char: char) -> git2::Oid {
let s: String = std::iter::repeat_n(hex_char, 40).collect();
git2::Oid::from_str(&s).unwrap()
}
#[test]
fn push_refspec_dst_extracts_destination() {
assert_eq!(
push_refspec_dst("refs/heads/main:refs/heads/main"),
"refs/heads/main"
);
assert_eq!(
push_refspec_dst("refs/heads/feature:refs/heads/main"),
"refs/heads/main"
);
assert_eq!(
push_refspec_dst("+refs/heads/main:refs/heads/main"),
"refs/heads/main"
);
assert_eq!(push_refspec_dst(":refs/heads/gone"), "refs/heads/gone");
assert_eq!(push_refspec_dst("refs/heads/main"), "refs/heads/main");
}
#[test]
fn marks_path_is_next_to_cache_dir() {
let p = marks_path(std::path::Path::new("/tmp/cache"));
assert_eq!(p, std::path::PathBuf::from("/tmp/cache/git-marks.txt"));
}
#[test]
fn load_marks_missing_file_returns_empty() {
let dir = tempfile::tempdir().unwrap();
let path = dir.path().join("nonexistent.txt");
let result = load_marks(&path);
assert!(result.is_empty());
}
#[test]
fn load_marks_parses_well_formed_entries() {
let dir = tempfile::tempdir().unwrap();
let path = dir.path().join("marks.txt");
let git_a = fake_git_oid('a');
let git_b = fake_git_oid('b');
let pan_a = fake_panproto_id(0x11);
let pan_b = fake_panproto_id(0x22);
let content = format!("{git_a} {pan_a}\n{git_b} {pan_b}\n");
std::fs::write(&path, content).unwrap();
let marks = load_marks(&path);
assert_eq!(marks.len(), 2);
assert_eq!(marks.get(&git_a).copied(), Some(pan_a));
assert_eq!(marks.get(&git_b).copied(), Some(pan_b));
}
#[test]
fn load_marks_skips_malformed_lines() {
let dir = tempfile::tempdir().unwrap();
let path = dir.path().join("marks.txt");
let git_a = fake_git_oid('a');
let pan_a = fake_panproto_id(0x11);
let content = format!(
"\n\
onlyonefield\n\
zz {pan_a}\n\
{git_a} not_a_hash\n\
{git_a} {pan_a}\n"
);
std::fs::write(&path, content).unwrap();
let marks = load_marks(&path);
assert_eq!(marks.len(), 1);
assert_eq!(marks.get(&git_a).copied(), Some(pan_a));
}
#[test]
fn append_marks_creates_file_and_round_trips_via_load() {
let dir = tempfile::tempdir().unwrap();
let path = dir.path().join("nested/subdir/marks.txt");
let git_a = fake_git_oid('a');
let pan_a = fake_panproto_id(0x33);
append_marks(&path, &[(git_a, pan_a)]).unwrap();
assert!(path.exists());
let marks = load_marks(&path);
assert_eq!(marks.len(), 1);
assert_eq!(marks.get(&git_a).copied(), Some(pan_a));
}
#[test]
fn append_marks_appends_to_existing_file() {
let dir = tempfile::tempdir().unwrap();
let path = dir.path().join("marks.txt");
let git_a = fake_git_oid('a');
let git_b = fake_git_oid('b');
let pan_a = fake_panproto_id(0x11);
let pan_b = fake_panproto_id(0x22);
append_marks(&path, &[(git_a, pan_a)]).unwrap();
append_marks(&path, &[(git_b, pan_b)]).unwrap();
let marks = load_marks(&path);
assert_eq!(marks.len(), 2);
assert_eq!(marks.get(&git_a).copied(), Some(pan_a));
assert_eq!(marks.get(&git_b).copied(), Some(pan_b));
}
#[test]
fn append_marks_duplicate_entries_keeps_latest() {
let dir = tempfile::tempdir().unwrap();
let path = dir.path().join("marks.txt");
let git_a = fake_git_oid('a');
let pan_old = fake_panproto_id(0x11);
let pan_new = fake_panproto_id(0x22);
append_marks(&path, &[(git_a, pan_old)]).unwrap();
append_marks(&path, &[(git_a, pan_new)]).unwrap();
let marks = load_marks(&path);
assert_eq!(marks.len(), 1);
assert_eq!(
marks.get(&git_a).copied(),
Some(pan_new),
"later entry should override earlier one"
);
}
#[test]
fn open_or_init_cache_creates_fresh_store() {
let dir = tempfile::tempdir().unwrap();
let cache = dir.path().join("cache");
assert!(!cache.join(".panproto").exists());
let mut store = open_or_init_cache(&cache).unwrap();
assert!(cache.join(".panproto").is_dir());
let id = fake_panproto_id(0x77);
store.set_ref("refs/heads/test", id).unwrap();
assert_eq!(store.get_ref("refs/heads/test").unwrap(), Some(id));
}
#[test]
fn open_or_init_cache_reopens_existing_store() {
let dir = tempfile::tempdir().unwrap();
let cache = dir.path().join("cache");
{
let mut store = open_or_init_cache(&cache).unwrap();
let id = fake_panproto_id(0x55);
store.set_ref("refs/heads/persistent", id).unwrap();
}
let store = open_or_init_cache(&cache).unwrap();
assert_eq!(
store.get_ref("refs/heads/persistent").unwrap(),
Some(fake_panproto_id(0x55))
);
}
fn linear_git_history(n: usize) -> (tempfile::TempDir, git2::Repository, Vec<git2::Oid>) {
let dir = tempfile::tempdir().unwrap();
let repo = git2::Repository::init(dir.path()).unwrap();
let sig = git2::Signature::new("Tester", "tester@example.com", &git2::Time::new(1000, 0))
.unwrap();
let file_path = dir.path().join("main.py");
let mut commit_oids = Vec::new();
let mut parent: Option<git2::Oid> = None;
for i in 0..n {
std::fs::write(&file_path, format!("x = {i}\n").as_bytes()).unwrap();
let mut index = repo.index().unwrap();
index.add_path(Path::new("main.py")).unwrap();
index.write().unwrap();
let tree_oid = index.write_tree().unwrap();
let tree = repo.find_tree(tree_oid).unwrap();
let parent_commit = parent.map(|p| repo.find_commit(p).unwrap());
let parents: Vec<&git2::Commit<'_>> = parent_commit.iter().collect();
let new_oid = repo
.commit(
Some("HEAD"),
&sig,
&sig,
&format!("commit {i}"),
&tree,
&parents,
)
.unwrap();
commit_oids.push(new_oid);
parent = Some(new_oid);
}
(dir, repo, commit_oids)
}
fn append_commit(repo: &git2::Repository, dir: &Path, n: usize) -> git2::Oid {
let sig = git2::Signature::new("Tester", "tester@example.com", &git2::Time::new(1000, 0))
.unwrap();
let file_path = dir.join("main.py");
std::fs::write(&file_path, format!("x = {n}\n").as_bytes()).unwrap();
let mut index = repo.index().unwrap();
index.add_path(Path::new("main.py")).unwrap();
index.write().unwrap();
let tree_oid = index.write_tree().unwrap();
let tree = repo.find_tree(tree_oid).unwrap();
let parent = repo.head().unwrap().peel_to_commit().unwrap();
repo.commit(
Some("HEAD"),
&sig,
&sig,
&format!("commit {n}"),
&tree,
&[&parent],
)
.unwrap()
}
#[test]
fn push_stage_first_run_imports_full_history() {
let (_git_dir, git_repo, oids) = linear_git_history(3);
let cache_tmp = tempfile::tempdir().unwrap();
let cache = cache_tmp.path().join("cache");
let mut store = open_or_init_cache(&cache).unwrap();
let report = push_import_stage(
&mut store,
&git_repo,
&cache,
"HEAD",
"refs/heads/main",
None,
)
.unwrap();
assert_eq!(report.new_commits, 3);
assert_eq!(report.total_commits, 3);
assert_ne!(report.head_id, ObjectId::ZERO);
assert_eq!(
store.get_ref("refs/heads/main").unwrap(),
Some(report.head_id)
);
let marks = load_marks(&marks_path(&cache));
assert_eq!(marks.len(), 3);
for oid in &oids {
assert!(marks.contains_key(oid), "marks missing git OID {oid}");
}
}
#[test]
fn push_stage_second_run_is_noop_when_nothing_new() {
let (_git_dir, git_repo, _oids) = linear_git_history(2);
let cache_tmp = tempfile::tempdir().unwrap();
let cache = cache_tmp.path().join("cache");
let mut store = open_or_init_cache(&cache).unwrap();
let first = push_import_stage(
&mut store,
&git_repo,
&cache,
"HEAD",
"refs/heads/main",
None,
)
.unwrap();
assert_eq!(first.new_commits, 2);
let second = push_import_stage(
&mut store,
&git_repo,
&cache,
"HEAD",
"refs/heads/main",
None,
)
.unwrap();
assert_eq!(second.new_commits, 0);
assert_eq!(second.total_commits, 2, "total should still reflect both");
assert_eq!(
second.head_id, first.head_id,
"head should be preserved across noop push"
);
}
#[test]
fn push_stage_imports_only_new_commits_after_extension() {
let (git_dir, git_repo, _oids) = linear_git_history(2);
let cache_tmp = tempfile::tempdir().unwrap();
let cache = cache_tmp.path().join("cache");
let mut store = open_or_init_cache(&cache).unwrap();
let first = push_import_stage(
&mut store,
&git_repo,
&cache,
"HEAD",
"refs/heads/main",
None,
)
.unwrap();
assert_eq!(first.new_commits, 2);
drop(store);
append_commit(&git_repo, git_dir.path(), 99);
let mut store = open_or_init_cache(&cache).unwrap();
let second = push_import_stage(
&mut store,
&git_repo,
&cache,
"HEAD",
"refs/heads/main",
None,
)
.unwrap();
assert_eq!(second.new_commits, 1, "expected just the new commit");
assert_eq!(second.total_commits, 3);
assert_ne!(second.head_id, first.head_id);
let marks = load_marks(&marks_path(&cache));
assert_eq!(marks.len(), 3);
assert_eq!(
store.get_ref("refs/heads/main").unwrap(),
Some(second.head_id)
);
}
#[test]
fn warm_cache_short_circuits_reparse_on_first_push() {
let (_git_dir, git_repo, _oids) = linear_git_history(3);
let warm_tmp = tempfile::tempdir().unwrap();
let warm = warm_tmp.path().join("warm");
let mut warm_store = open_or_init_cache(&warm).unwrap();
let warm_report = push_import_stage(
&mut warm_store,
&git_repo,
&warm,
"HEAD",
"refs/heads/main",
None,
)
.unwrap();
assert_eq!(warm_report.new_commits, 3);
let warm_object_count = warm_store.list_objects().unwrap().len();
assert!(warm_object_count > 0);
drop(warm_store);
let cache_tmp = tempfile::tempdir().unwrap();
let cache = cache_tmp.path().join("cache");
let mut store = open_or_init_cache(&cache).unwrap();
let report = push_import_stage(
&mut store,
&git_repo,
&cache,
"HEAD",
"refs/heads/main",
Some(&warm),
)
.unwrap();
assert_eq!(
report.new_commits, 0,
"warm cache should short-circuit reparse"
);
assert_eq!(
report.total_commits, 3,
"total should include the warm-contributed marks"
);
assert_eq!(
store.list_objects().unwrap().len(),
warm_object_count,
"every warm object should now live in the per-remote store"
);
assert_eq!(
store.get_ref("refs/heads/main").unwrap(),
Some(report.head_id)
);
let marks = load_marks(&marks_path(&cache));
assert_eq!(marks.len(), 3);
}
#[test]
fn push_stage_uses_dst_ref_name_not_hardcoded_main() {
let (_git_dir, git_repo, _oids) = linear_git_history(1);
let cache_tmp = tempfile::tempdir().unwrap();
let cache = cache_tmp.path().join("cache");
let mut store = open_or_init_cache(&cache).unwrap();
let report = push_import_stage(
&mut store,
&git_repo,
&cache,
"HEAD",
"refs/heads/feature",
None,
)
.unwrap();
assert_eq!(
store.get_ref("refs/heads/feature").unwrap(),
Some(report.head_id)
);
assert_eq!(
store.get_ref("refs/heads/main").unwrap(),
None,
"main should not have been written when dst is feature"
);
}
#[test]
fn fetch_stage_full_export_on_first_run() {
let (_src_dir, src_repo, _oids) = linear_git_history(3);
let cache_tmp = tempfile::tempdir().unwrap();
let cache = cache_tmp.path().join("cache");
let mut store = open_or_init_cache(&cache).unwrap();
push_import_stage(
&mut store,
&src_repo,
&cache,
"HEAD",
"refs/heads/main",
None,
)
.unwrap();
std::fs::remove_file(marks_path(&cache)).unwrap();
let dst_tmp = tempfile::tempdir().unwrap();
let dst_repo = git2::Repository::init(dst_tmp.path()).unwrap();
let report = fetch_export_stage(&store, &dst_repo, &cache, "refs/heads/main").unwrap();
assert_eq!(report.commits_walked, 3);
assert_eq!(report.commits_exported, 3);
assert!(report.tip_git_oid.is_some());
let tip = dst_repo.find_commit(report.tip_git_oid.unwrap()).unwrap();
assert_eq!(tip.parent_count(), 1);
let middle = tip.parent(0).unwrap();
assert_eq!(middle.parent_count(), 1);
let root = middle.parent(0).unwrap();
assert_eq!(root.parent_count(), 0);
let marks = load_marks(&marks_path(&cache));
assert_eq!(marks.len(), 3);
}
#[test]
fn fetch_stage_is_idempotent() {
let (_src_dir, src_repo, _oids) = linear_git_history(2);
let cache_tmp = tempfile::tempdir().unwrap();
let cache = cache_tmp.path().join("cache");
let mut store = open_or_init_cache(&cache).unwrap();
push_import_stage(
&mut store,
&src_repo,
&cache,
"HEAD",
"refs/heads/main",
None,
)
.unwrap();
std::fs::remove_file(marks_path(&cache)).unwrap();
let dst_tmp = tempfile::tempdir().unwrap();
let dst_repo = git2::Repository::init(dst_tmp.path()).unwrap();
let first = fetch_export_stage(&store, &dst_repo, &cache, "refs/heads/main").unwrap();
assert_eq!(first.commits_exported, 2);
let second = fetch_export_stage(&store, &dst_repo, &cache, "refs/heads/main").unwrap();
assert_eq!(second.commits_exported, 0);
assert_eq!(second.commits_walked, 2, "walk still covers full DAG");
assert_eq!(
second.tip_git_oid, first.tip_git_oid,
"tip git OID should be reported from marks on noop"
);
}
#[test]
fn fetch_stage_exports_only_new_commits_after_extension() {
let (src_dir, src_repo, _oids) = linear_git_history(2);
let cache_tmp = tempfile::tempdir().unwrap();
let cache = cache_tmp.path().join("cache");
let mut store = open_or_init_cache(&cache).unwrap();
let empty: rustc_hash::FxHashMap<git2::Oid, ObjectId> = rustc_hash::FxHashMap::default();
let import1 =
panproto_git::import_git_repo_incremental(&src_repo, &mut store, "HEAD", &empty)
.unwrap();
store.set_ref("refs/heads/main", import1.head_id).unwrap();
let dst_tmp = tempfile::tempdir().unwrap();
let dst_repo = git2::Repository::init(dst_tmp.path()).unwrap();
let first = fetch_export_stage(&store, &dst_repo, &cache, "refs/heads/main").unwrap();
assert_eq!(first.commits_exported, 2);
append_commit(&src_repo, src_dir.path(), 42);
let import2 =
panproto_git::import_git_repo_incremental(&src_repo, &mut store, "HEAD", &empty)
.unwrap();
store.set_ref("refs/heads/main", import2.head_id).unwrap();
let second = fetch_export_stage(&store, &dst_repo, &cache, "refs/heads/main").unwrap();
assert_eq!(second.commits_exported, 1);
assert_eq!(second.commits_walked, 3);
let new_tip = dst_repo.find_commit(second.tip_git_oid.unwrap()).unwrap();
assert_eq!(new_tip.parent_count(), 1);
assert_eq!(new_tip.parent(0).unwrap().id(), first.tip_git_oid.unwrap());
}
#[test]
fn fetch_stage_treats_push_marks_as_already_exported() {
let (_git_dir, repo, _oids) = linear_git_history(2);
let cache_tmp = tempfile::tempdir().unwrap();
let cache = cache_tmp.path().join("cache");
let mut store = open_or_init_cache(&cache).unwrap();
push_import_stage(&mut store, &repo, &cache, "HEAD", "refs/heads/main", None).unwrap();
let report = fetch_export_stage(&store, &repo, &cache, "refs/heads/main").unwrap();
assert_eq!(report.commits_exported, 0);
assert_eq!(report.commits_walked, 2);
assert!(
report.tip_git_oid.is_some(),
"tip should be reported from the push marks even on noop"
);
}
#[test]
fn fetch_stage_preserves_dag_parent_links_via_marks() {
let (_src_dir, src_repo, _oids) = linear_git_history(3);
let cache_tmp = tempfile::tempdir().unwrap();
let cache = cache_tmp.path().join("cache");
let mut store = open_or_init_cache(&cache).unwrap();
push_import_stage(
&mut store,
&src_repo,
&cache,
"HEAD",
"refs/heads/main",
None,
)
.unwrap();
std::fs::remove_file(marks_path(&cache)).unwrap();
let dst_tmp = tempfile::tempdir().unwrap();
let dst_repo = git2::Repository::init(dst_tmp.path()).unwrap();
let report = fetch_export_stage(&store, &dst_repo, &cache, "refs/heads/main").unwrap();
assert_eq!(report.commits_exported, 3);
let marks = load_marks(&marks_path(&cache));
let mut git_oids: std::collections::HashSet<git2::Oid> = marks.keys().copied().collect();
git_oids.insert(report.tip_git_oid.unwrap());
for git_oid in &git_oids {
let commit = dst_repo.find_commit(*git_oid).unwrap();
if commit.parent_count() > 0 {
let parent_id = commit.parent(0).unwrap().id();
assert!(
git_oids.contains(&parent_id),
"exported commit {git_oid} has parent {parent_id} outside the marks set"
);
}
}
}
#[test]
fn fetch_stage_handles_non_monotonic_timestamps() {
    use panproto_protocols::raw_file;
    use panproto_schema::SchemaBuilder;
    use panproto_vcs::{CommitObject, Object as VcsObject};

    // Minimal single-vertex schema; only the commit graph shape matters here.
    let protocol = raw_file::protocol();
    let schema = SchemaBuilder::new(&protocol)
        .vertex("root", "file", None::<&str>)
        .unwrap()
        .build()
        .unwrap();

    let cache_dir = tempfile::tempdir().unwrap();
    let cache_path = cache_dir.path().join("cache");
    let mut store = open_or_init_cache(&cache_path).unwrap();
    let schema_id = store.put(&VcsObject::Schema(Box::new(schema))).unwrap();

    // Deliberately inverted clock: the parent commit carries a LATER
    // timestamp (2000) than its child (1000).
    let parent =
        CommitObject::builder(schema_id, "project", "Tester", "parent (later timestamp)")
            .timestamp(2000)
            .build();
    let parent_id = store.put(&VcsObject::Commit(parent)).unwrap();
    let child =
        CommitObject::builder(schema_id, "project", "Tester", "child (earlier timestamp)")
            .parents(vec![parent_id])
            .timestamp(1000)
            .build();
    let child_id = store.put(&VcsObject::Commit(child)).unwrap();
    store.set_ref("refs/heads/main", child_id).unwrap();

    // Export into a fresh git repository and check the parent edge survived.
    let dst_dir = tempfile::tempdir().unwrap();
    let dst_repo = git2::Repository::init(dst_dir.path()).unwrap();
    let report = fetch_export_stage(&store, &dst_repo, &cache_path, "refs/heads/main").unwrap();
    assert_eq!(report.commits_walked, 2);
    assert_eq!(report.commits_exported, 2);
    let tip = dst_repo.find_commit(report.tip_git_oid.unwrap()).unwrap();
    assert_eq!(
        tip.parent_count(),
        1,
        "child git commit must still have its parent wired, despite non-monotonic timestamps"
    );
    assert_eq!(tip.message().unwrap_or(""), "child (earlier timestamp)");
    assert_eq!(
        tip.parent(0).unwrap().message().unwrap_or(""),
        "parent (later timestamp)"
    );
}
// Exercises the export stage on a diamond-shaped DAG:
// root -> {left, right} -> merge. The merge commit must retain BOTH parent
// edges once exported into a git repository.
#[test]
fn fetch_stage_handles_merge_commit_with_multiple_parents() {
use panproto_protocols::raw_file;
use panproto_schema::SchemaBuilder;
use panproto_vcs::{CommitObject, Object as VcsObject};
// Minimal single-vertex schema; only the commit graph shape matters here.
let proto = raw_file::protocol();
let schema = SchemaBuilder::new(&proto)
.vertex("root", "file", None::<&str>)
.unwrap()
.build()
.unwrap();
let cache_tmp = tempfile::tempdir().unwrap();
let cache = cache_tmp.path().join("cache");
let mut store = open_or_init_cache(&cache).unwrap();
let schema_id = store.put(&VcsObject::Schema(Box::new(schema))).unwrap();
// Root of the diamond (t=1000).
let root_id = store
.put(&VcsObject::Commit(
CommitObject::builder(schema_id, "project", "T", "root")
.timestamp(1000)
.build(),
))
.unwrap();
// Two divergent branches off the root; note they share the SAME timestamp,
// so export order cannot rely on time alone.
let left_id = store
.put(&VcsObject::Commit(
CommitObject::builder(schema_id, "project", "T", "left")
.parents(vec![root_id])
.timestamp(2000)
.build(),
))
.unwrap();
let right_id = store
.put(&VcsObject::Commit(
CommitObject::builder(schema_id, "project", "T", "right")
.parents(vec![root_id])
.timestamp(2000)
.build(),
))
.unwrap();
// Merge commit with two parents, in order [left, right].
let merge_id = store
.put(&VcsObject::Commit(
CommitObject::builder(schema_id, "project", "T", "merge")
.parents(vec![left_id, right_id])
.timestamp(3000)
.build(),
))
.unwrap();
store.set_ref("refs/heads/main", merge_id).unwrap();
// Export into a fresh git repo and verify the whole diamond came across.
let dst_tmp = tempfile::tempdir().unwrap();
let dst_repo = git2::Repository::init(dst_tmp.path()).unwrap();
let report = fetch_export_stage(&store, &dst_repo, &cache, "refs/heads/main").unwrap();
assert_eq!(
report.commits_walked, 4,
"should walk all 4 commits (root, left, right, merge)"
);
assert_eq!(report.commits_exported, 4);
let tip = dst_repo.find_commit(report.tip_git_oid.unwrap()).unwrap();
assert_eq!(
tip.parent_count(),
2,
"merge commit should retain both parents in the exported git DAG"
);
assert_eq!(tip.message().unwrap_or(""), "merge");
}
// Seeds the marks file with entries whose git OIDs do not exist in the
// destination repository. The export stage must treat those marks as stale,
// re-export both commits, and leave valid git mappings behind.
#[test]
fn fetch_stage_drops_stale_marks_that_reference_missing_git_commits() {
let (_src_dir, src_repo, _) = e2e_git_history(2);
let cache_tmp = tempfile::tempdir().unwrap();
let cache = cache_tmp.path().join("cache");
let mut store = open_or_init_cache(&cache).unwrap();
// Import the 2-commit git history into the panproto cache from scratch
// (empty oid map = nothing previously imported).
let empty: rustc_hash::FxHashMap<git2::Oid, ObjectId> = rustc_hash::FxHashMap::default();
let import_result =
panproto_git::import_git_repo_incremental(&src_repo, &mut store, "HEAD", &empty)
.unwrap();
store
.set_ref("refs/heads/main", import_result.head_id)
.unwrap();
// Fabricate marks mapping bogus git OIDs to the REAL panproto commits;
// neither OID will resolve in the freshly-initialized destination repo.
let stale_a = git2::Oid::from_str("0123456789abcdef0123456789abcdef01234567").unwrap();
let stale_b = git2::Oid::from_str("fedcba9876543210fedcba9876543210fedcba98").unwrap();
let stale_marks = vec![
(stale_a, import_result.oid_map[0].1),
(stale_b, import_result.oid_map[1].1),
];
append_marks(&marks_path(&cache), &stale_marks).unwrap();
assert_eq!(load_marks(&marks_path(&cache)).len(), 2);
let dst_tmp = tempfile::tempdir().unwrap();
let dst_repo = git2::Repository::init(dst_tmp.path()).unwrap();
let report = fetch_export_stage(&store, &dst_repo, &cache, "refs/heads/main").unwrap();
assert_eq!(
report.commits_exported, 2,
"both commits should be re-exported when all marks are stale"
);
assert_eq!(report.commits_walked, 2);
let tip = dst_repo.find_commit(report.tip_git_oid.unwrap()).unwrap();
assert_eq!(
tip.parent_count(),
1,
"re-exported tip must still have its parent wired"
);
// The re-exported parent must be a freshly-created git commit, not either
// of the fabricated stale OIDs.
let parent = tip.parent(0).unwrap();
assert_ne!(
parent.id(),
stale_a,
"parent should be the freshly-exported root, not the stale OID"
);
assert_ne!(parent.id(), stale_b);
// Finally, every panproto commit should now map (via some mark) to a git
// commit that actually exists in the destination repo.
let final_marks = load_marks(&marks_path(&cache));
let valid_panproto_ids: std::collections::HashSet<ObjectId> = final_marks
.iter()
.filter_map(|(g, p)| {
if dst_repo.find_commit(*g).is_ok() {
Some(*p)
} else {
None
}
})
.collect();
assert!(
valid_panproto_ids.contains(&import_result.oid_map[0].1),
"root commit should have a valid git mapping after re-export"
);
assert!(
valid_panproto_ids.contains(&import_result.oid_map[1].1),
"tip commit should have a valid git mapping after re-export"
);
}
/// A single interaction recorded by [`FakeRemoteClient`], captured so tests
/// can assert on both the order of remote calls and their exact payloads.
#[derive(Clone, Debug)]
enum RemoteCall {
/// Full pull of the server's objects and refs into the local cache.
Pull,
/// Upload of the local cache: which objects and which refs were sent.
Push {
object_ids: std::collections::BTreeSet<ObjectId>,
refs: Vec<(String, ObjectId)>,
},
/// Read of a single ref's current target on the server.
GetRef {
ref_name: String,
},
/// Ref update carrying the expected previous target (CAS-style) plus the
/// protocol name and commit count the client reports.
SetRef {
ref_name: String,
old_target: Option<ObjectId>,
new_target: ObjectId,
protocol: String,
commit_count: u64,
},
}
/// In-memory stand-in for a remote panproto node: serves objects/refs from a
/// `MemStore` and records every call so tests can assert on call sequences.
struct FakeRemoteClient {
// Pretend server-side store; RefCell because RemoteClient methods take `&self`.
server: RefCell<MemStore>,
// Chronological log of every remote call made against this fake.
calls: RefCell<Vec<RemoteCall>>,
}
impl FakeRemoteClient {
    /// Creates a fake remote with an empty in-memory server store and an
    /// empty call log.
    fn new() -> Self {
        Self {
            server: RefCell::new(MemStore::new()),
            calls: RefCell::new(Vec::new()),
        }
    }

    /// Snapshot of every remote call recorded so far, in order.
    fn calls(&self) -> Vec<RemoteCall> {
        self.calls.borrow().clone()
    }

    /// Copies every object and every `refs/`-prefixed ref from `store` into
    /// the fake server, as if a previous client had already pushed them.
    fn seed_from(&self, store: &FsStore) {
        let mut remote = self.server.borrow_mut();
        for object_id in store.list_objects().unwrap() {
            remote.put(&store.get(&object_id).unwrap()).unwrap();
        }
        for (ref_name, target) in store.list_refs("refs/").unwrap() {
            remote.set_ref(&ref_name, target).unwrap();
        }
    }
}
impl RemoteClient for FakeRemoteClient {
/// Records a `Pull`, then copies every object the cache is missing plus all
/// `refs/` refs from the fake server into `store`.
async fn remote_pull(&self, store: &mut FsStore) -> Result<(), Box<dyn std::error::Error>> {
self.calls.borrow_mut().push(RemoteCall::Pull);
let server = self.server.borrow();
for id in server.list_objects()? {
// Only transfer objects the local cache does not already hold.
if !store.has(&id) {
let obj = server.get(&id)?;
store.put(&obj)?;
}
}
for (name, id) in server.list_refs("refs/")? {
store.set_ref(&name, id)?;
}
Ok(())
}
/// Records a `Push` carrying the exact payload (sorted object ids and refs),
/// then mirrors that payload into the fake server.
async fn remote_push(&self, store: &FsStore) -> Result<(), Box<dyn std::error::Error>> {
// Snapshot the payload before mutating anything, so the recorded call
// reflects precisely what the client-side store contained at push time.
let object_ids: std::collections::BTreeSet<ObjectId> =
store.list_objects()?.into_iter().collect();
let mut refs = store.list_refs("refs/")?;
refs.sort_by(|a, b| a.0.cmp(&b.0));
self.calls.borrow_mut().push(RemoteCall::Push {
object_ids: object_ids.clone(),
refs: refs.clone(),
});
let mut server = self.server.borrow_mut();
for id in &object_ids {
if !server.has(id) {
let obj: Object = store.get(id)?;
server.put(&obj)?;
}
}
for (name, id) in refs {
server.set_ref(&name, id)?;
}
Ok(())
}
/// Records a `GetRef` and answers from the fake server's refs.
async fn remote_get_ref(
&self,
ref_name: &str,
) -> Result<Option<ObjectId>, Box<dyn std::error::Error>> {
self.calls.borrow_mut().push(RemoteCall::GetRef {
ref_name: ref_name.to_owned(),
});
Ok(self.server.borrow().get_ref(ref_name)?)
}
/// Records a `SetRef` with all arguments, then unconditionally updates the
/// fake server's ref (the fake performs no CAS check against `old_target`).
async fn remote_set_ref(
&self,
ref_name: &str,
old_target: Option<&ObjectId>,
new_target: &ObjectId,
protocol: &str,
commit_count: u64,
) -> Result<(), Box<dyn std::error::Error>> {
self.calls.borrow_mut().push(RemoteCall::SetRef {
ref_name: ref_name.to_owned(),
old_target: old_target.copied(),
new_target: *new_target,
protocol: protocol.to_owned(),
commit_count,
});
self.server.borrow_mut().set_ref(ref_name, *new_target)?;
Ok(())
}
}
/// Builds a throwaway git repository containing a linear chain of `n`
/// commits, each rewriting `main.py`. Returns the temp dir (kept alive so
/// the repository isn't deleted), the repo handle, and the commit OIDs in
/// chronological order.
fn e2e_git_history(n: usize) -> (tempfile::TempDir, git2::Repository, Vec<git2::Oid>) {
    let dir = tempfile::tempdir().unwrap();
    let repo = git2::Repository::init(dir.path()).unwrap();
    let source_file = dir.path().join("main.py");
    let mut oids: Vec<git2::Oid> = Vec::with_capacity(n);
    for i in 0..n {
        // Deterministic, strictly increasing commit times (1000, 1001, ...).
        let sig = git2::Signature::new(
            "Tester",
            "tester@example.com",
            &git2::Time::new(1000 + i64::try_from(i).unwrap(), 0),
        )
        .unwrap();
        // Rewrite the tracked file and stage it.
        std::fs::write(&source_file, format!("x = {i}\n").as_bytes()).unwrap();
        let mut index = repo.index().unwrap();
        index.add_path(Path::new("main.py")).unwrap();
        index.write().unwrap();
        let tree = repo.find_tree(index.write_tree().unwrap()).unwrap();
        // The previous commit (if any) becomes the sole parent.
        let parent_commit = oids.last().map(|p| repo.find_commit(*p).unwrap());
        let parents: Vec<&git2::Commit<'_>> = parent_commit.iter().collect();
        let oid = repo
            .commit(
                Some("HEAD"),
                &sig,
                &sig,
                &format!("commit {i}"),
                &tree,
                &parents,
            )
            .unwrap();
        oids.push(oid);
    }
    (dir, repo, oids)
}
/// Drives `fut` to completion on a fresh single-threaded tokio runtime.
fn run_async<F, T>(fut: F) -> T
where
    F: std::future::Future<Output = T>,
{
    let runtime = tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .unwrap();
    runtime.block_on(fut)
}
// End-to-end push through the fake remote: asserts the exact call sequence
// Push -> GetRef -> SetRef, and that the trailing SetRef carries the protocol
// name, total commit count, and an old_target matching what the push wrote.
#[test]
fn cmd_push_end_to_end_calls_remote_in_expected_order() {
let (_git_dir, git_repo, _oids) = e2e_git_history(2);
let cache_tmp = tempfile::tempdir().unwrap();
let cache = cache_tmp.path().join("cache");
let fake = FakeRemoteClient::new();
run_async(cmd_push(
&fake,
"HEAD:refs/heads/main",
&git_repo,
&cache,
None,
))
.unwrap();
let calls = fake.calls();
assert_eq!(calls.len(), 3);
assert!(matches!(calls[0], RemoteCall::Push { .. }));
match &calls[1] {
RemoteCall::GetRef { ref_name } => assert_eq!(ref_name, "refs/heads/main"),
other => panic!("expected GetRef, got {other:?}"),
}
match &calls[2] {
RemoteCall::SetRef {
ref_name,
new_target,
old_target,
protocol,
commit_count,
} => {
assert_eq!(ref_name, "refs/heads/main");
assert_eq!(protocol, "project");
assert_eq!(*commit_count, 2, "two commits total on first push");
assert_ne!(*new_target, ObjectId::ZERO);
// The fake's remote_push already wrote refs/heads/main, so the
// GetRef feeding the CAS observes the freshly pushed tip.
assert_eq!(
*old_target,
Some(*new_target),
"trailing SetRef CAS should match what remote_push wrote"
);
}
other => panic!("expected SetRef, got {other:?}"),
}
}
#[test]
fn cmd_push_end_to_end_push_payload_contains_local_ref() {
    // One-commit repo pushed to a *feature* branch rather than main.
    let (_git_dir, git_repo, _oids) = e2e_git_history(1);
    let cache_tmp = tempfile::tempdir().unwrap();
    let cache = cache_tmp.path().join("cache");
    let fake = FakeRemoteClient::new();
    run_async(cmd_push(
        &fake,
        "HEAD:refs/heads/feature",
        &git_repo,
        &cache,
        None,
    ))
    .unwrap();
    // The very first recorded call must be the Push; inspect its payload.
    let calls = fake.calls();
    match &calls[0] {
        RemoteCall::Push { object_ids, refs } => {
            let has_ref = |target: &str| refs.iter().any(|(name, _)| name == target);
            assert!(!object_ids.is_empty(), "push should have some objects");
            assert!(
                has_ref("refs/heads/feature"),
                "local ref should be refs/heads/feature, got {refs:?}"
            );
            assert!(
                !has_ref("refs/heads/main"),
                "main should NOT be set when dst is feature"
            );
        }
        other => panic!("expected Push first, got {other:?}"),
    }
}
// Two sequential pushes: the first exports a single commit; after appending
// one more commit, the second push's trailing SetRef must advance new_target
// and report a total commit_count of 2.
#[test]
fn cmd_push_end_to_end_second_call_grows_commit_count_and_head() {
let (git_dir, git_repo, _oids) = e2e_git_history(1);
let cache_tmp = tempfile::tempdir().unwrap();
let cache = cache_tmp.path().join("cache");
let fake = FakeRemoteClient::new();
run_async(cmd_push(
&fake,
"HEAD:refs/heads/main",
&git_repo,
&cache,
None,
))
.unwrap();
// Capture the tip written by the first push so we can verify it moves.
let first_new_target = match fake.calls().last() {
Some(RemoteCall::SetRef {
new_target,
commit_count,
..
}) => {
assert_eq!(*commit_count, 1, "first push: one commit total");
*new_target
}
_ => panic!("expected trailing SetRef from first push"),
};
// Grow the git history by one commit, then push again.
append_commit(&git_repo, git_dir.path(), 2);
run_async(cmd_push(
&fake,
"HEAD:refs/heads/main",
&git_repo,
&cache,
None,
))
.unwrap();
match fake.calls().last() {
Some(RemoteCall::SetRef {
old_target,
new_target,
commit_count,
..
}) => {
assert_ne!(
*new_target, first_new_target,
"head should advance to the new tip"
);
assert_eq!(*commit_count, 2, "total commit count should now be 2");
assert_eq!(*old_target, Some(*new_target));
}
other => panic!("expected trailing SetRef, got {other:?}"),
}
}
// Full round trip: push 3 commits through one fake, seed a second fake's
// server from the pushed cache, then fetch into a fresh git repo. The fetch
// should make exactly one Pull call and export all three commits.
#[test]
fn cmd_fetch_end_to_end_pulls_and_exports_history() {
let (_src_git, src_repo, _) = e2e_git_history(3);
let push_cache_tmp = tempfile::tempdir().unwrap();
let push_cache = push_cache_tmp.path().join("cache");
let push_fake = FakeRemoteClient::new();
run_async(cmd_push(
&push_fake,
"HEAD:refs/heads/main",
&src_repo,
&push_cache,
None,
))
.unwrap();
// Simulate a second machine: its fake server starts with everything the
// push-side cache now contains.
let fetch_fake = FakeRemoteClient::new();
let pushed_server_store = open_or_init_cache(&push_cache).unwrap();
fetch_fake.seed_from(&pushed_server_store);
let fetch_cache_tmp = tempfile::tempdir().unwrap();
let fetch_cache = fetch_cache_tmp.path().join("cache");
let dst_tmp = tempfile::tempdir().unwrap();
let dst_repo = git2::Repository::init(dst_tmp.path()).unwrap();
run_async(cmd_fetch(
&fetch_fake,
"refs/heads/main",
&dst_repo,
&fetch_cache,
))
.unwrap();
// Fetch should talk to the remote exactly once, via Pull.
let calls = fetch_fake.calls();
assert_eq!(calls.len(), 1);
assert!(matches!(calls[0], RemoteCall::Pull));
// Every mark must point at a git commit that exists in the dst repo.
let marks = load_marks(&marks_path(&fetch_cache));
assert_eq!(marks.len(), 3, "all three commits should have marks");
for oid in marks.keys() {
assert!(
dst_repo.find_commit(*oid).is_ok(),
"marks entry {oid} missing from dst repo"
);
}
}
// Fetching the same ref twice must be idempotent: the second cmd_fetch still
// performs its Pull, but adds no new marks (there is nothing new to export).
#[test]
fn cmd_fetch_end_to_end_second_fetch_is_noop() {
let (_src_git, src_repo, _) = e2e_git_history(2);
let push_cache_tmp = tempfile::tempdir().unwrap();
let push_cache = push_cache_tmp.path().join("cache");
let push_fake = FakeRemoteClient::new();
run_async(cmd_push(
&push_fake,
"HEAD:refs/heads/main",
&src_repo,
&push_cache,
None,
))
.unwrap();
// Seed a separate fetch-side fake from the pushed cache.
let fetch_fake = FakeRemoteClient::new();
fetch_fake.seed_from(&open_or_init_cache(&push_cache).unwrap());
let fetch_cache_tmp = tempfile::tempdir().unwrap();
let fetch_cache = fetch_cache_tmp.path().join("cache");
let dst_tmp = tempfile::tempdir().unwrap();
let dst_repo = git2::Repository::init(dst_tmp.path()).unwrap();
run_async(cmd_fetch(
&fetch_fake,
"refs/heads/main",
&dst_repo,
&fetch_cache,
))
.unwrap();
let marks_after_first = load_marks(&marks_path(&fetch_cache));
assert_eq!(marks_after_first.len(), 2);
// Second fetch of the exact same state.
run_async(cmd_fetch(
&fetch_fake,
"refs/heads/main",
&dst_repo,
&fetch_cache,
))
.unwrap();
let marks_after_second = load_marks(&marks_path(&fetch_cache));
assert_eq!(
marks_after_second.len(),
2,
"second fetch should not add any marks"
);
// A Pull is still issued per fetch; idempotence is about marks, not calls.
let pulls = fetch_fake
.calls()
.iter()
.filter(|c| matches!(c, RemoteCall::Pull))
.count();
assert_eq!(pulls, 2);
}
// Push three commits and fetch them back through the SAME fake remote, then
// verify the destination repo contains an intact 3-commit parent chain.
#[test]
fn cmd_push_then_fetch_round_trip_preserves_commit_count() {
let (_src_git, src_repo, src_oids) = e2e_git_history(3);
let push_cache_tmp = tempfile::tempdir().unwrap();
let push_cache = push_cache_tmp.path().join("cache");
let fake = FakeRemoteClient::new();
run_async(cmd_push(
&fake,
"HEAD:refs/heads/main",
&src_repo,
&push_cache,
None,
))
.unwrap();
let fetch_cache_tmp = tempfile::tempdir().unwrap();
let fetch_cache = fetch_cache_tmp.path().join("cache");
let dst_tmp = tempfile::tempdir().unwrap();
let dst_repo = git2::Repository::init(dst_tmp.path()).unwrap();
run_async(cmd_fetch(&fake, "refs/heads/main", &dst_repo, &fetch_cache)).unwrap();
let marks = load_marks(&marks_path(&fetch_cache));
assert_eq!(marks.len(), src_oids.len());
// Walk first-parent links from the newest exported commit; the tip is the
// mark with the latest commit time (e2e_git_history uses strictly
// increasing timestamps), and the chain length must equal the history size.
let mut chain_len = 0usize;
let mut current: Option<git2::Commit<'_>> = marks
.keys()
.map(|oid| dst_repo.find_commit(*oid).unwrap())
.max_by_key(|c| c.time().seconds());
while let Some(c) = current {
chain_len += 1;
current = if c.parent_count() > 0 {
Some(c.parent(0).unwrap())
} else {
None
};
}
assert_eq!(chain_len, 3, "dst repo should have a 3-commit chain");
}
}