use anyhow::Result;
use gix::bstr::BString;
use std::path::Path;
/// A git repository managed by this crate: a remote `url`, a local
/// checkout directory `dst`, and an optional pinned revision `rev`.
///
/// Borrows all of its data, so it is cheap to construct per operation;
/// the async methods copy what they need before handing off to a
/// blocking worker thread.
pub struct Repo<'a> {
    /// Remote URL or `owner/repo` shorthand (expanded by `resolve_url`).
    pub url: &'a str,
    /// Local destination directory of the clone.
    pub dst: &'a Path,
    /// Optional revision (branch, tag, or SHA) to pin the checkout to.
    pub rev: Option<&'a str>,
}
/// Result of a local status inspection of a plugin repository.
#[derive(Debug, PartialEq, Eq)]
pub enum RepoStatus {
    /// Destination directory does not exist (never cloned).
    NotInstalled,
    /// Work tree matches HEAD and any pinned rev resolves locally.
    Clean,
    /// Work tree has uncommitted changes.
    Modified,
    /// Inspection failed; carries a human-readable reason.
    Error(String),
}
/// Summary of a HEAD movement produced by sync/update/checkout.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct GitChange {
    /// Previous HEAD SHA; `None` for a fresh clone.
    pub from: Option<String>,
    /// New HEAD SHA.
    pub to: String,
    /// Commit subjects between `from` and `to` (capped; newest first).
    pub subjects: Vec<String>,
    /// Subset of `subjects` flagged as breaking changes.
    pub breaking_subjects: Vec<String>,
    /// Documentation files touched between `from` and `to` (sorted, deduped).
    pub doc_files_changed: Vec<String>,
}
impl<'a> Repo<'a> {
    /// Create a repo handle for `url`, checked out at `dst`, optionally
    /// pinned to `rev`.
    pub fn new(url: &'a str, dst: &'a Path, rev: Option<&'a str>) -> Self {
        Self { url, dst, rev }
    }

    /// Clone the repo if missing, otherwise fetch and move to the pinned
    /// rev (or the remote tip). Returns `Ok(None)` when HEAD did not move.
    ///
    /// All git work is blocking, so it runs on a `spawn_blocking` thread;
    /// a panicked task is surfaced as an error.
    pub async fn sync(&self) -> Result<Option<GitChange>> {
        let url = resolve_url(self.url);
        let dst = self.dst.to_path_buf();
        let rev = self.rev.map(|s| s.to_string());
        tokio::task::spawn_blocking(move || sync_impl(&url, &dst, rev.as_deref()))
            .await
            .map_err(|e| anyhow::anyhow!("sync task panicked: {}", e))?
    }

    /// Fetch and move an already-installed repo forward; errors when the
    /// destination was never cloned.
    pub async fn update(&self) -> Result<Option<GitChange>> {
        let url = resolve_url(self.url);
        let dst = self.dst.to_path_buf();
        let rev = self.rev.map(|s| s.to_string());
        tokio::task::spawn_blocking(move || update_impl(&url, &dst, rev.as_deref()))
            .await
            .map_err(|e| anyhow::anyhow!("update task panicked: {}", e))?
    }

    /// Inspect the local clone: installed? dirty? does the pinned rev
    /// resolve? Never fails — problems are reported as `RepoStatus::Error`.
    pub async fn get_status(&self) -> RepoStatus {
        let dst = self.dst.to_path_buf();
        let rev = self.rev.map(|s| s.to_string());
        tokio::task::spawn_blocking(move || get_status_impl(&dst, rev.as_deref()))
            .await
            // `unwrap_or_else` so the error String is only allocated on the
            // (rare) panicked-task path, not on every successful call.
            .unwrap_or_else(|_| RepoStatus::Error("status check panicked".to_string()))
    }

    /// Current HEAD commit SHA of the local clone.
    pub async fn head_commit(&self) -> Result<String> {
        let dst = self.dst.to_path_buf();
        tokio::task::spawn_blocking(move || read_head(&dst))
            .await
            .map_err(|e| anyhow::anyhow!("head_commit task panicked: {}", e))?
    }

    /// Check out `rev` using only objects already present locally (no
    /// network). Returns the resulting change, or `None` if HEAD stayed put.
    pub async fn checkout_locally(&self, rev: &str) -> Result<Option<GitChange>> {
        let dst = self.dst.to_path_buf();
        let rev = rev.to_string();
        tokio::task::spawn_blocking(move || checkout_local_impl(&dst, &rev))
            .await
            .map_err(|e| anyhow::anyhow!("checkout_locally task panicked: {}", e))?
    }

    /// Resolve `rev` to a commit SHA against the local clone only.
    /// `Ok(None)` when the clone is missing or the rev does not resolve.
    pub async fn resolve_revision_locally(&self, rev: &str) -> Result<Option<String>> {
        let dst = self.dst.to_path_buf();
        let rev = rev.to_string();
        tokio::task::spawn_blocking(move || resolve_revision_impl(&dst, &rev))
            .await
            .map_err(|e| anyhow::anyhow!("resolve_revision task panicked: {}", e))?
    }

    /// Best-effort remote tip for the current branch, as last fetched
    /// (tracking ref, falling back to `refs/remotes/<remote>/HEAD`).
    pub async fn remote_head(&self) -> Result<Option<String>> {
        let dst = self.dst.to_path_buf();
        tokio::task::spawn_blocking(move || read_remote_head(&dst))
            .await
            .map_err(|e| anyhow::anyhow!("remote_head task panicked: {}", e))?
    }
}
fn resolve_url(url: &str) -> String {
if url.contains("://")
|| url.contains('@')
|| url.starts_with('/')
|| url.starts_with('~')
|| url.starts_with('.')
|| url.starts_with('\\')
|| (url.len() >= 2 && url.as_bytes()[1] == b':')
{
return url.to_string();
}
if url.matches('/').count() == 1 && !url.contains(' ') {
format!("https://github.com/{}", url)
} else {
url.to_string()
}
}
/// Blocking body of `Repo::sync`: clone `url` into `dst` if absent,
/// otherwise fetch and move to `rev` (or fast-forward to the remote
/// default tip). Returns the resulting change, `None` when HEAD did
/// not move.
fn sync_impl(url: &str, dst: &Path, rev: Option<&str>) -> Result<Option<GitChange>> {
    if !dst.exists() {
        // Fresh install: clone, then optionally pin to the requested rev.
        clone_impl(url, dst)?;
        if let Some(rev) = rev {
            fetch_impl(dst)?;
            gix_checkout(dst, rev)?;
        }
        return Ok(Some(GitChange {
            from: None,
            to: read_head(dst)?,
            subjects: Vec::new(),
            breaking_subjects: Vec::new(),
            doc_files_changed: Vec::new(),
        }));
    }
    // Existing clone: fetch, then either pin to `rev` or follow the remote.
    let before = read_head(dst).ok();
    fetch_impl(dst)?;
    match rev {
        Some(rev) => gix_checkout(dst, rev)?,
        None => gix_reset_to_remote(dst)?,
    }
    let after = read_head(dst)?;
    Ok(build_change(dst, before, after))
}
/// Blocking body of `Repo::update`: like `sync_impl`, but refuses to
/// run against a destination that was never cloned.
fn update_impl(_url: &str, dst: &Path, rev: Option<&str>) -> Result<Option<GitChange>> {
    if !dst.exists() {
        anyhow::bail!("Plugin not installed: {}", dst.display());
    }
    let before = read_head(dst).ok();
    fetch_impl(dst)?;
    match rev {
        Some(rev) => gix_checkout(dst, rev)?,
        None => gix_reset_to_remote(dst)?,
    }
    Ok(build_change(dst, before, read_head(dst)?))
}
/// Resolve HEAD of the repo at `dst` to its commit SHA (hex string).
fn read_head(dst: &Path) -> Result<String> {
    let commit = gix::open(dst)?.head_commit()?;
    Ok(commit.id().to_string())
}
/// Blocking body of `Repo::checkout_locally`: move HEAD to `rev` using
/// only locally available objects; errors if the clone is missing.
fn checkout_local_impl(dst: &Path, rev: &str) -> Result<Option<GitChange>> {
    if !dst.exists() {
        anyhow::bail!("Plugin not installed: {}", dst.display());
    }
    let before = read_head(dst).ok();
    gix_checkout(dst, rev)?;
    Ok(build_change(dst, before, read_head(dst)?))
}
/// Blocking body of `Repo::resolve_revision_locally`: map `rev` to a
/// commit SHA against the local clone only. Missing clone, unopenable
/// repo, or unresolvable rev all yield `Ok(None)`.
fn resolve_revision_impl(dst: &Path, rev: &str) -> Result<Option<String>> {
    if !dst.exists() {
        return Ok(None);
    }
    let Ok(repo) = gix::open(dst) else {
        return Ok(None);
    };
    // Try the commit-peeled spelling first so annotated tags resolve to
    // their target commit rather than the tag object.
    let peeled = format!("{}^{{commit}}", rev);
    let resolved = repo
        .rev_parse_single(&peeled[..])
        .or_else(|_| repo.rev_parse_single(rev))
        .ok();
    Ok(resolved.map(|id| id.detach().to_string()))
}
/// Best-effort lookup of the last-fetched remote tip: first the
/// remote-tracking ref of the current branch, then the remote's
/// default-branch pointer (`refs/remotes/<remote>/HEAD`).
fn read_remote_head(dst: &Path) -> Result<Option<String>> {
    let repo = gix::open(dst)?;
    let remote_name = repo
        .find_default_remote(gix::remote::Direction::Fetch)
        .and_then(|r| r.ok())
        .and_then(|r| r.name().map(|n| n.as_bstr().to_string()))
        .unwrap_or_else(|| "origin".to_string());
    // Prefer the tracking ref of the currently checked-out branch.
    if let Some(head_name) = repo.head_name()? {
        let branch = head_name.as_bstr().to_string();
        let tracking = branch.replace("refs/heads/", &format!("refs/remotes/{}/", remote_name));
        if let Ok(mut tracking_ref) = repo.find_reference(&tracking) {
            if let Ok(id) = tracking_ref.peel_to_id() {
                return Ok(Some(id.detach().to_string()));
            }
        }
    }
    // Detached HEAD (or no tracking ref): fall back to the remote HEAD.
    let remote_head_ref = format!("refs/remotes/{}/HEAD", remote_name);
    if let Ok(mut reference) = repo.find_reference(&remote_head_ref) {
        if let Ok(id) = reference.peel_to_id() {
            return Ok(Some(id.detach().to_string()));
        }
    }
    Ok(None)
}
/// Turn a before/after HEAD pair into an optional `GitChange`.
///
/// No previous HEAD (fresh clone) reports the new tip with empty
/// history; an unchanged HEAD reports `None`; otherwise the commit
/// subjects and changed doc files between the two SHAs are collected.
fn build_change(dst: &Path, before: Option<String>, after: String) -> Option<GitChange> {
    let Some(from) = before else {
        return Some(GitChange {
            from: None,
            to: after,
            subjects: Vec::new(),
            breaking_subjects: Vec::new(),
            doc_files_changed: Vec::new(),
        });
    };
    if from == after {
        return None;
    }
    let (subjects, breaking_subjects) = collect_subjects_and_breaking(dst, &from, &after);
    let doc_files_changed = doc_files_changed(dst, &from, &after);
    Some(GitChange {
        from: Some(from),
        to: after,
        subjects,
        breaking_subjects,
        doc_files_changed,
    })
}
/// Walk commits reachable from `to` but not `from` and collect their
/// subject lines, flagging breaking ones. Any git failure degrades to
/// empty results rather than an error.
fn collect_subjects_and_breaking(dst: &Path, from: &str, to: &str) -> (Vec<String>, Vec<String>) {
    // Cap the walk so huge update ranges stay cheap.
    const SUBJECT_WALK_LIMIT: usize = 100;
    let mut subjects = Vec::new();
    let mut breaking = Vec::new();
    let Ok(repo) = gix::open(dst) else {
        return (subjects, breaking);
    };
    let Ok(from_id) = repo.rev_parse_single(from) else {
        return (subjects, breaking);
    };
    let Ok(to_id) = repo.rev_parse_single(to) else {
        return (subjects, breaking);
    };
    let Ok(walk) = repo
        .rev_walk([to_id.detach()])
        .with_hidden([from_id.detach()])
        .all()
    else {
        return (subjects, breaking);
    };
    for info in walk.flatten().take(SUBJECT_WALK_LIMIT) {
        let Ok(commit) = info.object() else {
            continue;
        };
        let message = commit.message_raw_sloppy().to_string();
        let (subject, body) = split_subject_body(&message);
        let subject = subject.trim();
        if subject.is_empty() {
            continue;
        }
        if crate::update_log::is_breaking(subject, body) {
            breaking.push(subject.to_string());
        }
        subjects.push(subject.to_string());
    }
    (subjects, breaking)
}
/// Split a raw commit message into its subject (first line) and body
/// (everything after the first newline; empty when single-line).
///
/// Uses `str::split_once` instead of manual `find` + slicing — same
/// behavior, no index arithmetic.
fn split_subject_body(msg: &str) -> (&str, &str) {
    msg.split_once('\n').unwrap_or((msg, ""))
}
fn doc_files_changed(dst: &Path, from: &str, to: &str) -> Vec<String> {
let Some((_repo, changes)) = open_and_diff(dst, from, to) else {
return Vec::new();
};
let mut files: Vec<String> = changes
.into_iter()
.map(change_location)
.filter(|p| is_doc_path(p))
.collect();
files.sort();
files.dedup();
files
}
/// Open the repo at `dst` and diff the trees of commits `from` and `to`,
/// returning the repo together with the detached change list.
/// Any failure along the way yields `None`.
fn open_and_diff(
    dst: &Path,
    from: &str,
    to: &str,
) -> Option<(
    gix::Repository,
    Vec<gix::object::tree::diff::ChangeDetached>,
)> {
    let repo = gix::open(dst).ok()?;
    // Inner scope: the commit/tree handles borrow `repo`, and must be
    // dropped before `repo` is moved into the return value.
    let changes = {
        let from_tree = repo
            .find_commit(repo.rev_parse_single(from).ok()?)
            .ok()?
            .tree()
            .ok()?;
        let to_tree = repo
            .find_commit(repo.rev_parse_single(to).ok()?)
            .ok()?
            .tree()
            .ok()?;
        // `with_rewrites(None)` turns off rewrite/rename tracking.
        let diff_options = gix::diff::Options::default().with_rewrites(None);
        repo.diff_tree_to_tree(Some(&from_tree), Some(&to_tree), Some(diff_options))
            .ok()?
    };
    Some((repo, changes))
}
/// Extract the path ("location") of a detached tree-diff change,
/// regardless of its kind, as a `String`.
fn change_location(change: gix::object::tree::diff::ChangeDetached) -> String {
    use gix::object::tree::diff::ChangeDetached as Change;
    let location = match change {
        Change::Addition { location, .. } => location,
        Change::Deletion { location, .. } => location,
        Change::Modification { location, .. } => location,
        Change::Rewrite { location, .. } => location,
    };
    location.to_string()
}
/// True for paths we surface as documentation: any non-empty path under
/// `doc/`, or a top-level file whose name starts with "readme" or
/// "changelog" (case-insensitive).
fn is_doc_path(path: &str) -> bool {
    match path.strip_prefix("doc/") {
        Some(rest) => !rest.is_empty(),
        None if !path.contains('/') => {
            let lower = path.to_ascii_lowercase();
            lower.starts_with("readme") || lower.starts_with("changelog")
        }
        None => false,
    }
}
/// Build unified-diff patches for each of `paths` between commits
/// `from` and `to`, keyed by path. Paths that are not part of the diff
/// (or whose blobs cannot be read) are simply absent from the map.
pub fn doc_file_patches(
    dst: &Path,
    from: &str,
    to: &str,
    paths: &[String],
) -> std::collections::HashMap<String, String> {
    match open_and_diff(dst, from, to) {
        Some((repo, changes)) => paths
            .iter()
            .filter_map(|path| {
                build_patch_for_path(&repo, &changes, path).map(|patch| (path.clone(), patch))
            })
            .collect(),
        None => std::collections::HashMap::new(),
    }
}
/// Test-only convenience: the patch for a single `path`, if any.
#[cfg(test)]
fn doc_file_patch(dst: &Path, from: &str, to: &str, path: &str) -> Option<String> {
    let paths = [path.to_string()];
    doc_file_patches(dst, from, to, &paths).remove(path)
}
/// Find `path` in the detached diff `changes` and render a git-style
/// unified diff for it, reading blob contents from `repo`.
///
/// Returns `None` when the path is not part of the change set or a
/// required blob cannot be loaded.
fn build_patch_for_path(
    repo: &gix::Repository,
    changes: &[gix::object::tree::diff::ChangeDetached],
    path: &str,
) -> Option<String> {
    use gix::object::tree::diff::ChangeDetached;
    let path_bytes = path.as_bytes();
    // Locate the change whose location matches `path` byte-for-byte.
    let change = changes.iter().find(|c| match c {
        ChangeDetached::Addition { location, .. }
        | ChangeDetached::Deletion { location, .. }
        | ChangeDetached::Modification { location, .. }
        | ChangeDetached::Rewrite { location, .. } => location.as_slice() == path_bytes,
    })?;
    let read_blob = |oid: gix::ObjectId| repo.find_blob(oid).ok().map(|b| b.detach().data);
    // Resolve (old bytes, new bytes, old oid, new oid) per change kind.
    // "0000000" stands in for the missing side of an addition/deletion,
    // mirroring git's null-oid convention in diff headers.
    let (before, after, before_oid, after_oid) = match *change {
        ChangeDetached::Modification {
            previous_id, id, ..
        } => (
            read_blob(previous_id)?,
            read_blob(id)?,
            previous_id.to_string(),
            id.to_string(),
        ),
        ChangeDetached::Addition { id, .. } => (
            Vec::new(),
            read_blob(id)?,
            "0000000".to_string(),
            id.to_string(),
        ),
        ChangeDetached::Deletion { id, .. } => (
            read_blob(id)?,
            Vec::new(),
            id.to_string(),
            "0000000".to_string(),
        ),
        // Rewrites diff the rename source against the new content.
        ChangeDetached::Rewrite { source_id, id, .. } => (
            read_blob(source_id)?,
            read_blob(id)?,
            source_id.to_string(),
            id.to_string(),
        ),
    };
    Some(format_unified_diff(
        path,
        &before,
        &after,
        &before_oid,
        &after_oid,
    ))
}
/// Heuristic binary check: content is considered binary when any of the
/// first 8 KiB of bytes is NUL.
fn is_binary(buf: &[u8]) -> bool {
    buf.iter().take(8 * 1024).any(|&byte| byte == 0)
}
/// Render a git-style unified diff for a single file from raw
/// before/after bytes: the `diff --git` / `index` header, then either a
/// binary-files notice (when either side contains NUL bytes) or
/// `---`/`+++` markers followed by the hunks.
fn format_unified_diff(
    path: &str,
    before: &[u8],
    after: &[u8],
    before_oid: &str,
    after_oid: &str,
) -> String {
    use gix::diff::blob::{
        Algorithm, Diff, InternedInput, UnifiedDiff,
        sources::byte_lines,
        unified_diff::{ConsumeHunk, ContextSize, DiffLineKind, HunkHeader},
    };
    // Abbreviate oids to 7 hex chars, matching git's default header width.
    let short = |oid: &str| oid.get(..7).unwrap_or(oid).to_string();
    let mut out = String::new();
    out.push_str(&format!("diff --git a/{path} b/{path}\n"));
    out.push_str(&format!(
        "index {}..{}\n",
        short(before_oid),
        short(after_oid)
    ));
    if is_binary(before) || is_binary(after) {
        out.push_str(&format!("Binary files a/{path} and b/{path} differ\n"));
        return out;
    }
    out.push_str(&format!("--- a/{path}\n"));
    out.push_str(&format!("+++ b/{path}\n"));
    let input = InternedInput::new(byte_lines(before), byte_lines(after));
    let mut diff = Diff::compute(Algorithm::Histogram, &input);
    diff.postprocess_lines(&input);
    // Sink accumulates hunks into a String: the `@@ … @@` header, then
    // each line with its +/-/space prefix.
    struct Sink(String);
    impl ConsumeHunk for Sink {
        type Out = String;
        fn consume_hunk(
            &mut self,
            header: HunkHeader,
            lines: &[(DiffLineKind, &[u8])],
        ) -> std::io::Result<()> {
            self.0.push_str(&format!("{}\n", header));
            for (kind, line) in lines {
                self.0.push(kind.to_prefix());
                self.0.push_str(&String::from_utf8_lossy(line));
                // Keep output line-oriented even when the file's final
                // line lacks a trailing newline.
                if !line.ends_with(b"\n") {
                    self.0.push('\n');
                }
            }
            Ok(())
        }
        fn finish(self) -> Self::Out {
            self.0
        }
    }
    let body = UnifiedDiff::new(&diff, &input, Sink(String::new()), ContextSize::default())
        .consume()
        .unwrap_or_default();
    out.push_str(&body);
    out
}
/// Shallow-clone (depth 1) `url` into `dst`, check out the main
/// worktree, then widen the fetch refspec to all branches.
///
/// On any clone/checkout failure the partially-created `dst` directory
/// is removed so a later retry starts from a clean slate.
fn clone_impl(url: &str, dst: &Path) -> Result<()> {
    if let Some(parent) = dst.parent() {
        std::fs::create_dir_all(parent)?;
    }
    let (mut _checkout, _outcome) = gix::prepare_clone(url, dst)?
        .with_shallow(gix::remote::fetch::Shallow::DepthAtRemote(
            std::num::NonZeroU32::new(1).unwrap(),
        ))
        .fetch_then_checkout(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED)
        .map_err(|e| {
            // Best-effort cleanup; the original error is what matters.
            let _ = std::fs::remove_dir_all(dst);
            anyhow::anyhow!("git clone failed: {}", e)
        })?;
    _checkout
        .main_worktree(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED)
        .map_err(|e| {
            let _ = std::fs::remove_dir_all(dst);
            anyhow::anyhow!("checkout failed: {}", e)
        })?;
    // Default clones track a single branch; widen so later fetches see all.
    ensure_all_branches_refspec(dst)?;
    Ok(())
}
/// Shallow-fetch (depth 1) from the default remote of the repo at
/// `dst`, after widening the refspec so all branches are fetched.
fn fetch_impl(dst: &Path) -> Result<()> {
    ensure_all_branches_refspec(dst)?;
    let repo = gix::open(dst)?;
    let remote = repo
        .find_default_remote(gix::remote::Direction::Fetch)
        .ok_or_else(|| anyhow::anyhow!("no remote configured"))??;
    let connection = remote.connect(gix::remote::Direction::Fetch)?;
    connection
        .prepare_fetch(gix::progress::Discard, Default::default())?
        .with_shallow(gix::remote::fetch::Shallow::Deepen(1))
        .receive(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED)?;
    Ok(())
}
fn ensure_all_branches_refspec(dst: &Path) -> Result<()> {
let config_path = dst.join(".git").join("config");
let content = match std::fs::read_to_string(&config_path) {
Ok(c) => c,
Err(_) => return Ok(()), };
let want = "+refs/heads/*:refs/remotes/origin/*";
if content.contains(want) {
return Ok(());
}
let mut new_content = String::with_capacity(content.len() + 64);
let mut in_origin_section = false;
let mut replaced = false;
let mut pending_origin_fetch_inject = false;
let leading_ws_default = "\t"; for line in content.lines() {
let trimmed = line.trim_start();
let starts_section = trimmed.starts_with('[');
if starts_section && pending_origin_fetch_inject {
new_content.push_str(leading_ws_default);
new_content.push_str("fetch = ");
new_content.push_str(want);
new_content.push('\n');
pending_origin_fetch_inject = false;
replaced = true;
}
if starts_section {
in_origin_section = trimmed.starts_with("[remote \"origin\"]")
|| trimmed.starts_with("[remote 'origin']");
if in_origin_section {
pending_origin_fetch_inject = true;
}
} else if in_origin_section
&& let Some(idx) = trimmed.find("fetch")
&& trimmed[idx..]
.trim_start_matches("fetch")
.trim_start()
.starts_with('=')
{
let leading_ws = &line[..line.len() - line.trim_start().len()];
new_content.push_str(leading_ws);
new_content.push_str("fetch = ");
new_content.push_str(want);
new_content.push('\n');
replaced = true;
pending_origin_fetch_inject = false;
continue;
}
new_content.push_str(line);
new_content.push('\n');
}
if pending_origin_fetch_inject {
new_content.push_str(leading_ws_default);
new_content.push_str("fetch = ");
new_content.push_str(want);
new_content.push('\n');
replaced = true;
}
if !replaced && !new_content.contains("[remote \"origin\"]") {
new_content.push_str("[remote \"origin\"]\n");
new_content.push_str(leading_ws_default);
new_content.push_str("fetch = ");
new_content.push_str(want);
new_content.push('\n');
}
std::fs::write(&config_path, new_content)?;
Ok(())
}
/// Move the repo at `dst` to `rev` and update the worktree.
///
/// Resolution order: `rev` as-is (SHA/tag/local ref), then
/// `refs/remotes/origin/<rev>` for branches only known remotely. When
/// `rev` names a branch (locally, or via the remote fallback), HEAD is
/// pointed at `refs/heads/<rev>` and that branch is (re)created at the
/// target commit; otherwise HEAD is set directly (detached).
fn gix_checkout(dst: &Path, rev: &str) -> Result<()> {
    let repo = gix::open(dst)?;
    let direct = repo.rev_parse_single(rev);
    let (commit_id, source) = match direct {
        Ok(id) => (id.detach(), DirectOrRemote::Direct),
        Err(_) => {
            // Fallback: `rev` may be a branch that only exists on origin.
            let remote_ref = format!("refs/remotes/origin/{rev}");
            let remote_id = repo
                .find_reference(&remote_ref)
                .ok()
                .and_then(|mut r| r.peel_to_id().ok())
                .ok_or_else(|| anyhow::anyhow!("rev '{}' not found", rev))?;
            (remote_id.detach(), DirectOrRemote::FromRemote)
        }
    };
    let branch_ref = format!("refs/heads/{}", rev);
    let local_branch_exists = repo.find_reference(&branch_ref).is_ok();
    let should_set_branch = local_branch_exists || matches!(source, DirectOrRemote::FromRemote);
    if should_set_branch {
        // Write HEAD as a symbolic ref by hand, then move/create the
        // branch at the target commit.
        // NOTE(review): this bypasses gix's ref transaction for HEAD
        // itself — presumably to avoid a detached-HEAD intermediate
        // state; confirm there is no safer symbolic-ref API available.
        let head_path = repo.git_dir().join("HEAD");
        std::fs::write(&head_path, format!("ref: {}\n", branch_ref))?;
        repo.reference(
            branch_ref.as_str(),
            commit_id,
            gix::refs::transaction::PreviousValue::Any,
            BString::from(format!("rvpm: checkout branch {}", rev)),
        )?;
    } else {
        // Detached checkout of a tag or raw commit.
        repo.reference(
            "HEAD",
            commit_id,
            gix::refs::transaction::PreviousValue::Any,
            BString::from(format!("rvpm: checkout {}", rev)),
        )?;
    }
    // Sync index and worktree to the new HEAD.
    gix_checkout_head(&repo)?;
    Ok(())
}
/// How `gix_checkout` resolved the requested rev: directly
/// (SHA/tag/local ref) or via the `refs/remotes/origin/…` fallback.
#[derive(Debug, Clone, Copy)]
enum DirectOrRemote {
    Direct,
    FromRemote,
}
/// Move the current branch (or detached HEAD) to the last-fetched
/// remote tip: the tracking ref of the current branch if present,
/// otherwise `refs/remotes/<remote>/HEAD`. No-op when neither exists.
///
/// NOTE(review): despite the "fast-forward" reflog message,
/// `PreviousValue::Any` force-moves the ref — no ancestry check is done.
fn gix_reset_to_remote(dst: &Path) -> Result<()> {
    let repo = gix::open(dst)?;
    let remote_name = repo
        .find_default_remote(gix::remote::Direction::Fetch)
        .and_then(|r| r.ok())
        .and_then(|r| r.name().map(|n| n.as_bstr().to_string()))
        .unwrap_or_else(|| "origin".to_string());
    let target_id = {
        let head_name = repo.head_name()?;
        // Map refs/heads/<branch> -> refs/remotes/<remote>/<branch>.
        let tracking_ref = if let Some(ref name) = head_name {
            let branch = name.as_bstr().to_string();
            let tracking = branch.replace("refs/heads/", &format!("refs/remotes/{}/", remote_name));
            repo.find_reference(&tracking).ok()
        } else {
            None
        };
        if let Some(mut tr) = tracking_ref {
            tr.peel_to_id()?.detach()
        } else {
            // No tracking ref (or detached): use the remote default branch.
            let remote_head = format!("refs/remotes/{}/HEAD", remote_name);
            if let Ok(mut r) = repo.find_reference(&remote_head) {
                r.peel_to_id()?.detach()
            } else {
                // Nothing to reset to; leave the repo untouched.
                return Ok(());
            }
        }
    };
    if let Some(head_name) = repo.head_name()? {
        // On a branch: move the branch ref itself.
        repo.reference(
            head_name.as_ref(),
            target_id,
            gix::refs::transaction::PreviousValue::Any,
            BString::from("rvpm: fast-forward"),
        )?;
    } else {
        // Detached HEAD: move HEAD directly.
        repo.reference(
            "HEAD",
            target_id,
            gix::refs::transaction::PreviousValue::Any,
            BString::from("rvpm: fast-forward detached"),
        )?;
    }
    // Sync index and worktree to the new HEAD.
    gix_checkout_head(&repo)?;
    Ok(())
}
/// Force the index and worktree to match the current HEAD commit's tree.
///
/// A fresh index is built from HEAD's tree (discarding on-disk index
/// state deliberately), checked out over the working directory with
/// overwriting enabled, and then written back. Errors on bare repos.
fn gix_checkout_head(repo: &gix::Repository) -> Result<()> {
    let workdir = repo
        .workdir()
        .ok_or_else(|| anyhow::anyhow!("bare repository"))?;
    let head = repo.head_commit()?;
    let tree_id = head.tree_id()?;
    let co_opts =
        repo.checkout_options(gix::worktree::stack::state::attributes::Source::IdMapping)?;
    // Index rebuilt from HEAD's tree rather than read from disk.
    let index = gix::index::State::from_tree(&tree_id, &repo.objects, Default::default())
        .map_err(|e| anyhow::anyhow!("index from tree: {}", e))?;
    let mut index_file = gix::index::File::from_state(index, repo.index_path());
    let opts = gix::worktree::state::checkout::Options {
        // The worktree already has files from the previous checkout;
        // allow clobbering them.
        destination_is_initially_empty: false,
        overwrite_existing: true,
        ..co_opts
    };
    let progress = gix::progress::Discard;
    gix::worktree::state::checkout(
        &mut index_file,
        workdir,
        repo.objects.clone().into_arc()?,
        &progress,
        &progress,
        &gix::interrupt::IS_INTERRUPTED,
        opts,
    )
    .map_err(|e| anyhow::anyhow!("checkout failed: {}", e))?;
    // Persist the rebuilt index so later status checks agree with HEAD.
    index_file
        .write(Default::default())
        .map_err(|e| anyhow::anyhow!("write index: {}", e))?;
    Ok(())
}
fn get_status_impl(dst: &Path, rev: Option<&str>) -> RepoStatus {
if !dst.exists() {
return RepoStatus::NotInstalled;
}
let repo = match gix::open(dst) {
Ok(r) => r,
Err(_) => return RepoStatus::Error("Failed to open git repo".to_string()),
};
match repo.is_dirty() {
Ok(true) => return RepoStatus::Modified,
Ok(false) => {}
Err(e) => return RepoStatus::Error(format!("status check failed: {}", e)),
}
if let Some(rev) = rev {
match repo.rev_parse_single(rev) {
Ok(_) => {}
Err(_) => return RepoStatus::Error(format!("rev '{}' not found in local repo", rev)),
}
}
RepoStatus::Clean
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
use tempfile::tempdir;
use tokio::process::Command;
/// Build a `git` command rooted at `dir` with hermetic config and a
/// fixed identity, so tests don't depend on the user's gitconfig.
fn git_cmd(dir: &Path) -> Command {
    let mut cmd = Command::new("git");
    cmd.current_dir(dir);
    cmd.env("GIT_CONFIG_NOSYSTEM", "1");
    cmd.env("GIT_CONFIG_GLOBAL", dir.join(".gitconfig-test"));
    for key in ["GIT_AUTHOR_NAME", "GIT_COMMITTER_NAME"] {
        cmd.env(key, "test");
    }
    for key in ["GIT_AUTHOR_EMAIL", "GIT_COMMITTER_EMAIL"] {
        cmd.env(key, "test@test.com");
    }
    cmd
}
#[tokio::test]
async fn test_get_status_not_installed() {
    // A destination directory that was never cloned reports NotInstalled.
    let root = tempdir().unwrap();
    let dst = root.path().join("nonexistent");
    let repo = Repo::new("dummy", &dst, None);
    assert_eq!(repo.get_status().await, RepoStatus::NotInstalled);
}
#[tokio::test]
async fn test_get_status_clean() {
    // A committed work tree with no local edits reports Clean.
    let root = tempdir().unwrap();
    let src = root.path().join("src");
    fs::create_dir_all(&src).unwrap();
    git_cmd(&src).args(["init"]).output().await.unwrap();
    fs::write(src.join("hello.txt"), "hello").unwrap();
    git_cmd(&src).args(["add", "."]).output().await.unwrap();
    git_cmd(&src)
        .args(["commit", "-m", "init"])
        .output()
        .await
        .unwrap();
    // The repo doubles as its own "remote": url == dst.
    let repo = Repo::new(src.to_str().unwrap(), &src, None);
    assert_eq!(repo.get_status().await, RepoStatus::Clean);
}
#[tokio::test]
async fn test_get_status_modified() {
    // Editing a tracked file without committing flips status to Modified.
    let root = tempdir().unwrap();
    let src = root.path().join("src");
    fs::create_dir_all(&src).unwrap();
    git_cmd(&src).args(["init"]).output().await.unwrap();
    fs::write(src.join("hello.txt"), "hello").unwrap();
    git_cmd(&src).args(["add", "."]).output().await.unwrap();
    git_cmd(&src)
        .args(["commit", "-m", "init"])
        .output()
        .await
        .unwrap();
    // Uncommitted change on top of the initial commit.
    fs::write(src.join("hello.txt"), "modified").unwrap();
    let repo = Repo::new(src.to_str().unwrap(), &src, None);
    assert_eq!(repo.get_status().await, RepoStatus::Modified);
}
#[tokio::test]
async fn test_get_status_errors_on_invalid_rev() {
    // A pinned rev that does not resolve locally surfaces as an Error status.
    let root = tempdir().unwrap();
    let src = root.path().join("src");
    fs::create_dir_all(&src).unwrap();
    git_cmd(&src).args(["init"]).output().await.unwrap();
    fs::write(src.join("hello.txt"), "hello").unwrap();
    git_cmd(&src).args(["add", "."]).output().await.unwrap();
    git_cmd(&src)
        .args(["commit", "-m", "init"])
        .output()
        .await
        .unwrap();
    let repo = Repo::new(src.to_str().unwrap(), &src, Some("nonexistent-rev"));
    let status = repo.get_status().await;
    assert!(matches!(status, RepoStatus::Error(_)));
}
#[tokio::test]
async fn test_update_fails_when_not_installed() {
    // update() refuses to run against a directory that was never cloned.
    let root = tempdir().unwrap();
    let dst = root.path().join("nonexistent");
    let repo = Repo::new("dummy/repo", &dst, None);
    let result = repo.update().await;
    assert!(result.is_err());
    assert!(result.unwrap_err().to_string().contains("not installed"));
}
/// `owner/repo` shorthand expands to a GitHub URL; full URLs pass through.
///
/// Changed from `#[tokio::test]` to a plain `#[test]`: `resolve_url` is
/// purely synchronous, so spinning up an async runtime here was needless.
#[test]
fn test_resolve_url_adds_github_prefix() {
    assert_eq!(resolve_url("owner/repo"), "https://github.com/owner/repo");
    assert_eq!(
        resolve_url("https://github.com/owner/repo"),
        "https://github.com/owner/repo"
    );
}
#[tokio::test]
async fn test_sync_clones_new_repo() {
    // First sync of a missing destination clones and reports a from-less change.
    let root = tempdir().unwrap();
    let src = root.path().join("src");
    let dst = root.path().join("dst");
    fs::create_dir_all(&src).unwrap();
    git_cmd(&src).args(["init"]).output().await.unwrap();
    fs::write(src.join("hello.txt"), "hello").unwrap();
    git_cmd(&src).args(["add", "."]).output().await.unwrap();
    git_cmd(&src)
        .args(["commit", "-m", "init"])
        .output()
        .await
        .unwrap();
    let repo = Repo::new(src.to_str().unwrap(), &dst, None);
    let change = repo.sync().await.unwrap();
    // Worktree materialized with the committed content.
    assert!(dst.join("hello.txt").exists());
    let content = fs::read_to_string(dst.join("hello.txt")).unwrap();
    assert_eq!(content, "hello");
    let c = change.expect("new clone should produce a GitChange");
    assert!(c.from.is_none());
    assert!(!c.to.is_empty());
    assert!(c.subjects.is_empty());
}
#[tokio::test]
async fn test_sync_updates_existing_repo() {
    // Repeated syncs: clone -> no-op -> fetch+advance with subjects collected.
    let root = tempdir().unwrap();
    let src = root.path().join("src");
    let dst = root.path().join("dst");
    fs::create_dir_all(&src).unwrap();
    git_cmd(&src).args(["init"]).output().await.unwrap();
    fs::write(src.join("hello.txt"), "hello").unwrap();
    git_cmd(&src).args(["add", "."]).output().await.unwrap();
    git_cmd(&src)
        .args(["commit", "-m", "init"])
        .output()
        .await
        .unwrap();
    let repo = Repo::new(src.to_str().unwrap(), &dst, None);
    let initial = repo.sync().await.unwrap();
    assert!(initial.is_some(), "first sync = clone produces a change");
    let noop = repo.sync().await.unwrap();
    assert!(noop.is_none(), "no-op sync should yield None");
    // Advance the source repo by one commit.
    fs::write(src.join("hello.txt"), "updated").unwrap();
    git_cmd(&src).args(["add", "."]).output().await.unwrap();
    git_cmd(&src)
        .args(["commit", "-m", "update"])
        .output()
        .await
        .unwrap();
    let updated = repo.sync().await.unwrap().expect("HEAD moved");
    assert!(updated.from.is_some(), "from should be the previous HEAD");
    assert_ne!(updated.from.as_deref(), Some(updated.to.as_str()));
    assert!(
        updated.subjects.iter().any(|s| s.contains("update")),
        "subjects should contain the new commit, got {:?}",
        updated.subjects
    );
    // Worktree reflects the new remote tip.
    let content = fs::read_to_string(dst.join("hello.txt")).unwrap();
    assert_eq!(content, "updated");
}
#[tokio::test]
async fn test_sync_breaking_commit_detected() {
    // A conventional-commit "feat!:" subject lands in breaking_subjects.
    let root = tempdir().unwrap();
    let src = root.path().join("src");
    let dst = root.path().join("dst");
    fs::create_dir_all(&src).unwrap();
    git_cmd(&src).args(["init"]).output().await.unwrap();
    fs::write(src.join("hello.txt"), "v1").unwrap();
    git_cmd(&src).args(["add", "."]).output().await.unwrap();
    git_cmd(&src)
        .args(["commit", "-m", "init"])
        .output()
        .await
        .unwrap();
    let repo = Repo::new(src.to_str().unwrap(), &dst, None);
    repo.sync().await.unwrap();
    // Add a breaking-change commit upstream, then sync again.
    fs::write(src.join("hello.txt"), "v2").unwrap();
    git_cmd(&src).args(["add", "."]).output().await.unwrap();
    git_cmd(&src)
        .args(["commit", "-m", "feat!: redesign"])
        .output()
        .await
        .unwrap();
    let change = repo.sync().await.unwrap().expect("HEAD moved");
    assert_eq!(change.breaking_subjects.len(), 1, "{:?}", change);
    assert!(change.breaking_subjects[0].contains("feat!: redesign"));
}
/// Resolve HEAD of the repo in `dir` to its full SHA via the git CLI.
async fn git_head(dir: &Path) -> String {
    let output = git_cmd(dir)
        .args(["rev-parse", "HEAD"])
        .output()
        .await
        .unwrap();
    String::from_utf8(output.stdout).unwrap().trim().to_string()
}
#[tokio::test]
async fn test_remote_head_reports_tracking_branch_tip() {
    // remote_head() must track the fetched remote tip, even when the local
    // checkout is pinned to an older commit.
    let root = tempdir().unwrap();
    let src = root.path().join("src");
    let dst = root.path().join("dst");
    fs::create_dir_all(&src).unwrap();
    git_cmd(&src).args(["init"]).output().await.unwrap();
    fs::write(src.join("a.txt"), "v1").unwrap();
    git_cmd(&src).args(["add", "."]).output().await.unwrap();
    git_cmd(&src)
        .args(["commit", "-m", "init"])
        .output()
        .await
        .unwrap();
    let initial = git_head(&src).await;
    let repo = Repo::new(src.to_str().unwrap(), &dst, None);
    repo.sync().await.unwrap();
    assert_eq!(
        repo.remote_head().await.unwrap().as_deref(),
        Some(initial.as_str()),
        "fresh clone: remote_head should match HEAD"
    );
    // Move the remote forward by one commit.
    fs::write(src.join("a.txt"), "v2").unwrap();
    git_cmd(&src).args(["add", "."]).output().await.unwrap();
    git_cmd(&src)
        .args(["commit", "-m", "advance"])
        .output()
        .await
        .unwrap();
    let new_tip = git_head(&src).await;
    assert_ne!(new_tip, initial, "remote tip must have moved");
    // Pin the local checkout at the old commit while fetching the new tip.
    let pinned = Repo::new(src.to_str().unwrap(), &dst, Some(initial.as_str()));
    pinned.sync().await.unwrap();
    assert_eq!(
        pinned.head_commit().await.unwrap(),
        initial,
        "pinned sync must keep HEAD at the requested rev"
    );
    let rh = pinned.remote_head().await.unwrap();
    assert_eq!(
        rh.as_deref(),
        Some(new_tip.as_str()),
        "remote_head must report the fetched remote tip, not HEAD"
    );
    assert_ne!(rh.as_deref(), Some(initial.as_str()));
}
#[tokio::test]
async fn test_resolve_revision_locally_handles_sha_branch_tag_and_missing() {
    // SHA, branch name, and lightweight tag all resolve to the same commit;
    // an unknown rev resolves to None without erroring.
    let root = tempdir().unwrap();
    let src = root.path().join("src");
    let dst = root.path().join("dst");
    fs::create_dir_all(&src).unwrap();
    git_cmd(&src).args(["init"]).output().await.unwrap();
    fs::write(src.join("a.txt"), "seed").unwrap();
    git_cmd(&src).args(["add", "."]).output().await.unwrap();
    git_cmd(&src)
        .args(["commit", "-m", "init"])
        .output()
        .await
        .unwrap();
    git_cmd(&src)
        .args(["tag", "v1.0.0"])
        .output()
        .await
        .unwrap();
    let head_sha = git_head(&src).await;
    // Name of the default branch (varies with git's init.defaultBranch).
    let branch = {
        let out = git_cmd(&src)
            .args(["rev-parse", "--abbrev-ref", "HEAD"])
            .output()
            .await
            .unwrap();
        String::from_utf8(out.stdout).unwrap().trim().to_string()
    };
    let repo = Repo::new(src.to_str().unwrap(), &dst, None);
    repo.sync().await.unwrap();
    assert_eq!(
        repo.resolve_revision_locally(&head_sha).await.unwrap(),
        Some(head_sha.clone()),
    );
    assert_eq!(
        repo.resolve_revision_locally(&branch).await.unwrap(),
        Some(head_sha.clone()),
    );
    assert_eq!(
        repo.resolve_revision_locally("v1.0.0").await.unwrap(),
        Some(head_sha.clone()),
    );
    assert_eq!(
        repo.resolve_revision_locally("no-such-rev").await.unwrap(),
        None,
    );
}
#[tokio::test]
async fn test_resolve_revision_locally_returns_none_on_missing_clone() {
    // Resolving against a never-cloned directory is Ok(None), not an error.
    let root = tempdir().unwrap();
    let dst = root.path().join("never-cloned");
    let repo = Repo::new("dummy", &dst, None);
    assert_eq!(repo.resolve_revision_locally("HEAD").await.unwrap(), None,);
}
#[tokio::test]
async fn test_resolve_revision_locally_peels_annotated_tag_to_commit() {
    // Annotated tags point at tag objects; resolution must peel to the commit.
    let root = tempdir().unwrap();
    let src = root.path().join("src");
    let dst = root.path().join("dst");
    fs::create_dir_all(&src).unwrap();
    git_cmd(&src).args(["init"]).output().await.unwrap();
    fs::write(src.join("a.txt"), "seed").unwrap();
    git_cmd(&src).args(["add", "."]).output().await.unwrap();
    git_cmd(&src)
        .args(["commit", "-m", "init"])
        .output()
        .await
        .unwrap();
    git_cmd(&src)
        .args(["tag", "-a", "v2.0.0", "-m", "annotated"])
        .output()
        .await
        .unwrap();
    let head_sha = git_head(&src).await;
    let repo = Repo::new(src.to_str().unwrap(), &dst, None);
    repo.sync().await.unwrap();
    assert_eq!(
        repo.resolve_revision_locally("v2.0.0").await.unwrap(),
        Some(head_sha),
        "annotated tag must resolve to the target commit SHA",
    );
}
#[tokio::test]
async fn test_checkout_locally_moves_head_to_existing_commit() {
    // checkout_locally() moves HEAD to an older commit without fetching,
    // and a repeat checkout of the same rev is a no-op (None).
    let root = tempdir().unwrap();
    let dst = root.path().join("dst");
    fs::create_dir_all(&dst).unwrap();
    git_cmd(&dst).args(["init"]).output().await.unwrap();
    fs::write(dst.join("a.txt"), "v1").unwrap();
    git_cmd(&dst).args(["add", "."]).output().await.unwrap();
    git_cmd(&dst)
        .args(["commit", "-m", "init"])
        .output()
        .await
        .unwrap();
    let first = git_head(&dst).await;
    fs::write(dst.join("a.txt"), "v2").unwrap();
    git_cmd(&dst).args(["add", "."]).output().await.unwrap();
    git_cmd(&dst)
        .args(["commit", "-m", "bump"])
        .output()
        .await
        .unwrap();
    let second = git_head(&dst).await;
    let repo = Repo::new("dummy", &dst, None);
    assert_eq!(repo.head_commit().await.unwrap(), second);
    let change = repo.checkout_locally(&first).await.unwrap();
    assert!(
        change.is_some(),
        "HEAD should have moved, expected a GitChange"
    );
    assert_eq!(repo.head_commit().await.unwrap(), first);
    let change = repo.checkout_locally(&first).await.unwrap();
    assert!(change.is_none(), "re-checkout of same rev should be no-op");
}
#[test]
fn ensure_all_branches_refspec_replaces_narrow_default_refspec() {
    // A single-branch refspec is rewritten in place, not duplicated.
    let tmp = tempdir().unwrap();
    let dst = tmp.path();
    fs::create_dir_all(dst.join(".git")).unwrap();
    let initial = "[remote \"origin\"]\n\turl = https://github.com/foo/bar\n\tfetch = +refs/heads/main:refs/remotes/origin/main\n";
    fs::write(dst.join(".git/config"), initial).unwrap();
    ensure_all_branches_refspec(dst).unwrap();
    let after = fs::read_to_string(dst.join(".git/config")).unwrap();
    assert!(
        after.contains("+refs/heads/*:refs/remotes/origin/*"),
        "should rewrite to all-branch refspec: {after}"
    );
    assert!(
        !after.contains("refs/remotes/origin/main"),
        "narrow refspec should be replaced, not duplicated: {after}"
    );
}
#[test]
fn ensure_all_branches_refspec_is_idempotent_when_already_correct() {
    // A config already carrying the wide refspec must be left untouched.
    let tmp = tempdir().unwrap();
    let dst = tmp.path();
    fs::create_dir_all(dst.join(".git")).unwrap();
    let already_correct =
        "[remote \"origin\"]\n\turl = x\n\tfetch = +refs/heads/*:refs/remotes/origin/*\n";
    fs::write(dst.join(".git/config"), already_correct).unwrap();
    ensure_all_branches_refspec(dst).unwrap();
    let after = fs::read_to_string(dst.join(".git/config")).unwrap();
    assert_eq!(
        after.matches("fetch = ").count(),
        1,
        "refspec should not be duplicated: {after}"
    );
}
#[test]
fn ensure_all_branches_refspec_only_touches_origin_section() {
    // Other remotes' fetch lines must be preserved verbatim.
    let tmp = tempdir().unwrap();
    let dst = tmp.path();
    fs::create_dir_all(dst.join(".git")).unwrap();
    let mixed = "[remote \"upstream\"]\n\tfetch = +refs/heads/main:refs/remotes/upstream/main\n[remote \"origin\"]\n\tfetch = +refs/heads/main:refs/remotes/origin/main\n";
    fs::write(dst.join(".git/config"), mixed).unwrap();
    ensure_all_branches_refspec(dst).unwrap();
    let after = fs::read_to_string(dst.join(".git/config")).unwrap();
    assert!(
        after.contains("upstream/main"),
        "upstream section must be preserved: {after}"
    );
    assert!(
        after.contains("+refs/heads/*:refs/remotes/origin/*"),
        "origin should be normalized: {after}"
    );
}
#[test]
fn ensure_all_branches_refspec_inserts_into_origin_when_origin_is_not_last_section() {
    // When origin has no fetch line and another section follows it, the
    // injected line must land inside the origin section, not at EOF.
    let tmp = tempdir().unwrap();
    let dst = tmp.path();
    fs::create_dir_all(dst.join(".git")).unwrap();
    let initial = "[remote \"origin\"]\n\turl = https://github.com/foo/bar\n[branch \"main\"]\n\tremote = origin\n\tmerge = refs/heads/main\n";
    fs::write(dst.join(".git/config"), initial).unwrap();
    ensure_all_branches_refspec(dst).unwrap();
    let after = fs::read_to_string(dst.join(".git/config")).unwrap();
    let fetch_pos = after
        .find("fetch = +refs/heads/*")
        .expect("fetch line written");
    let branch_pos = after
        .find("[branch \"main\"]")
        .expect("branch section preserved");
    assert!(
        fetch_pos < branch_pos,
        "fetch line must be inside [remote \"origin\"], i.e. BEFORE [branch \"main\"]:\n{after}"
    );
    assert!(after.contains("merge = refs/heads/main"));
}
#[tokio::test]
async fn test_sync_resolves_non_default_branch_via_full_refspec() {
// End-to-end: syncing with rev = a branch that is NOT the remote's
// default (`v1`) must succeed and land on that branch's tip. Per the
// expect message below, this relies on refspec normalization so the
// clone can see branches beyond the default one.
let root = tempdir().unwrap();
let src = root.path().join("src");
let dst = root.path().join("dst");
fs::create_dir_all(&src).unwrap();
git_cmd(&src).args(["init"]).output().await.unwrap();
// Read the freshly-initialized default branch name (varies with the
// user's init.defaultBranch setting — could be master or main).
let init_head = git_cmd(&src)
.args(["symbolic-ref", "--short", "HEAD"])
.output()
.await
.expect("symbolic-ref HEAD just after init");
let default_branch = String::from_utf8_lossy(&init_head.stdout)
.trim()
.to_string();
// Guard the test's premise: "v1" must really be a non-default branch.
assert_ne!(
default_branch, "v1",
"test invariant: init default must not be v1"
);
// One commit on the default branch...
fs::write(src.join("a.txt"), "main-1").unwrap();
git_cmd(&src).args(["add", "."]).output().await.unwrap();
git_cmd(&src)
.args(["commit", "-m", "main"])
.output()
.await
.unwrap();
// ...then branch off to v1 and commit there, so the two tips diverge.
git_cmd(&src)
.args(["checkout", "-b", "v1"])
.output()
.await
.unwrap();
fs::write(src.join("a.txt"), "v1-1").unwrap();
git_cmd(&src).args(["add", "."]).output().await.unwrap();
git_cmd(&src)
.args(["commit", "-m", "v1"])
.output()
.await
.unwrap();
let v1_head = git_head(&src).await;
// Park the source repo back on its default branch so the clone's
// default HEAD is NOT v1 — forcing sync to resolve v1 explicitly.
git_cmd(&src)
.args(["checkout", &default_branch])
.output()
.await
.expect("checkout init default before clone");
let url = format!("file://{}", src.display());
let repo = Repo::new(&url, &dst, Some("v1"));
repo.sync()
.await
.expect("sync to v1 should succeed after refspec normalization");
let head = repo.head_commit().await.unwrap();
assert_eq!(head, v1_head, "checkout should land on v1 tip");
}
#[tokio::test]
async fn test_checkout_locally_errors_when_rev_not_present() {
    // A commit id that no fetch ever brought into the clone must make
    // checkout_locally fail loudly rather than pretend it worked.
    let root = tempdir().unwrap();
    let src = root.path().join("src");
    let dst = root.path().join("dst");
    fs::create_dir_all(&src).unwrap();
    git_cmd(&src).args(["init"]).output().await.unwrap();
    fs::write(src.join("a.txt"), "only").unwrap();
    git_cmd(&src).args(["add", "."]).output().await.unwrap();
    git_cmd(&src)
        .args(["commit", "-m", "init"])
        .output()
        .await
        .unwrap();
    let repo = Repo::new(src.to_str().unwrap(), &dst, None);
    repo.sync().await.unwrap();
    // 40 hex f's: syntactically a valid object id, guaranteed absent.
    let bogus_rev = "ffffffffffffffffffffffffffffffffffffffff";
    let result = repo.checkout_locally(bogus_rev).await;
    assert!(
        result.is_err(),
        "unknown rev must error, not silently succeed"
    );
}
#[tokio::test]
async fn test_update_returns_change_or_none() {
    // update() contract: None while upstream HEAD is unchanged, and a
    // populated GitChange (old id + commit subjects) once it moves.
    let root = tempdir().unwrap();
    let src = root.path().join("src");
    let dst = root.path().join("dst");
    fs::create_dir_all(&src).unwrap();
    git_cmd(&src).args(["init"]).output().await.unwrap();
    fs::write(src.join("a.txt"), "a").unwrap();
    git_cmd(&src).args(["add", "."]).output().await.unwrap();
    git_cmd(&src)
        .args(["commit", "-m", "init"])
        .output()
        .await
        .unwrap();
    let repo = Repo::new(src.to_str().unwrap(), &dst, None);
    repo.sync().await.unwrap();
    // Nothing new upstream yet → no change reported.
    assert!(repo.update().await.unwrap().is_none());
    // Advance the upstream by one commit with subject "bump".
    fs::write(src.join("a.txt"), "b").unwrap();
    git_cmd(&src).args(["add", "."]).output().await.unwrap();
    git_cmd(&src)
        .args(["commit", "-m", "bump"])
        .output()
        .await
        .unwrap();
    let change = repo.update().await.unwrap().expect("HEAD moved");
    assert!(change.from.is_some());
    assert!(change.subjects.iter().any(|s| s.contains("bump")));
}
#[test]
fn test_is_doc_path() {
    // Positive set: README*/CHANGELOG* at the repo root in any case and
    // with any extension, plus any file under doc/.
    assert!(is_doc_path("README"));
    assert!(is_doc_path("README.md"));
    assert!(is_doc_path("readme.txt"));
    assert!(is_doc_path("ReadMe"));
    assert!(is_doc_path("CHANGELOG"));
    assert!(is_doc_path("CHANGELOG.md"));
    assert!(is_doc_path("changelog"));
    assert!(is_doc_path("doc/foo.txt"));
    assert!(is_doc_path("doc/sub/bar.txt"));
    // Negative set: empty path, a bare directory entry, the `docs/`
    // (plural) directory, README below the root, and ordinary files.
    // (Was three asserts crammed onto one line; one per line so a
    // failure message points at the exact case.)
    assert!(!is_doc_path(""));
    assert!(!is_doc_path("doc/"));
    assert!(!is_doc_path("docs/foo.txt"));
    assert!(!is_doc_path("src/README.md"));
    assert!(!is_doc_path("Cargo.toml"));
}
#[tokio::test]
async fn test_doc_files_changed_filters_to_doc_set() {
let root = tempdir().unwrap();
let src = root.path().join("repo");
fs::create_dir_all(&src).unwrap();
fs::create_dir_all(src.join("doc")).unwrap();
git_cmd(&src).args(["init"]).output().await.unwrap();
fs::write(src.join("README.md"), "v1\n").unwrap();
fs::write(src.join("doc/intro.txt"), "hello\n").unwrap();
fs::write(src.join("src.txt"), "code v1\n").unwrap();
git_cmd(&src).args(["add", "."]).output().await.unwrap();
git_cmd(&src)
.args(["commit", "-m", "init"])
.output()
.await
.unwrap();
let from = String::from_utf8(
git_cmd(&src)
.args(["rev-parse", "HEAD"])
.output()
.await
.unwrap()
.stdout,
)
.unwrap()
.trim()
.to_string();
fs::write(src.join("README.md"), "v2\n").unwrap();
fs::write(src.join("doc/intro.txt"), "world\n").unwrap();
fs::write(src.join("src.txt"), "code v2\n").unwrap();
git_cmd(&src).args(["add", "."]).output().await.unwrap();
git_cmd(&src)
.args(["commit", "-m", "bump"])
.output()
.await
.unwrap();
let to = String::from_utf8(
git_cmd(&src)
.args(["rev-parse", "HEAD"])
.output()
.await
.unwrap()
.stdout,
)
.unwrap()
.trim()
.to_string();
let mut files = doc_files_changed(&src, &from, &to);
files.sort();
assert_eq!(
files,
vec!["README.md".to_string(), "doc/intro.txt".to_string()]
);
}
// Plain #[test]: the body is fully synchronous (doc_files_changed is
// called without .await), so spinning up a tokio runtime was needless.
#[test]
fn test_doc_files_changed_resilient_to_missing_repo() {
    // A path that was never a git repository must yield an empty list,
    // not an error or a panic.
    let root = tempdir().unwrap();
    let nowhere = root.path().join("nowhere");
    let files = doc_files_changed(&nowhere, "deadbeef", "cafebabe");
    assert!(files.is_empty());
}
#[tokio::test]
async fn test_doc_file_patch_emits_unified_diff() {
let root = tempdir().unwrap();
let src = root.path().join("repo");
fs::create_dir_all(&src).unwrap();
git_cmd(&src).args(["init"]).output().await.unwrap();
fs::write(src.join("README.md"), "alpha\nbeta\ngamma\n").unwrap();
git_cmd(&src).args(["add", "."]).output().await.unwrap();
git_cmd(&src)
.args(["commit", "-m", "init"])
.output()
.await
.unwrap();
let from = String::from_utf8(
git_cmd(&src)
.args(["rev-parse", "HEAD"])
.output()
.await
.unwrap()
.stdout,
)
.unwrap()
.trim()
.to_string();
fs::write(src.join("README.md"), "alpha\nBETA\ngamma\n").unwrap();
git_cmd(&src).args(["add", "."]).output().await.unwrap();
git_cmd(&src)
.args(["commit", "-m", "bump"])
.output()
.await
.unwrap();
let to = String::from_utf8(
git_cmd(&src)
.args(["rev-parse", "HEAD"])
.output()
.await
.unwrap()
.stdout,
)
.unwrap()
.trim()
.to_string();
let patch = doc_file_patch(&src, &from, &to, "README.md").expect("patch");
assert!(patch.contains("diff --git a/README.md b/README.md"));
assert!(patch.contains("--- a/README.md"));
assert!(patch.contains("+++ b/README.md"));
assert!(patch.contains("@@"));
assert!(patch.contains("-beta"));
assert!(patch.contains("+BETA"));
}
#[tokio::test]
async fn test_doc_file_patch_handles_addition() {
let root = tempdir().unwrap();
let src = root.path().join("repo");
fs::create_dir_all(&src).unwrap();
git_cmd(&src).args(["init"]).output().await.unwrap();
fs::write(src.join("README.md"), "v1\n").unwrap();
git_cmd(&src).args(["add", "."]).output().await.unwrap();
git_cmd(&src)
.args(["commit", "-m", "init"])
.output()
.await
.unwrap();
let from = String::from_utf8(
git_cmd(&src)
.args(["rev-parse", "HEAD"])
.output()
.await
.unwrap()
.stdout,
)
.unwrap()
.trim()
.to_string();
fs::write(src.join("CHANGELOG.md"), "first release\n").unwrap();
git_cmd(&src).args(["add", "."]).output().await.unwrap();
git_cmd(&src)
.args(["commit", "-m", "add cl"])
.output()
.await
.unwrap();
let to = String::from_utf8(
git_cmd(&src)
.args(["rev-parse", "HEAD"])
.output()
.await
.unwrap()
.stdout,
)
.unwrap()
.trim()
.to_string();
let patch = doc_file_patch(&src, &from, &to, "CHANGELOG.md").expect("patch");
assert!(patch.contains("diff --git a/CHANGELOG.md b/CHANGELOG.md"));
assert!(patch.contains("+first release"));
}
#[tokio::test]
async fn test_doc_file_patch_handles_binary_blob() {
let root = tempdir().unwrap();
let src = root.path().join("repo");
fs::create_dir_all(&src).unwrap();
git_cmd(&src).args(["init"]).output().await.unwrap();
fs::create_dir_all(src.join("doc")).unwrap();
fs::write(
src.join("doc/asset.bin"),
[0xFFu8, 0x00, 0xAB, 0x00, b'd', b'\n'],
)
.unwrap();
git_cmd(&src).args(["add", "."]).output().await.unwrap();
git_cmd(&src)
.args(["commit", "-m", "init"])
.output()
.await
.unwrap();
let from = String::from_utf8(
git_cmd(&src)
.args(["rev-parse", "HEAD"])
.output()
.await
.unwrap()
.stdout,
)
.unwrap()
.trim()
.to_string();
fs::write(src.join("doc/asset.bin"), [0x00u8, 0x01, 0x02, 0x03, b'\n']).unwrap();
git_cmd(&src).args(["add", "."]).output().await.unwrap();
git_cmd(&src)
.args(["commit", "-m", "bump"])
.output()
.await
.unwrap();
let to = String::from_utf8(
git_cmd(&src)
.args(["rev-parse", "HEAD"])
.output()
.await
.unwrap()
.stdout,
)
.unwrap()
.trim()
.to_string();
let patch = doc_file_patch(&src, &from, &to, "doc/asset.bin").expect("patch");
assert!(patch.contains("diff --git a/doc/asset.bin b/doc/asset.bin"));
assert!(patch.contains("Binary files a/doc/asset.bin and b/doc/asset.bin differ"));
assert!(!patch.contains("@@"));
}
#[tokio::test]
async fn test_doc_file_patches_skips_paths_not_in_diff() {
let root = tempdir().unwrap();
let src = root.path().join("repo");
fs::create_dir_all(&src).unwrap();
git_cmd(&src).args(["init"]).output().await.unwrap();
fs::write(src.join("README.md"), "v1\n").unwrap();
git_cmd(&src).args(["add", "."]).output().await.unwrap();
git_cmd(&src)
.args(["commit", "-m", "init"])
.output()
.await
.unwrap();
let from = String::from_utf8(
git_cmd(&src)
.args(["rev-parse", "HEAD"])
.output()
.await
.unwrap()
.stdout,
)
.unwrap()
.trim()
.to_string();
fs::write(src.join("README.md"), "v2\n").unwrap();
git_cmd(&src).args(["add", "."]).output().await.unwrap();
git_cmd(&src)
.args(["commit", "-m", "bump"])
.output()
.await
.unwrap();
let to = String::from_utf8(
git_cmd(&src)
.args(["rev-parse", "HEAD"])
.output()
.await
.unwrap()
.stdout,
)
.unwrap()
.trim()
.to_string();
let paths = vec!["README.md".to_string(), "ghost.md".to_string()];
let patches = doc_file_patches(&src, &from, &to, &paths);
assert!(patches.contains_key("README.md"));
assert!(!patches.contains_key("ghost.md"));
}
#[tokio::test]
async fn test_doc_file_patch_lines_are_separated() {
let root = tempdir().unwrap();
let src = root.path().join("repo");
fs::create_dir_all(&src).unwrap();
git_cmd(&src).args(["init"]).output().await.unwrap();
fs::write(src.join("README.md"), "alpha\nbeta\n").unwrap();
git_cmd(&src).args(["add", "."]).output().await.unwrap();
git_cmd(&src)
.args(["commit", "-m", "init"])
.output()
.await
.unwrap();
let from = String::from_utf8(
git_cmd(&src)
.args(["rev-parse", "HEAD"])
.output()
.await
.unwrap()
.stdout,
)
.unwrap()
.trim()
.to_string();
fs::write(src.join("README.md"), "ALPHA\nBETA\n").unwrap();
git_cmd(&src).args(["add", "."]).output().await.unwrap();
git_cmd(&src)
.args(["commit", "-m", "bump"])
.output()
.await
.unwrap();
let to = String::from_utf8(
git_cmd(&src)
.args(["rev-parse", "HEAD"])
.output()
.await
.unwrap()
.stdout,
)
.unwrap()
.trim()
.to_string();
let patch = doc_file_patch(&src, &from, &to, "README.md").expect("patch");
assert!(patch.contains("-alpha\n"));
assert!(patch.contains("-beta\n"));
assert!(patch.contains("+ALPHA\n"));
assert!(patch.contains("+BETA\n"));
}
#[tokio::test]
async fn test_doc_file_patch_returns_none_for_unchanged_path() {
let root = tempdir().unwrap();
let src = root.path().join("repo");
fs::create_dir_all(&src).unwrap();
git_cmd(&src).args(["init"]).output().await.unwrap();
fs::write(src.join("README.md"), "v1\n").unwrap();
fs::write(src.join("other.txt"), "stable\n").unwrap();
git_cmd(&src).args(["add", "."]).output().await.unwrap();
git_cmd(&src)
.args(["commit", "-m", "init"])
.output()
.await
.unwrap();
let from = String::from_utf8(
git_cmd(&src)
.args(["rev-parse", "HEAD"])
.output()
.await
.unwrap()
.stdout,
)
.unwrap()
.trim()
.to_string();
fs::write(src.join("README.md"), "v2\n").unwrap();
git_cmd(&src).args(["add", "."]).output().await.unwrap();
git_cmd(&src)
.args(["commit", "-m", "bump"])
.output()
.await
.unwrap();
let to = String::from_utf8(
git_cmd(&src)
.args(["rev-parse", "HEAD"])
.output()
.await
.unwrap()
.stdout,
)
.unwrap()
.trim()
.to_string();
assert!(doc_file_patch(&src, &from, &to, "other.txt").is_none());
}
}