use std::path::{Path, PathBuf};
use crate::error::SkillfileError;
use crate::models::Entry;
/// Repo-relative directory under which all patch files live.
pub const PATCHES_DIR: &str = ".skillfile/patches";

/// Returns the absolute patches directory for the given repository root.
#[must_use]
pub fn patches_root(repo_root: &Path) -> PathBuf {
    let mut root = repo_root.to_path_buf();
    root.push(PATCHES_DIR);
    root
}
/// Computes the on-disk location of a single-file patch for `entry`:
/// `<repo_root>/.skillfile/patches/<type-dir>/<name>.patch`.
pub fn patch_path(entry: &Entry, repo_root: &Path) -> PathBuf {
    let file_name = format!("{}.patch", entry.name);
    patches_root(repo_root)
        .join(entry.entity_type.dir_name())
        .join(file_name)
}
/// Reports whether a single-file patch currently exists on disk for `entry`.
#[must_use]
pub fn has_patch(entry: &Entry, repo_root: &Path) -> bool {
    let candidate = patch_path(entry, repo_root);
    candidate.exists()
}
/// Writes `patch_text` to the entry's patch file, creating parent
/// directories as needed.
///
/// # Errors
/// Propagates any I/O failure from directory creation or the write.
pub fn write_patch(
    entry: &Entry,
    patch_text: &str,
    repo_root: &Path,
) -> Result<(), SkillfileError> {
    let target = patch_path(entry, repo_root);
    if let Some(dir) = target.parent() {
        std::fs::create_dir_all(dir)?;
    }
    std::fs::write(&target, patch_text)?;
    Ok(())
}
/// Reads the entry's patch file into a `String`.
///
/// # Errors
/// Propagates the I/O error if the file is missing or unreadable.
pub fn read_patch(entry: &Entry, repo_root: &Path) -> Result<String, SkillfileError> {
    let contents = std::fs::read_to_string(patch_path(entry, repo_root))?;
    Ok(contents)
}
/// Deletes the entry's patch file if present (a no-op otherwise) and prunes
/// the parent directory when it becomes empty.
///
/// # Errors
/// Propagates the I/O error from the file removal itself.
pub fn remove_patch(entry: &Entry, repo_root: &Path) -> Result<(), SkillfileError> {
    let target = patch_path(entry, repo_root);
    if target.exists() {
        std::fs::remove_file(&target)?;
        remove_empty_parent(&target);
    }
    Ok(())
}
/// Computes the on-disk location of a per-file patch inside a directory
/// entry: `<repo_root>/.skillfile/patches/<type-dir>/<name>/<filename>.patch`.
///
/// Fix: the `filename` parameter was previously ignored — the last path
/// component was the literal `"(unknown).patch"`, so every file of an entry
/// mapped to the same patch path. It now uses `filename` as the tests expect.
pub fn dir_patch_path(entry: &Entry, filename: &str, repo_root: &Path) -> PathBuf {
    patches_root(repo_root)
        .join(entry.entity_type.dir_name())
        .join(&entry.name)
        .join(format!("{filename}.patch"))
}
/// Reports whether the entry's patch directory exists and contains at least
/// one `*.patch` file (searched recursively via `walkdir`).
#[must_use]
pub fn has_dir_patch(entry: &Entry, repo_root: &Path) -> bool {
    let entry_dir = patches_root(repo_root)
        .join(entry.entity_type.dir_name())
        .join(&entry.name);
    entry_dir.is_dir()
        && walkdir(&entry_dir)
            .iter()
            .any(|p| p.extension().is_some_and(|ext| ext == "patch"))
}
/// Writes `patch_text` to an explicit patch path (as produced by
/// `dir_patch_path`), creating parent directories as needed.
///
/// # Errors
/// Propagates any I/O failure from directory creation or the write.
pub fn write_dir_patch(patch_path: &Path, patch_text: &str) -> Result<(), SkillfileError> {
    // Create the parent directory chain if the path has one.
    patch_path
        .parent()
        .map(std::fs::create_dir_all)
        .transpose()?;
    std::fs::write(patch_path, patch_text)?;
    Ok(())
}
/// Deletes one per-file patch of a directory entry if present (a no-op
/// otherwise) and prunes the entry directory when it becomes empty.
///
/// # Errors
/// Propagates the I/O error from the file removal itself.
pub fn remove_dir_patch(
    entry: &Entry,
    filename: &str,
    repo_root: &Path,
) -> Result<(), SkillfileError> {
    let target = dir_patch_path(entry, filename, repo_root);
    if target.exists() {
        std::fs::remove_file(&target)?;
        remove_empty_parent(&target);
    }
    Ok(())
}
/// Removes the entry's whole patch directory (all per-file patches at once).
/// A missing directory is a no-op.
///
/// # Errors
/// Propagates the I/O error from `remove_dir_all`.
pub fn remove_all_dir_patches(entry: &Entry, repo_root: &Path) -> Result<(), SkillfileError> {
    let entry_dir = patches_root(repo_root)
        .join(entry.entity_type.dir_name())
        .join(&entry.name);
    if !entry_dir.is_dir() {
        return Ok(());
    }
    std::fs::remove_dir_all(&entry_dir)?;
    Ok(())
}
/// Best-effort cleanup: removes `path`'s parent directory when it exists and
/// is empty. All failures (including a failed `read_dir`, which is treated as
/// "empty") are ignored — this is a tidy-up, not a guarantee.
fn remove_empty_parent(path: &Path) {
    let parent = match path.parent() {
        Some(dir) if dir.exists() => dir,
        _ => return,
    };
    let empty = match std::fs::read_dir(parent) {
        Ok(mut entries) => entries.next().is_none(),
        Err(_) => true,
    };
    if empty {
        let _ = std::fs::remove_dir(parent);
    }
}
/// Produces a unified diff (3 context lines, `a/<label>` / `b/<label>`
/// headers) turning `original` into `modified`. Identical inputs yield an
/// empty string. Every output line is newline-terminated and
/// `\ No newline at end of file` markers are dropped (see
/// `normalize_diff_line`).
pub fn generate_patch(original: &str, modified: &str, label: &str) -> String {
    if original == modified {
        return String::new();
    }
    let from_header = format!("a/{label}");
    let to_header = format!("b/{label}");
    let text_diff = similar::TextDiff::from_lines(original, modified);
    let raw = text_diff
        .unified_diff()
        .context_radius(3)
        .header(&from_header, &to_header)
        .to_string();
    if raw.is_empty() {
        return String::new();
    }
    let mut normalized = String::new();
    for segment in raw.split_inclusive('\n') {
        normalize_diff_line(segment, &mut normalized);
    }
    normalized
}
/// Appends one diff line to `result`, enforcing two invariants: `\ ` markers
/// ("No newline at end of file") are dropped entirely, and every appended
/// line ends with exactly one `'\n'`.
fn normalize_diff_line(line: &str, result: &mut String) {
    let is_marker = line.starts_with("\\ ");
    if is_marker {
        // Drop the marker, but keep the accumulated output newline-terminated.
        if !result.ends_with('\n') {
            result.push('\n');
        }
    } else {
        result.push_str(line);
        if !line.ends_with('\n') {
            result.push('\n');
        }
    }
}
/// One parsed hunk of a unified diff.
struct Hunk {
    /// 1-based original-file line number from the `-N` field of the `@@` header.
    orig_start: usize,
    /// Raw body lines (context, `-`, and `+` lines) as collected by the parser.
    body: Vec<String>,
}
/// Parses `patch_text` into its hunks. Any line that is not a `@@ ` hunk
/// header (including the `--- `/`+++ ` file headers) is skipped; each header
/// yields a `Hunk` whose body is gathered by `collect_hunk_body`.
///
/// # Errors
/// Returns `SkillfileError::Manifest` when a `@@ ` header's original-start
/// field cannot be parsed as a number.
fn parse_hunks(patch_text: &str) -> Result<Vec<Hunk>, SkillfileError> {
    let lines: Vec<&str> = patch_text.split_inclusive('\n').collect();
    let mut hunks: Vec<Hunk> = Vec::new();
    let mut idx = 0usize;
    while idx < lines.len() {
        let line = lines[idx];
        if !line.starts_with("@@ ") {
            idx += 1;
            continue;
        }
        // Header shape: `@@ -<start>[,<count>] +<start>[,<count>] @@` — we
        // only need the original start line.
        let start_field = line
            .split_whitespace()
            .nth(1)
            .and_then(|tok| tok.trim_start_matches('-').split(',').next());
        let orig_start = start_field
            .and_then(|n| n.parse::<usize>().ok())
            .ok_or_else(|| SkillfileError::Manifest(format!("malformed hunk header: {line:?}")))?;
        idx += 1;
        let body = collect_hunk_body(&lines, &mut idx);
        hunks.push(Hunk { orig_start, body });
    }
    Ok(hunks)
}
/// Collects one hunk's body starting at `*pi`, advancing `*pi` past it.
/// Stops (without consuming) at the next `@@ `/`--- `/`+++ ` header; `\ `
/// marker lines are consumed but not stored.
fn collect_hunk_body(lines: &[&str], pi: &mut usize) -> Vec<String> {
    let mut body: Vec<String> = Vec::new();
    while let Some(hl) = lines.get(*pi) {
        if hl.starts_with("@@ ") || hl.starts_with("--- ") || hl.starts_with("+++ ") {
            break;
        }
        if !hl.starts_with("\\ ") {
            body.push((*hl).to_string());
        }
        *pi += 1;
    }
    body
}
/// Checks whether the hunk context `ctx_lines` matches `lines` starting at
/// index `start`. Line-ending characters are ignored when comparing; an
/// empty context trivially matches anywhere in bounds.
fn try_hunk_at(lines: &[String], start: usize, ctx_lines: &[&str]) -> bool {
    match lines.get(start..start + ctx_lines.len()) {
        Some(window) => window
            .iter()
            .zip(ctx_lines)
            .all(|(actual, expected)| actual.trim_end_matches(['\n', '\r']) == *expected),
        None => false,
    }
}

/// Fuzzy-matching context for locating a hunk whose stated line number has
/// drifted.
struct HunkSearch<'a> {
    /// The original file, one newline-terminated element per line.
    lines: &'a [String],
    /// Never match before this index (lines already consumed by prior hunks).
    min_pos: usize,
}

impl HunkSearch<'_> {
    /// Scans outward from `center` (forward candidate first, then backward,
    /// up to 99 lines away) for the first in-bounds position at or after
    /// `min_pos` where the context matches.
    fn search_nearby(&self, center: usize, ctx_lines: &[&str]) -> Option<usize> {
        for delta in 1..100usize {
            let forward = Some(center + delta);
            let backward = center.checked_sub(delta);
            for cand in [forward, backward].into_iter().flatten() {
                if cand >= self.min_pos
                    && cand <= self.lines.len()
                    && try_hunk_at(self.lines, cand, ctx_lines)
                {
                    return Some(cand);
                }
            }
        }
        None
    }
}
/// Resolves the 0-based position where a hunk applies: first the exact
/// `hunk_start`, then a nearby fuzzy search.
///
/// # Errors
/// With a non-empty context that matches nowhere, reports a context
/// mismatch (quoting the first context line, 1-based); with an empty
/// context, the patch must extend beyond the end of the file.
fn find_hunk_position(
    ctx: &HunkSearch<'_>,
    hunk_start: usize,
    ctx_lines: &[&str],
) -> Result<usize, SkillfileError> {
    if try_hunk_at(ctx.lines, hunk_start, ctx_lines) {
        Ok(hunk_start)
    } else if let Some(pos) = ctx.search_nearby(hunk_start, ctx_lines) {
        Ok(pos)
    } else if ctx_lines.is_empty() {
        Err(SkillfileError::Manifest(
            "patch extends beyond end of file".into(),
        ))
    } else {
        Err(SkillfileError::Manifest(format!(
            "context mismatch: cannot find context starting with {:?} near line {}",
            ctx_lines[0],
            hunk_start + 1
        )))
    }
}
/// Mutable cursor state while replaying hunks over the original lines.
struct PatchState<'a> {
    /// The original file, one newline-terminated element per line.
    lines: &'a [String],
    /// Lines emitted so far.
    output: Vec<String>,
    /// Next unconsumed index into `lines`.
    pos: usize,
}

impl<'a> PatchState<'a> {
    /// Starts a replay at the beginning of `lines` with empty output.
    fn new(lines: &'a [String]) -> Self {
        Self {
            lines,
            output: Vec::new(),
            pos: 0,
        }
    }

    /// Replays one hunk body line: context (` `) copies the original line,
    /// `-` skips it, `+` emits the added text. Anything else — including a
    /// context line past EOF — is ignored.
    fn apply_line(&mut self, hl: &str) {
        match hl.as_bytes().first() {
            Some(b' ') if self.pos < self.lines.len() => {
                self.output.push(self.lines[self.pos].clone());
                self.pos += 1;
            }
            Some(b'-') => self.pos += 1,
            Some(b'+') => self.output.push(hl[1..].to_string()),
            _ => {}
        }
    }

    /// Replays every line of `hunk` in order.
    fn apply_hunk(&mut self, hunk: &Hunk) {
        for hl in &hunk.body {
            self.apply_line(hl);
        }
    }
}
/// Applies a unified-diff `patch_text` to `original` and returns the patched
/// text. "Pure": touches no filesystem state.
///
/// Both inputs are normalized from CRLF to LF before matching, so the result
/// always uses LF line endings. Each hunk is placed at its stated position or
/// by the nearby fuzzy search in `find_hunk_position`.
///
/// # Errors
/// Returns `SkillfileError::Manifest` for a malformed hunk header or a hunk
/// whose context cannot be located.
pub fn apply_patch_pure(original: &str, patch_text: &str) -> Result<String, SkillfileError> {
    // An empty patch means "no changes".
    if patch_text.is_empty() {
        return Ok(original.to_string());
    }
    // Normalize line endings so CRLF originals/patches still match.
    let original = &original.replace("\r\n", "\n");
    let patch_text = &patch_text.replace("\r\n", "\n");
    // Keep the trailing '\n' on each line so output can be rebuilt by concat.
    let lines: Vec<String> = original
        .split_inclusive('\n')
        .map(ToString::to_string)
        .collect();
    let mut state = PatchState::new(&lines);
    for hunk in parse_hunks(patch_text)? {
        // Context for matching: the lines this hunk expects in the original
        // (space- and minus-prefixed), without their trailing newline.
        let ctx_lines: Vec<&str> = hunk
            .body
            .iter()
            .filter(|hl| !hl.is_empty() && (hl.starts_with(' ') || hl.starts_with('-')))
            .map(|hl| hl[1..].trim_end_matches('\n'))
            .collect();
        // Hunks apply in order: never search before lines already consumed.
        let search = HunkSearch {
            lines: &lines,
            min_pos: state.pos,
        };
        // Hunk headers are 1-based; convert to a 0-based index.
        let hunk_start =
            find_hunk_position(&search, hunk.orig_start.saturating_sub(1), &ctx_lines)?;
        // Copy the untouched lines between the previous hunk and this one.
        state
            .output
            .extend_from_slice(&lines[state.pos..hunk_start]);
        state.pos = hunk_start;
        state.apply_hunk(&hunk);
    }
    // Copy whatever follows the last hunk.
    state.output.extend_from_slice(&lines[state.pos..]);
    Ok(state.output.concat())
}
/// Recursively lists every file (not directory) under `dir`, sorted.
/// Unreadable or missing directories contribute nothing.
#[must_use]
pub fn walkdir(dir: &Path) -> Vec<PathBuf> {
    let mut files = Vec::new();
    walkdir_inner(dir, &mut files);
    files.sort();
    files
}

/// Depth-first helper for `walkdir`; appends file paths to `result`.
fn walkdir_inner(dir: &Path, result: &mut Vec<PathBuf>) {
    let entries = match std::fs::read_dir(dir) {
        Ok(rd) => rd,
        // Missing/unreadable directory: silently contribute nothing.
        Err(_) => return,
    };
    for entry in entries.filter_map(Result::ok) {
        let child = entry.path();
        if child.is_dir() {
            walkdir_inner(&child, result);
        } else {
            result.push(child);
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::models::{EntityType, SourceFields};

    /// Builds a minimal GitHub-sourced `Entry`; the source fields are
    /// irrelevant to path/patch logic and are fixed dummies.
    fn github_entry(name: &str, entity_type: EntityType) -> Entry {
        Entry {
            entity_type,
            name: name.to_string(),
            source: SourceFields::Github {
                owner_repo: "owner/repo".into(),
                path_in_repo: "agents/test.md".into(),
                ref_: "main".into(),
            },
        }
    }

    // --- generate_patch ---

    #[test]
    fn generate_patch_identical_returns_empty() {
        assert_eq!(generate_patch("hello\n", "hello\n", "test.md"), "");
    }

    #[test]
    fn generate_patch_has_headers() {
        let p = generate_patch("old\n", "new\n", "test.md");
        assert!(p.contains("--- a/test.md"), "missing fromfile header");
        assert!(p.contains("+++ b/test.md"), "missing tofile header");
    }

    #[test]
    fn generate_patch_add_line() {
        let p = generate_patch("line1\n", "line1\nline2\n", "test.md");
        assert!(p.contains("+line2"));
    }

    #[test]
    fn generate_patch_remove_line() {
        let p = generate_patch("line1\nline2\n", "line1\n", "test.md");
        assert!(p.contains("-line2"));
    }

    #[test]
    fn generate_patch_all_lines_end_with_newline() {
        let p = generate_patch("a\nb\n", "a\nc\n", "test.md");
        for seg in p.split_inclusive('\n') {
            assert!(seg.ends_with('\n'), "line does not end with \\n: {seg:?}");
        }
    }

    // --- apply_patch_pure round trips ---

    #[test]
    fn apply_patch_empty_patch_returns_original() {
        let result = apply_patch_pure("hello\n", "").unwrap();
        assert_eq!(result, "hello\n");
    }

    #[test]
    fn apply_patch_round_trip_add_line() {
        let orig = "line1\nline2\n";
        let modified = "line1\nline2\nline3\n";
        let patch = generate_patch(orig, modified, "test.md");
        let result = apply_patch_pure(orig, &patch).unwrap();
        assert_eq!(result, modified);
    }

    #[test]
    fn apply_patch_round_trip_remove_line() {
        let orig = "line1\nline2\nline3\n";
        let modified = "line1\nline3\n";
        let patch = generate_patch(orig, modified, "test.md");
        let result = apply_patch_pure(orig, &patch).unwrap();
        assert_eq!(result, modified);
    }

    #[test]
    fn apply_patch_round_trip_modify_line() {
        let orig = "# Title\n\nSome text here.\n";
        let modified = "# Title\n\nSome modified text here.\n";
        let patch = generate_patch(orig, modified, "test.md");
        let result = apply_patch_pure(orig, &patch).unwrap();
        assert_eq!(result, modified);
    }

    #[test]
    fn apply_patch_multi_hunk() {
        use std::fmt::Write;
        // Two edits far enough apart (lines 2 and 15 of 20) to force
        // separate hunks with the 3-line context radius.
        let mut orig = String::new();
        for i in 0..20 {
            let _ = writeln!(orig, "line{i}");
        }
        let mut modified = orig.clone();
        modified = modified.replace("line2\n", "MODIFIED2\n");
        modified = modified.replace("line15\n", "MODIFIED15\n");
        let patch = generate_patch(&orig, &modified, "test.md");
        assert!(patch.contains("@@"), "should have hunk headers");
        let result = apply_patch_pure(&orig, &patch).unwrap();
        assert_eq!(result, modified);
    }

    #[test]
    fn apply_patch_context_mismatch_errors() {
        let orig = "line1\nline2\n";
        let patch = "--- a/test.md\n+++ b/test.md\n@@ -1,2 +1,2 @@\n-totally_wrong\n+new\n";
        let result = apply_patch_pure(orig, patch);
        assert!(result.is_err());
        assert!(result.unwrap_err().to_string().contains("context mismatch"));
    }

    // --- path construction ---

    #[test]
    fn patch_path_single_file_agent() {
        let entry = github_entry("my-agent", EntityType::Agent);
        let root = Path::new("/repo");
        let p = patch_path(&entry, root);
        assert_eq!(
            p,
            Path::new("/repo/.skillfile/patches/agents/my-agent.patch")
        );
    }

    #[test]
    fn patch_path_single_file_skill() {
        let entry = github_entry("my-skill", EntityType::Skill);
        let root = Path::new("/repo");
        let p = patch_path(&entry, root);
        assert_eq!(
            p,
            Path::new("/repo/.skillfile/patches/skills/my-skill.patch")
        );
    }

    #[test]
    fn dir_patch_path_returns_correct() {
        let entry = github_entry("lang-pro", EntityType::Skill);
        let root = Path::new("/repo");
        let p = dir_patch_path(&entry, "python.md", root);
        assert_eq!(
            p,
            Path::new("/repo/.skillfile/patches/skills/lang-pro/python.md.patch")
        );
    }

    // --- filesystem round trips (tempdir-backed) ---

    #[test]
    fn write_read_remove_patch_round_trip() {
        let dir = tempfile::tempdir().unwrap();
        let entry = github_entry("test-agent", EntityType::Agent);
        let patch_text = "--- a/test-agent.md\n+++ b/test-agent.md\n@@ -1 +1 @@\n-old\n+new\n";
        write_patch(&entry, patch_text, dir.path()).unwrap();
        assert!(has_patch(&entry, dir.path()));
        let read = read_patch(&entry, dir.path()).unwrap();
        assert_eq!(read, patch_text);
        remove_patch(&entry, dir.path()).unwrap();
        assert!(!has_patch(&entry, dir.path()));
    }

    #[test]
    fn has_dir_patch_detects_patches() {
        let dir = tempfile::tempdir().unwrap();
        let entry = github_entry("lang-pro", EntityType::Skill);
        assert!(!has_dir_patch(&entry, dir.path()));
        write_dir_patch(
            &dir_patch_path(&entry, "python.md", dir.path()),
            "patch content",
        )
        .unwrap();
        assert!(has_dir_patch(&entry, dir.path()));
    }

    #[test]
    fn remove_all_dir_patches_clears_dir() {
        let dir = tempfile::tempdir().unwrap();
        let entry = github_entry("lang-pro", EntityType::Skill);
        write_dir_patch(&dir_patch_path(&entry, "python.md", dir.path()), "p1").unwrap();
        write_dir_patch(&dir_patch_path(&entry, "typescript.md", dir.path()), "p2").unwrap();
        assert!(has_dir_patch(&entry, dir.path()));
        remove_all_dir_patches(&entry, dir.path()).unwrap();
        assert!(!has_dir_patch(&entry, dir.path()));
    }

    #[test]
    fn remove_patch_nonexistent_is_noop() {
        let dir = tempfile::tempdir().unwrap();
        let entry = github_entry("ghost-agent", EntityType::Agent);
        assert!(!has_patch(&entry, dir.path()));
        remove_patch(&entry, dir.path()).unwrap();
        assert!(!has_patch(&entry, dir.path()));
    }

    #[test]
    fn remove_patch_cleans_up_empty_parent_dir() {
        let dir = tempfile::tempdir().unwrap();
        let entry = github_entry("solo-skill", EntityType::Skill);
        write_patch(&entry, "some patch text\n", dir.path()).unwrap();
        let parent = patches_root(dir.path()).join("skills");
        assert!(parent.is_dir(), "parent dir should exist after write_patch");
        remove_patch(&entry, dir.path()).unwrap();
        assert!(
            !has_patch(&entry, dir.path()),
            "patch file should be removed"
        );
        assert!(
            !parent.exists(),
            "empty parent dir should be removed after last patch is deleted"
        );
    }

    #[test]
    fn remove_patch_keeps_parent_dir_when_nonempty() {
        let dir = tempfile::tempdir().unwrap();
        let entry_a = github_entry("skill-a", EntityType::Skill);
        let entry_b = github_entry("skill-b", EntityType::Skill);
        write_patch(&entry_a, "patch a\n", dir.path()).unwrap();
        write_patch(&entry_b, "patch b\n", dir.path()).unwrap();
        let parent = patches_root(dir.path()).join("skills");
        remove_patch(&entry_a, dir.path()).unwrap();
        assert!(
            parent.is_dir(),
            "parent dir must survive when another patch still exists"
        );
        assert!(has_patch(&entry_b, dir.path()));
    }

    #[test]
    fn remove_dir_patch_nonexistent_is_noop() {
        let dir = tempfile::tempdir().unwrap();
        let entry = github_entry("ghost-skill", EntityType::Skill);
        remove_dir_patch(&entry, "missing.md", dir.path()).unwrap();
    }

    #[test]
    fn remove_dir_patch_cleans_up_empty_entry_dir() {
        let dir = tempfile::tempdir().unwrap();
        let entry = github_entry("lang-pro", EntityType::Skill);
        write_dir_patch(
            &dir_patch_path(&entry, "python.md", dir.path()),
            "patch text\n",
        )
        .unwrap();
        let entry_dir = patches_root(dir.path()).join("skills").join("lang-pro");
        assert!(
            entry_dir.is_dir(),
            "entry dir should exist after write_dir_patch"
        );
        remove_dir_patch(&entry, "python.md", dir.path()).unwrap();
        assert!(
            !entry_dir.exists(),
            "entry dir should be removed when it becomes empty"
        );
    }

    #[test]
    fn remove_dir_patch_keeps_entry_dir_when_nonempty() {
        let dir = tempfile::tempdir().unwrap();
        let entry = github_entry("lang-pro", EntityType::Skill);
        write_dir_patch(&dir_patch_path(&entry, "python.md", dir.path()), "p1\n").unwrap();
        write_dir_patch(&dir_patch_path(&entry, "typescript.md", dir.path()), "p2\n").unwrap();
        let entry_dir = patches_root(dir.path()).join("skills").join("lang-pro");
        remove_dir_patch(&entry, "python.md", dir.path()).unwrap();
        assert!(
            entry_dir.is_dir(),
            "entry dir must survive when another patch still exists"
        );
    }

    // --- missing-trailing-newline handling ---

    #[test]
    fn generate_patch_no_trailing_newline_original() {
        let p = generate_patch("old text", "new text\n", "test.md");
        assert!(!p.is_empty(), "patch should not be empty");
        for seg in p.split_inclusive('\n') {
            assert!(
                seg.ends_with('\n'),
                "every output line must end with \\n, got: {seg:?}"
            );
        }
    }

    #[test]
    fn generate_patch_no_trailing_newline_modified() {
        let p = generate_patch("old text\n", "new text", "test.md");
        assert!(!p.is_empty(), "patch should not be empty");
        for seg in p.split_inclusive('\n') {
            assert!(
                seg.ends_with('\n'),
                "every output line must end with \\n, got: {seg:?}"
            );
        }
    }

    #[test]
    fn generate_patch_both_inputs_no_trailing_newline() {
        let p = generate_patch("old line", "new line", "test.md");
        assert!(!p.is_empty(), "patch should not be empty");
        for seg in p.split_inclusive('\n') {
            assert!(
                seg.ends_with('\n'),
                "every output line must end with \\n, got: {seg:?}"
            );
        }
    }

    #[test]
    fn generate_patch_no_trailing_newline_roundtrip() {
        let orig = "line one\nline two";
        let modified = "line one\nline changed";
        let patch = generate_patch(orig, modified, "test.md");
        assert!(!patch.is_empty());
        let result = apply_patch_pure(orig, &patch).unwrap();
        // Compare modulo trailing newline: normalize_diff_line forces
        // newline-terminated patch lines, so the result may gain one.
        assert_eq!(
            result.trim_end_matches('\n'),
            modified.trim_end_matches('\n')
        );
    }

    #[test]
    fn apply_patch_pure_with_no_newline_marker() {
        let orig = "line1\nline2\n";
        let patch = concat!(
            "--- a/test.md\n",
            "+++ b/test.md\n",
            "@@ -1,2 +1,2 @@\n",
            " line1\n",
            "-line2\n",
            "+changed\n",
            "\\ No newline at end of file\n",
        );
        let result = apply_patch_pure(orig, patch).unwrap();
        assert_eq!(result, "line1\nchanged\n");
    }

    // --- walkdir ---

    #[test]
    fn walkdir_empty_directory_returns_empty() {
        let dir = tempfile::tempdir().unwrap();
        let files = walkdir(dir.path());
        assert!(
            files.is_empty(),
            "walkdir of empty dir should return empty vec"
        );
    }

    #[test]
    fn walkdir_nonexistent_directory_returns_empty() {
        let path = Path::new("/tmp/skillfile_test_does_not_exist_xyz_9999");
        let files = walkdir(path);
        assert!(
            files.is_empty(),
            "walkdir of non-existent dir should return empty vec"
        );
    }

    #[test]
    fn walkdir_nested_subdirectories() {
        let dir = tempfile::tempdir().unwrap();
        let sub = dir.path().join("sub");
        std::fs::create_dir_all(&sub).unwrap();
        std::fs::write(dir.path().join("top.txt"), "top").unwrap();
        std::fs::write(sub.join("nested.txt"), "nested").unwrap();
        let files = walkdir(dir.path());
        assert_eq!(files.len(), 2, "should find both files");
        let names: Vec<String> = files
            .iter()
            .map(|p| p.file_name().unwrap().to_string_lossy().into_owned())
            .collect();
        assert!(names.contains(&"top.txt".to_string()));
        assert!(names.contains(&"nested.txt".to_string()));
    }

    #[test]
    fn walkdir_results_are_sorted() {
        let dir = tempfile::tempdir().unwrap();
        std::fs::write(dir.path().join("z.txt"), "z").unwrap();
        std::fs::write(dir.path().join("a.txt"), "a").unwrap();
        std::fs::write(dir.path().join("m.txt"), "m").unwrap();
        let files = walkdir(dir.path());
        let sorted = {
            let mut v = files.clone();
            v.sort();
            v
        };
        assert_eq!(files, sorted, "walkdir results must be sorted");
    }

    // --- CRLF normalization and fuzzy matching ---

    #[test]
    fn apply_patch_pure_handles_crlf_original() {
        let orig_lf = "line1\nline2\nline3\n";
        let modified = "line1\nchanged\nline3\n";
        let patch = generate_patch(orig_lf, modified, "test.md");
        let orig_crlf = "line1\r\nline2\r\nline3\r\n";
        let result = apply_patch_pure(orig_crlf, &patch).unwrap();
        assert_eq!(result, modified);
    }

    #[test]
    fn apply_patch_pure_handles_crlf_patch() {
        let orig = "line1\nline2\nline3\n";
        let modified = "line1\nchanged\nline3\n";
        let patch_lf = generate_patch(orig, modified, "test.md");
        let patch_crlf = patch_lf.replace('\n', "\r\n");
        let result = apply_patch_pure(orig, &patch_crlf).unwrap();
        assert_eq!(result, modified);
    }

    #[test]
    fn apply_patch_pure_fuzzy_hunk_matching() {
        use std::fmt::Write;
        let mut orig = String::new();
        for i in 1..=20 {
            let _ = writeln!(orig, "line{i}");
        }
        // Header says line 5 but the context actually sits at line 7:
        // exercises HunkSearch::search_nearby.
        let patch = concat!(
            "--- a/test.md\n",
            "+++ b/test.md\n",
            "@@ -5,3 +5,3 @@\n",
            " line7\n",
            "-line8\n",
            "+CHANGED8\n",
            " line9\n",
        );
        let result = apply_patch_pure(&orig, patch).unwrap();
        assert!(
            result.contains("CHANGED8\n"),
            "fuzzy match should have applied the change"
        );
        assert!(
            !result.contains("line8\n"),
            "original line8 should have been replaced"
        );
    }

    #[test]
    fn apply_patch_pure_extends_beyond_eof_errors() {
        let orig = "line1\nline2\n";
        let patch = concat!(
            "--- a/test.md\n",
            "+++ b/test.md\n",
            "@@ -999,1 +999,1 @@\n",
            "-nonexistent_line\n",
            "+replacement\n",
        );
        let result = apply_patch_pure(orig, patch);
        assert!(
            result.is_err(),
            "applying a patch beyond EOF should return an error"
        );
    }
}