use anyhow::{Context, Result};
use fs2::FileExt;
use std::fs::OpenOptions;
use std::io::Read;
use std::path::Path;
use crate::{component, frontmatter, merge, recover, sessions, snapshot, template};
use crate::snapshot::find_project_root;
/// Determine the project root under which IPC patch files live for `canonical`.
///
/// Resolution order: git superproject of the parent directory, then git
/// toplevel, then the generic project-root marker search, and finally the
/// parent directory itself as a last resort.
fn resolve_ipc_project_root(canonical: &Path) -> std::path::PathBuf {
    let parent = canonical.parent().unwrap_or(Path::new("/"));
    crate::git::git_superproject_at(parent)
        .or_else(|| crate::git::git_toplevel_at(parent))
        .or_else(|| find_project_root(canonical))
        .unwrap_or_else(|| parent.to_path_buf())
}
/// Scan the body of component `component_name` inside `doc` for the first
/// `<!-- agent:boundary:<id> -->` marker that does NOT fall inside a code
/// range, and return its `<id>`.
///
/// Returns `None` when the document fails to parse, the component is absent,
/// or no well-formed, non-empty boundary id is found.
fn find_boundary_id(doc: &str, component_name: &str) -> Option<String> {
    let components = component::parse(doc).ok()?;
    let comp = components.iter().find(|c| c.name == component_name)?;
    // Slice of `doc` between the component's open and close tags.
    let content = &doc[comp.open_end..comp.close_start];
    // Code ranges are absolute (cs, ce) byte offsets into `doc`.
    let code_ranges = component::find_code_ranges(doc);
    let prefix = "<!-- agent:boundary:";
    let suffix = " -->";
    let mut search_from = 0;
    while let Some(start) = content[search_from..].find(prefix) {
        // Absolute position of this occurrence within `doc`, for range checks.
        let abs_start = comp.open_end + search_from + start;
        if code_ranges.iter().any(|&(cs, ce)| abs_start >= cs && abs_start < ce) {
            // Marker sits inside a code range — skip past it and keep looking.
            search_from += start + prefix.len();
            continue;
        }
        let id_start = search_from + start + prefix.len();
        if let Some(end) = content[id_start..].find(suffix) {
            let id = &content[id_start..id_start + end];
            if !id.is_empty() {
                return Some(id.to_string());
            }
        }
        // First marker outside code had an empty id or no closing suffix —
        // treat as not found rather than scanning further.
        break;
    }
    None
}
/// True for the well-known components whose patches are appended to existing
/// content rather than replacing it.
fn is_append_mode_component(name: &str) -> bool {
    name == "exchange" || name == "findings"
}
/// Compare the `exchange` component of `before` and `after` line-by-line and
/// return, in first-seen order and de-duplicated, every line that gained a
/// "❯ " prefix while its underlying text stayed identical.
pub fn extract_normalization_targets(before: &str, after: &str) -> Vec<String> {
    let before_comps = component::parse(before).unwrap_or_default();
    let after_comps = component::parse(after).unwrap_or_default();
    let exchange_before = before_comps
        .iter()
        .find(|c| c.name == "exchange")
        .map_or("", |c| c.content(before));
    let exchange_after = after_comps
        .iter()
        .find(|c| c.name == "exchange")
        .map_or("", |c| c.content(after));
    // Identical exchange bodies mean nothing was normalized.
    if exchange_before == exchange_after {
        return Vec::new();
    }
    // Pairwise line walk; trailing lines of the longer side are ignored.
    let mut seen = std::collections::HashSet::<String>::new();
    exchange_before
        .lines()
        .zip(exchange_after.lines())
        .filter_map(|(old_line, new_line)| {
            let bare = new_line.strip_prefix("❯ ")?;
            (old_line == bare && seen.insert(bare.to_string())).then(|| bare.to_string())
        })
        .collect()
}
/// Re-apply "❯ " user-prompt prefixes inside the `exchange` component of
/// `content`, using a line diff between `snapshot` (the last written state)
/// and `baseline` to discover which lines the user typed.
///
/// Returns `content` unchanged when it cannot be parsed, has no `exchange`
/// component, or the diff yields no user-added lines. Lines inside fenced
/// code blocks, and everything at/after the last boundary marker (the agent
/// region), are never prefixed.
pub fn normalize_user_prompts_in_exchange(content: &str, baseline: &str, snapshot: &str) -> String {
    let Ok(content_comps) = component::parse(content) else {
        return content.to_string();
    };
    let baseline_comps = component::parse(baseline).unwrap_or_default();
    let snap_comps = component::parse(snapshot).unwrap_or_default();
    let Some(exchange) = content_comps.iter().find(|c| c.name == "exchange") else {
        return content.to_string();
    };
    let baseline_exc = baseline_comps
        .iter()
        .find(|c| c.name == "exchange")
        .map(|e| e.content(baseline))
        .unwrap_or("");
    let snap_exc = snap_comps
        .iter()
        .find(|c| c.name == "exchange")
        .map(|e| e.content(snapshot))
        .unwrap_or("");
    let exc_content = exchange.content(content);
    let boundary_prefix = "<!-- agent:boundary:";
    // Byte offset of the LAST boundary-marker line within the exchange body;
    // text before it is the user region, text from it onward is agent output.
    let boundary_pos = {
        let mut pos = exc_content.len();
        let mut offset = 0;
        for line in exc_content.lines() {
            if line.trim().starts_with(boundary_prefix) {
                pos = offset;
            }
            // +1 accounts for the '\n' that `lines()` strips.
            offset += line.len() + 1;
        }
        pos
    };
    let content_user_region = &exc_content[..boundary_pos];
    let content_agent_region = &exc_content[boundary_pos..];
    // Remove boundary-marker lines so the diff below compares only real text.
    let strip = |s: &str| -> String {
        let filtered: Vec<&str> = s.lines()
            .filter(|l| !l.trim().starts_with(boundary_prefix))
            .collect();
        let mut out = filtered.join("\n");
        if s.ends_with('\n') && !out.is_empty() {
            out.push('\n');
        }
        out
    };
    let baseline_stripped = strip(baseline_exc);
    let snap_stripped = strip(snap_exc);
    use similar::{ChangeTag, TextDiff};
    // Detects the opening of a fenced code block: a run of >= 3 backticks or
    // tildes at the start of the trimmed line.
    fn fence_open(trimmed: &str) -> Option<(char, usize)> {
        let fc = trimmed.chars().next()?;
        if fc != '`' && fc != '~' {
            return None;
        }
        let fl = trimmed.chars().take_while(|&c| c == fc).count();
        if fl >= 3 { Some((fc, fl)) } else { None }
    }
    // A closing fence must use the same char, be at least as long as the
    // opener, and carry nothing else on the line.
    fn fence_close(trimmed: &str, fence_char: char, fence_len: usize) -> bool {
        let fc = trimmed.chars().next().unwrap_or('\0');
        if fc != fence_char { return false; }
        let fl = trimmed.chars().take_while(|&c| c == fc).count();
        fl >= fence_len && trimmed[fl..].trim().is_empty()
    }
    // Diff snapshot -> baseline: lines INSERTED on the baseline side, outside
    // code fences and not headings/HTML comments/prompts/fences/quotes, are
    // considered user-typed.
    let diff = TextDiff::from_lines(snap_stripped.as_str(), baseline_stripped.as_str());
    let mut user_added = std::collections::HashSet::<String>::new();
    let mut in_baseline_fence = false;
    let mut baseline_fence_char = '`';
    let mut baseline_fence_len = 3usize;
    for change in diff.iter_all_changes() {
        let line = change.value().trim_end_matches('\n');
        let trimmed = line.trim();
        let was_in_fence = in_baseline_fence;
        // Track fence state over the baseline side only (deletions belong to
        // the snapshot side and must not toggle it).
        if change.tag() != ChangeTag::Delete {
            if !in_baseline_fence {
                if let Some((fc, fl)) = fence_open(trimmed) {
                    in_baseline_fence = true;
                    baseline_fence_char = fc;
                    baseline_fence_len = fl;
                }
            } else if fence_close(trimmed, baseline_fence_char, baseline_fence_len) {
                in_baseline_fence = false;
            }
        }
        let is_fence_delim = fence_open(trimmed).is_some()
            || (was_in_fence && fence_close(trimmed, baseline_fence_char, baseline_fence_len));
        if change.tag() == ChangeTag::Insert
            && !in_baseline_fence
            && !trimmed.is_empty()
            && !trimmed.starts_with('❯')
            && !trimmed.starts_with("<!-- ")
            && !trimmed.starts_with('#')
            && !is_fence_delim
            && !trimmed.starts_with('"')
        {
            user_added.insert(line.to_string());
        }
    }
    if user_added.is_empty() {
        return content.to_string();
    }
    // Walk the user region of the live content, prefixing matched lines with
    // "❯ " while tracking the region's own fence state independently.
    let mut in_content_fence = false;
    let mut content_fence_char = '`';
    let mut content_fence_len = 3usize;
    let mut normalized_user = String::new();
    for line in content_user_region.lines() {
        let trimmed = line.trim();
        if !in_content_fence {
            if let Some((fc, fl)) = fence_open(trimmed) {
                in_content_fence = true;
                content_fence_char = fc;
                content_fence_len = fl;
            }
        } else if fence_close(trimmed, content_fence_char, content_fence_len) {
            in_content_fence = false;
        }
        if !in_content_fence && user_added.contains(line) {
            normalized_user.push_str("❯ ");
        }
        normalized_user.push_str(line);
        normalized_user.push('\n');
    }
    // The rebuild always terminates with '\n'; undo that when the original
    // user region did not end with one.
    if !content_user_region.is_empty() && !content_user_region.ends_with('\n') {
        normalized_user.truncate(normalized_user.len() - 1);
    }
    if content_user_region.is_empty() {
        normalized_user.clear();
    }
    let new_exc_content = format!("{}{}", normalized_user, content_agent_region);
    exchange.replace_content(content, &new_exc_content)
}
/// Safety cap on how many "❯ " prefixes one normalization pass may add.
pub const MAX_NORMALIZE_USER_LINES: usize = 50;
/// Threshold-guarded wrapper around `normalize_user_prompts_in_exchange`.
///
/// Counts how many new "❯ " prefixes the normalization would introduce; if
/// that exceeds `MAX_NORMALIZE_USER_LINES`, the snapshot/baseline pair is
/// presumed divergent: the current file is force-committed to absorb the
/// drift and `content` is returned unmodified for this cycle.
pub fn normalize_user_prompts_in_exchange_safe(
    content: &str,
    baseline: &str,
    snapshot: &str,
    file: &std::path::Path,
) -> String {
    let normalized = normalize_user_prompts_in_exchange(content, baseline, snapshot);
    // Number of lines beginning with "❯ " (line starts only, not substrings).
    fn count_prefixes(s: &str) -> usize {
        let mut n = s.matches("\n❯ ").count();
        if s.starts_with("❯ ") {
            n += 1;
        }
        n
    }
    let before = count_prefixes(content);
    let after = count_prefixes(&normalized);
    let applied = after.saturating_sub(before);
    crate::ops_log::log_op(file, &format!(
        "normalize_user_prompts snap_len={} base_len={} applied={}",
        snapshot.len(), baseline.len(), applied
    ));
    if applied > MAX_NORMALIZE_USER_LINES {
        eprintln!(
            "[normalize] WARN: {} ❯-prefixes would be applied, exceeds threshold {} for {} — \
            suspected snapshot/baseline divergence. Force-committing current file to absorb drift; \
            skipping ❯ prefix application this cycle.",
            applied, MAX_NORMALIZE_USER_LINES, file.display()
        );
        crate::ops_log::log_op(file, &format!(
            "normalize_threshold_exceeded applied={} threshold={} action=force_commit_and_passthrough",
            applied, MAX_NORMALIZE_USER_LINES
        ));
        // Best-effort commit: failure only warns; the prefix pass is still skipped.
        if let Err(e) = crate::git::commit(file) {
            eprintln!("[normalize] WARN: force-commit failed: {}", e);
        }
        return content.to_string();
    }
    normalized
}
/// Heuristic: does `baseline` lack content that the last-written `snapshot`
/// already contains? Using such a baseline as a merge base could resurrect
/// old text, so callers re-baseline on the live file when this returns true.
///
/// Boundary-marker lines are ignored throughout. When both documents parse
/// into components, each non-empty append-mode component of the snapshot must
/// appear (as a substring) in the baseline's matching component; a missing
/// component or missing content means stale. When parsing fails, falls back
/// to a whole-document prefix check.
pub fn is_stale_baseline(baseline: &str, snapshot: &str) -> bool {
    let base_clean = strip_boundary_for_dedup(baseline);
    let snap_clean = strip_boundary_for_dedup(snapshot);
    if base_clean == snap_clean {
        return false;
    }
    if let (Ok(snap_components), Ok(base_components)) = (
        component::parse(snapshot),
        component::parse(baseline),
    )
        && !snap_components.is_empty()
    {
        for snap_comp in &snap_components {
            // An explicit patch_mode wins; otherwise fall back to the
            // well-known append-mode component names.
            let is_append = snap_comp.patch_mode()
                .map(|m| m == "append")
                .unwrap_or(is_append_mode_component(&snap_comp.name));
            if !is_append {
                continue;
            }
            let snap_content = strip_boundary_for_dedup(
                snap_comp.content(snapshot).trim(),
            );
            if snap_content.is_empty() {
                continue;
            }
            if let Some(base_comp) = base_components.iter().find(|c| c.name == snap_comp.name) {
                let base_content = strip_boundary_for_dedup(
                    base_comp.content(baseline).trim(),
                );
                // Stale: the baseline no longer contains content the
                // snapshot had already appended.
                if !base_content.contains(&snap_content) {
                    return true;
                }
            } else {
                return true;
            }
        }
        return false;
    }
    // Unparseable docs: treat as stale unless the baseline simply extends
    // the snapshot.
    !base_clean.starts_with(&snap_clean)
}
/// Drop every `<!-- agent:boundary:… -->` marker line from `content` so that
/// equality comparisons see only substantive text. Note that any trailing
/// newline is not preserved by the join.
fn strip_boundary_for_dedup(content: &str) -> String {
    let kept: Vec<&str> = content
        .lines()
        .filter(|line| !line.trim_start().starts_with("<!-- agent:boundary:"))
        .collect();
    kept.join("\n")
}
/// Emit a dedup notice for `file` on stderr and append a timestamped record
/// (including a captured backtrace) to the shared dedup log under /tmp.
/// Failure to open or write the log file is silently ignored.
fn log_dedup(file: &Path, context: &str) {
    use std::io::Write;
    let msg = format!("[write] dedup: {} — {}", file.display(), context);
    eprintln!("{}", msg);
    let log = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open("/tmp/agent-doc-write-dedup.log");
    if let Ok(mut f) = log {
        // Seconds since the epoch; clock errors degrade to 0 rather than panic.
        let ts = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .map(|d| d.as_secs())
            .unwrap_or(0);
        let bt = std::backtrace::Backtrace::force_capture();
        let _ = writeln!(f, "[{}] {} backtrace:\n{}", ts, msg, bt);
    }
}
/// Verify that the tmux pane invoking us still owns the session recorded in
/// `file`'s frontmatter.
///
/// Every lookup failure (not in tmux, unreadable file, unparsable
/// frontmatter, no session id, no registry entry, unknown current pane) is
/// treated as "no claim" and succeeds; only a concrete pane mismatch errors.
fn verify_pane_ownership(file: &Path) -> Result<()> {
    if !sessions::in_tmux() {
        return Ok(());
    }
    let Ok(content) = std::fs::read_to_string(file) else {
        return Ok(());
    };
    let Ok((fm, _)) = frontmatter::parse(&content) else {
        return Ok(());
    };
    let Some(session_id) = fm.session else {
        return Ok(());
    };
    let Ok(Some(entry)) = sessions::lookup_entry(&session_id) else {
        return Ok(());
    };
    let Ok(current) = sessions::current_pane() else {
        return Ok(());
    };
    if entry.pane != current {
        anyhow::bail!(
            "pane ownership mismatch: session {} owned by pane {}, current pane is {}. \
            Use `agent-doc claim` to reclaim.",
            session_id, entry.pane, current
        );
    }
    Ok(())
}
/// Inline (append-mode) write: read an assistant response from stdin and
/// append it to `file` as a `## Assistant` section followed by a fresh
/// `## User` heading, merging if the file changed since `baseline`.
///
/// `baseline` defaults to the file's content at entry. Errors when the file
/// is missing, stdin is empty, pane ownership fails, or any I/O/merge step
/// fails.
pub fn run(file: &Path, baseline: Option<&str>) -> Result<()> {
    if !file.exists() {
        anyhow::bail!("file not found: {}", file.display());
    }
    verify_pane_ownership(file)?;
    let mut response = String::new();
    std::io::stdin()
        .read_to_string(&mut response)
        .context("failed to read response from stdin")?;
    if response.trim().is_empty() {
        anyhow::bail!("empty response — nothing to write");
    }
    let response = strip_assistant_heading(&response);
    let content_at_start = std::fs::read_to_string(file)
        .with_context(|| format!("failed to read {}", file.display()))?;
    let base = baseline.unwrap_or(&content_at_start);
    // Persist recovery artifacts before mutating anything on disk.
    recover::save_pending(file, &response)?;
    snapshot::save_pre_response(file, base)?;
    // Build "our" version: base + assistant section + fresh user prompt.
    let mut content_ours = base.to_string();
    if !content_ours.ends_with('\n') {
        content_ours.push('\n');
    }
    content_ours.push_str("## Assistant\n\n");
    content_ours.push_str(&response);
    if !response.ends_with('\n') {
        content_ours.push('\n');
    }
    content_ours.push_str("\n## User\n\n");
    let doc_lock = acquire_doc_lock(file)?;
    // Re-read under the lock: another writer may have changed the file.
    let content_current = std::fs::read_to_string(file)
        .with_context(|| format!("failed to re-read {}", file.display()))?;
    let final_content = if content_current == base {
        content_ours.clone()
    } else {
        eprintln!("[write] File was modified during response generation. Merging...");
        merge::merge_contents(base, &content_ours, &content_current)?
    };
    // Skip the write entirely when only boundary markers differ.
    if strip_boundary_for_dedup(&final_content) == strip_boundary_for_dedup(&content_current) {
        log_dedup(file, "no changes after merge, skipping write");
        drop(doc_lock);
        recover::clear_pending(file)?;
        return Ok(());
    }
    atomic_write(file, &final_content)?;
    snapshot::save(file, &final_content)?;
    crate::ops_log::log_cycle(file, "write_inline", Some(&content_ours), Some(&final_content));
    crate::ops_log::log_op(file, &format!(
        "write_inline_done file={} snap_len={}",
        file.display(), final_content.len()
    ));
    drop(doc_lock);
    recover::clear_pending(file)?;
    eprintln!("[write] Response appended to {}", file.display());
    Ok(())
}
/// Template-mode write: parse patch blocks from stdin and apply them to the
/// file's template components, merging if the file changed since `baseline`.
///
/// `baseline` defaults to the file's content at entry. Errors when the file
/// is missing, stdin is empty, pane ownership fails, the response contains
/// neither patches nor content, or any I/O/merge step fails.
pub fn run_template(file: &Path, baseline: Option<&str>) -> Result<()> {
    if !file.exists() {
        anyhow::bail!("file not found: {}", file.display());
    }
    verify_pane_ownership(file)?;
    let mut response = String::new();
    std::io::stdin()
        .read_to_string(&mut response)
        .context("failed to read response from stdin")?;
    if response.trim().is_empty() {
        anyhow::bail!("empty response — nothing to write");
    }
    // Persist the raw response first for crash recovery.
    recover::save_pending(file, &response)?;
    let (mut patches, unmatched) = template::parse_patches(&response)
        .context("failed to parse patch blocks from response")?;
    sanitize_patches(&mut patches);
    if patches.is_empty() && unmatched.trim().is_empty() {
        anyhow::bail!("no patch blocks or content found in response");
    }
    let content_at_start = std::fs::read_to_string(file)
        .with_context(|| format!("failed to read {}", file.display()))?;
    let base = baseline.unwrap_or(&content_at_start);
    snapshot::save_pre_response(file, base)?;
    let content_ours = template::apply_patches(base, &patches, &unmatched, file)
        .context("failed to apply template patches")?;
    let doc_lock = acquire_doc_lock(file)?;
    // Re-read under the lock to detect concurrent modification.
    let content_current = std::fs::read_to_string(file)
        .with_context(|| format!("failed to re-read {}", file.display()))?;
    let final_content = if content_current == base {
        content_ours.clone()
    } else {
        eprintln!("[write] File was modified during response generation. Merging...");
        merge::merge_contents(base, &content_ours, &content_current)?
    };
    // No-op writes (boundary-marker-only differences) are skipped.
    if strip_boundary_for_dedup(&final_content) == strip_boundary_for_dedup(&content_current) {
        log_dedup(file, "no changes after merge, skipping write");
        drop(doc_lock);
        recover::clear_pending(file)?;
        return Ok(());
    }
    atomic_write(file, &final_content)?;
    snapshot::save(file, &final_content)?;
    crate::ops_log::log_cycle(file, "write_template", Some(&content_ours), Some(&final_content));
    crate::ops_log::log_op(file, &format!(
        "write_template_done file={} snap_len={} patches={}",
        file.display(), final_content.len(), patches.len()
    ));
    drop(doc_lock);
    recover::clear_pending(file)?;
    eprintln!(
        "[write] Template patches applied to {} ({} components patched)",
        file.display(),
        patches.len()
    );
    Ok(())
}
/// Stream-write entry point: read a patch response from stdin, then try to
/// deliver it via IPC (socket first, then a patch-file handoff) before
/// falling back to a direct disk write with CRDT merging.
///
/// `baseline` overrides the merge base (defaults to the file's current
/// content); `force_disk` skips the IPC path entirely and also cleans any
/// stale queued patch file to prevent a double-write.
///
/// NOTE: on the patch-file handoff this function terminates the process with
/// exit code 75 (EX_TEMPFAIL) after committing the working tree, signalling
/// the caller that the patch is queued for the plugin.
pub fn run_stream(file: &Path, baseline: Option<&str>, force_disk: bool) -> Result<()> {
    let t_total = std::time::Instant::now();
    if !file.exists() {
        anyhow::bail!("file not found: {}", file.display());
    }
    verify_pane_ownership(file)?;
    let mut response = String::new();
    std::io::stdin()
        .read_to_string(&mut response)
        .context("failed to read response from stdin")?;
    if response.trim().is_empty() {
        anyhow::bail!("empty response — nothing to write");
    }
    // Persist the raw response so it can be recovered after a crash.
    recover::save_pending(file, &response)?;
    let (mut patches, unmatched) = template::parse_patches(&response)
        .context("failed to parse patch blocks from response")?;
    sanitize_patches(&mut patches);
    if patches.is_empty() && unmatched.trim().is_empty() {
        anyhow::bail!("no patch blocks or content found in response");
    }
    if patches.is_empty() && !unmatched.trim().is_empty() {
        // Free-form content headed for a component-less file likely won't
        // land where the user expects — warn loudly.
        let current = std::fs::read_to_string(file)
            .with_context(|| format!("failed to read {}", file.display()))?;
        // FIX: was `crate::component::parse(¤t)` — mojibake for `&current`.
        let comps = crate::component::parse(&current).unwrap_or_default();
        if comps.is_empty() {
            eprintln!(
                "[write] WARNING: {} bytes of content but file has no template components — \
                content may not be applied correctly. Consider running `agent-doc init` \
                with --mode template first.",
                unmatched.trim().len()
            );
        }
    }
    {
        // Snapshot the file exactly as it exists before we touch it.
        let pre_content = std::fs::read_to_string(file)
            .with_context(|| format!("failed to read {} for pre-response", file.display()))?;
        snapshot::save_pre_response(file, &pre_content)?;
    }
    if !force_disk {
        let canonical = file.canonicalize()?;
        let project_root = resolve_ipc_project_root(&canonical);
        let patches_dir = project_root.join(".agent-doc/patches");
        if patches_dir.exists() {
            let content_at_start = std::fs::read_to_string(file)
                .with_context(|| format!("failed to read {}", file.display()))?;
            let base = baseline.unwrap_or(&content_at_start);
            let mode_overrides = std::collections::HashMap::new();
            let t_apply = std::time::Instant::now();
            let mut content_ours = template::apply_patches_with_overrides(
                base, &patches, &unmatched, file, &mode_overrides,
            ).context("failed to apply patches for snapshot")?;
            let elapsed_apply = t_apply.elapsed().as_millis();
            if elapsed_apply > 0 {
                eprintln!("[perf] apply_patches_with_overrides: {}ms", elapsed_apply);
            }
            // Stale-baseline guard: if the default base predates content the
            // snapshot already captured, re-apply against the live file.
            // FIX: was `is_stale_baseline(base, ¤t_snap)` — mojibake for
            // `&current_snap`.
            if baseline.is_none()
                && let Ok(Some(current_snap)) = snapshot::load(file)
                && is_stale_baseline(base, &current_snap)
            {
                eprintln!(
                    "[write] WARNING: baseline missing snapshot content — stale baseline detected, using current file as baseline"
                );
                crate::ops_log::log_op(file, &format!(
                    "stale_baseline_detected file={} base_len={} snap_len={} file_len={}",
                    file.display(), base.len(), current_snap.len(), content_at_start.len()
                ));
                content_ours = template::apply_patches_with_overrides(
                    &content_at_start, &patches, &unmatched, file, &mode_overrides,
                ).context("failed to apply patches with fresh baseline")?;
            }
            // Normalize ❯ user-prompt prefixes and record which lines were
            // touched so the IPC payload can replay the normalization.
            let normalize_prefix_lines: Vec<String> =
                if let Ok(Some(ref snap)) = snapshot::load(file) {
                    let before = content_ours.clone();
                    content_ours = normalize_user_prompts_in_exchange_safe(&content_ours, base, snap, file);
                    extract_normalization_targets(&before, &content_ours)
                } else {
                    vec![]
                };
            if strip_boundary_for_dedup(&content_ours) == strip_boundary_for_dedup(&content_at_start) {
                log_dedup(file, "no changes after merge, skipping write");
                recover::clear_pending(file)?;
                return Ok(());
            }
            let t_ipc = std::time::Instant::now();
            let norm_lines_opt = if normalize_prefix_lines.is_empty() { None } else { Some(normalize_prefix_lines.as_slice()) };
            if try_ipc(file, &patches, &unmatched, None, baseline, Some(&content_ours), norm_lines_opt)? {
                // Socket delivery succeeded — log, fire hooks, and finish.
                let elapsed_ipc = t_ipc.elapsed().as_millis();
                if elapsed_ipc > 0 {
                    eprintln!("[perf] try_ipc: {}ms", elapsed_ipc);
                }
                let elapsed_total = t_total.elapsed().as_millis();
                if elapsed_total > 0 {
                    eprintln!("[perf] run_stream total: {}ms", elapsed_total);
                }
                crate::ops_log::log_op(file, &format!(
                    "ipc_write_consumed file={} patches={}",
                    file.display(), patches.len()
                ));
                let session_id = frontmatter::read_session_id(file).unwrap_or_default();
                crate::hooks::fire_post_write(file, &session_id, patches.len());
                crate::hooks::fire_doc_event(file, "post_write");
                recover::clear_pending(file)?;
                return Ok(());
            }
            // Socket IPC unavailable or timed out — hand the patch off via a
            // content-hash-named JSON file for the plugin to consume later.
            let hash = snapshot::doc_hash(file)?;
            let patch_file = patches_dir.join(format!("{}.json", hash));
            let raw_doc = std::fs::read_to_string(file).unwrap_or_default();
            let current_doc_for_boundary = template::reposition_boundary_to_end_with_summary(&raw_doc, file.file_stem().and_then(|s| s.to_str()));
            let norm_lines_for_timeout = if normalize_prefix_lines.is_empty() { None } else { Some(normalize_prefix_lines.as_slice()) };
            let ipc_patches: Vec<serde_json::Value> = patches
                .iter()
                .filter(|p| p.name != "frontmatter")
                .map(|p| {
                    // Apply the ❯ normalization to append-mode patch content
                    // so the plugin writes the normalized form.
                    let content = match norm_lines_for_timeout {
                        Some(prefix_lines) if !prefix_lines.is_empty() && is_append_mode_component(&p.name) => {
                            normalize_patch_content(&p.content, prefix_lines)
                        }
                        _ => p.content.clone(),
                    };
                    let mut patch_json = serde_json::json!({
                        "component": p.name,
                        "content": content,
                    });
                    // FIX: was `find_boundary_id(¤t_doc_for_boundary, …)` —
                    // mojibake for `&current_doc_for_boundary`.
                    if let Some(bid) = find_boundary_id(&current_doc_for_boundary, &p.name) {
                        patch_json["boundary_id"] = serde_json::Value::String(bid);
                    } else if is_append_mode_component(&p.name) {
                        patch_json["ensure_boundary"] = serde_json::Value::Bool(true);
                    }
                    patch_json
                })
                .collect();
            let patch_id = uuid::Uuid::new_v4().to_string();
            let mut ipc_payload = serde_json::json!({
                "file": canonical.to_string_lossy(),
                "patches": ipc_patches,
                "unmatched": unmatched.trim(),
                "baseline": baseline.unwrap_or(""),
                "reposition_boundary": true,
            });
            ipc_payload["patch_id"] = serde_json::Value::String(patch_id);
            if let Some(lines) = norm_lines_for_timeout
                && !lines.is_empty()
            {
                ipc_payload["normalize_prefix_lines"] = serde_json::Value::Array(
                    lines.iter().map(|l| serde_json::Value::String(l.clone())).collect()
                );
            }
            // Frontmatter travels as its own payload field, not as a patch.
            let frontmatter_yaml: Option<String> = patches
                .iter()
                .find(|p| p.name == "frontmatter")
                .map(|p| p.content.trim().to_string());
            if let Some(ref yaml) = frontmatter_yaml {
                ipc_payload["frontmatter"] = serde_json::Value::String(yaml.clone());
            }
            atomic_write(
                &patch_file,
                &serde_json::to_string_pretty(&ipc_payload)?,
            )?;
            eprintln!("[write] IPC timeout — response saved as patch, awaiting plugin");
            // Best-effort: leave the working tree and snapshot in the applied
            // state before exiting, so nothing is lost if the plugin never runs.
            if let Err(e) = atomic_write(file, &content_ours) {
                eprintln!("[write] WARNING: failed to write content_ours to working tree before exit(75): {}", e);
            }
            if let Err(e) = snapshot::save(file, &content_ours) {
                eprintln!("[write] WARNING: snapshot save before exit(75) failed: {}", e);
            }
            if crate::git::is_in_git_repo(file)
                && let Err(e) = crate::git::commit(file) {
                eprintln!("[commit] warning: commit before exit(75) failed: {}", e);
            }
            // EX_TEMPFAIL: tell the caller the patch is queued for the plugin.
            std::process::exit(75);
        }
    }
    if force_disk
        && let Ok(canonical) = file.canonicalize() {
        // Forced disk writes must not leave a queued patch behind, or the
        // plugin would apply it again later (double-write).
        let project_root = resolve_ipc_project_root(&canonical);
        let patches_dir = project_root.join(".agent-doc/patches");
        if let Ok(hash) = snapshot::doc_hash(file) {
            let patch_file = patches_dir.join(format!("{}.json", hash));
            if patch_file.exists() {
                eprintln!("[write] cleaning stale IPC patch file to prevent double-write");
                if let Ok(stale_content) = std::fs::read_to_string(&patch_file)
                    && let Ok(stale_json) = serde_json::from_str::<serde_json::Value>(&stale_content)
                    && let Some(patch_id) = stale_json.get("patch_id").and_then(|v| v.as_str())
                {
                    // Mark the patch id as claimed so a racing plugin skips it.
                    let claimed_dir = project_root.join(".agent-doc/claimed-patches");
                    match std::fs::create_dir_all(&claimed_dir) {
                        Err(e) => eprintln!("[write] WARNING: failed to create claimed-patches dir: {e}"),
                        Ok(_) => {
                            let sentinel = claimed_dir.join(patch_id);
                            if let Err(e) = std::fs::write(&sentinel, "") {
                                eprintln!("[write] WARNING: failed to write patch sentinel: {e}");
                            } else {
                                eprintln!("[write] patch_id {} claimed (sentinel written)", &patch_id[..8]);
                            }
                        }
                    }
                }
                let _ = std::fs::remove_file(&patch_file);
            }
        }
    }
    // Direct disk-write path (no IPC available, or force_disk).
    let t_disk = std::time::Instant::now();
    let content_at_start = std::fs::read_to_string(file)
        .with_context(|| format!("failed to read {}", file.display()))?;
    let base = baseline.unwrap_or(&content_at_start);
    let mode_overrides = std::collections::HashMap::new();
    let t_apply2 = std::time::Instant::now();
    let mut content_ours = template::apply_patches_with_overrides(
        base, &patches, &unmatched, file, &mode_overrides,
    ).context("failed to apply template patches")?;
    let elapsed_apply2 = t_apply2.elapsed().as_millis();
    if elapsed_apply2 > 0 {
        eprintln!("[perf] apply_patches_with_overrides (disk): {}ms", elapsed_apply2);
    }
    if let Some(fm_patch) = patches.iter().find(|p| p.name == "frontmatter") {
        content_ours = crate::frontmatter::merge_fields(&content_ours, &fm_patch.content)
            .context("failed to merge frontmatter patch")?;
    }
    if let Ok(Some(snap)) = snapshot::load(file) {
        content_ours = normalize_user_prompts_in_exchange_safe(&content_ours, base, &snap, file);
    }
    let doc_lock = acquire_doc_lock(file)?;
    // Re-read under the lock to detect concurrent modification.
    let content_current = std::fs::read_to_string(file)
        .with_context(|| format!("failed to re-read {}", file.display()))?;
    let (final_content, crdt_state) = if content_current == base {
        let doc = crate::crdt::CrdtDoc::from_text(&content_ours);
        (content_ours.clone(), doc.encode_state())
    } else {
        eprintln!("[write] File was modified during response generation. CRDT merging...");
        let base_state = crate::crdt::CrdtDoc::from_text(base).encode_state();
        merge::merge_contents_crdt(Some(&base_state), &content_ours, &content_current)?
    };
    if strip_boundary_for_dedup(&final_content) == strip_boundary_for_dedup(&content_current) {
        log_dedup(file, "no changes after merge, skipping write");
        drop(doc_lock);
        recover::clear_pending(file)?;
        let elapsed_total = t_total.elapsed().as_millis();
        if elapsed_total > 0 {
            eprintln!("[perf] run_stream total: {}ms", elapsed_total);
        }
        return Ok(());
    }
    atomic_write(file, &final_content)?;
    snapshot::save(file, &final_content)?;
    snapshot::save_crdt(file, &crdt_state)?;
    crate::ops_log::log_cycle(file, "write_stream", Some(&content_ours), Some(&final_content));
    crate::ops_log::log_op(file, &format!(
        "write_stream_done file={} snap_len={}",
        file.display(), final_content.len()
    ));
    drop(doc_lock);
    recover::clear_pending(file)?;
    let elapsed_disk = t_disk.elapsed().as_millis();
    if elapsed_disk > 0 {
        eprintln!("[perf] disk_write_path: {}ms", elapsed_disk);
    }
    let elapsed_total = t_total.elapsed().as_millis();
    if elapsed_total > 0 {
        eprintln!("[perf] run_stream total: {}ms", elapsed_total);
    }
    eprintln!(
        "[write] Stream patches applied to {} ({} components patched, CRDT)",
        file.display(),
        patches.len()
    );
    Ok(())
}
/// IPC-first write: parse patches from stdin, queue them as a JSON patch file
/// keyed by the document hash, wait up to 2s for the plugin to consume it,
/// and fall back to a direct CRDT-merged disk write on timeout.
///
/// `baseline` overrides the merge base used for the fallback path (defaults
/// to the file's current content).
pub fn run_ipc(file: &Path, baseline: Option<&str>) -> Result<()> {
    if !file.exists() {
        anyhow::bail!("file not found: {}", file.display());
    }
    let mut response = String::new();
    std::io::stdin()
        .read_to_string(&mut response)
        .context("failed to read response from stdin")?;
    if response.trim().is_empty() {
        anyhow::bail!("empty response — nothing to write");
    }
    // Persist the raw response for crash recovery before doing anything else.
    recover::save_pending(file, &response)?;
    let (mut patches, unmatched) = template::parse_patches(&response)
        .context("failed to parse patch blocks from response")?;
    sanitize_patches(&mut patches);
    if patches.is_empty() && unmatched.trim().is_empty() {
        anyhow::bail!("no patch blocks or content found in response");
    }
    let canonical = file.canonicalize()?;
    let hash = snapshot::doc_hash(file)?;
    let project_root = resolve_ipc_project_root(&canonical);
    let patches_dir = project_root.join(".agent-doc/patches");
    std::fs::create_dir_all(&patches_dir)?;
    let patch_file = patches_dir.join(format!("{}.json", hash));
    let raw_doc = std::fs::read_to_string(file).unwrap_or_default();
    let current_doc_for_boundary = template::reposition_boundary_to_end_with_summary(&raw_doc, file.file_stem().and_then(|s| s.to_str()));
    let mut frontmatter_yaml: Option<String> = None;
    let ipc_patches: Vec<serde_json::Value> = patches
        .iter()
        .filter_map(|p| {
            if p.name == "frontmatter" {
                // Frontmatter travels as a dedicated payload field, not a patch.
                frontmatter_yaml = Some(p.content.trim().to_string());
                None
            } else {
                let mut patch_json = serde_json::json!({
                    "component": p.name,
                    "content": p.content,
                });
                // FIX: was `find_boundary_id(¤t_doc_for_boundary, …)` —
                // mojibake for `&current_doc_for_boundary`.
                if let Some(bid) = find_boundary_id(&current_doc_for_boundary, &p.name) {
                    patch_json["boundary_id"] = serde_json::Value::String(bid);
                } else if is_append_mode_component(&p.name) {
                    patch_json["ensure_boundary"] = serde_json::Value::Bool(true);
                }
                Some(patch_json)
            }
        })
        .collect();
    let mut ipc_payload = serde_json::json!({
        "file": canonical.to_string_lossy(),
        "patches": ipc_patches,
        "unmatched": unmatched.trim(),
        "baseline": baseline.unwrap_or(""),
    });
    if let Some(ref yaml) = frontmatter_yaml {
        ipc_payload["frontmatter"] = serde_json::Value::String(yaml.clone());
    }
    atomic_write(
        &patch_file,
        &serde_json::to_string_pretty(&ipc_payload)?,
    )?;
    eprintln!(
        "[write] IPC patch written to {} ({} components)",
        patch_file.display(),
        patches.len()
    );
    // Poll for the plugin to consume (delete) the patch file.
    let timeout = std::time::Duration::from_secs(2);
    let poll_interval = std::time::Duration::from_millis(100);
    let start = std::time::Instant::now();
    while start.elapsed() < timeout {
        if !patch_file.exists() {
            // Plugin applied the patch — resync our snapshots from the file.
            let content = std::fs::read_to_string(file)
                .with_context(|| format!("failed to read {} after IPC", file.display()))?;
            snapshot::save(file, &content)?;
            crate::ops_log::log_op(file, &format!(
                "snapshot_saved_file_ipc file={} snap_len={}",
                file.display(), content.len()
            ));
            let crdt_doc = crate::crdt::CrdtDoc::from_text(&content);
            snapshot::save_crdt(file, &crdt_doc.encode_state())?;
            recover::clear_pending(file)?;
            eprintln!("[write] IPC patch consumed by plugin — snapshot updated");
            return Ok(());
        }
        std::thread::sleep(poll_interval);
    }
    // Timeout: withdraw the queued patch and apply it ourselves.
    eprintln!("[write] IPC timeout ({}s) — falling back to direct write", timeout.as_secs());
    let _ = std::fs::remove_file(&patch_file);
    let content_at_start = std::fs::read_to_string(file)
        .with_context(|| format!("failed to read {}", file.display()))?;
    let base = baseline.unwrap_or(&content_at_start);
    let mut content_ours = template::apply_patches(base, &patches, &unmatched, file)
        .context("failed to apply template patches")?;
    if let Some(ref yaml) = frontmatter_yaml {
        content_ours = crate::frontmatter::merge_fields(&content_ours, yaml)
            .context("failed to apply frontmatter patch")?;
    }
    let doc_lock = acquire_doc_lock(file)?;
    // Re-read under the lock to detect concurrent modification.
    let content_current = std::fs::read_to_string(file)
        .with_context(|| format!("failed to re-read {}", file.display()))?;
    let (final_content, crdt_state) = if content_current == base {
        let doc = crate::crdt::CrdtDoc::from_text(&content_ours);
        (content_ours.clone(), doc.encode_state())
    } else {
        eprintln!("[write] File was modified during response generation. CRDT merging...");
        let crdt_state = snapshot::load_crdt(file)?;
        merge::merge_contents_crdt(crdt_state.as_deref(), &content_ours, &content_current)?
    };
    atomic_write(file, &final_content)?;
    snapshot::save(file, &final_content)?;
    snapshot::save_crdt(file, &crdt_state)?;
    drop(doc_lock);
    recover::clear_pending(file)?;
    eprintln!(
        "[write] Stream patches applied to {} ({} components patched, CRDT fallback)",
        file.display(),
        patches.len()
    );
    Ok(())
}
/// Programmatic variant of `run`: append `response` (already in memory) to
/// `file` as a `## Assistant` section plus a fresh `## User` heading, merging
/// if the file changes between the initial read and the locked re-read.
pub fn apply_append_from_string(file: &Path, response: &str) -> Result<()> {
    let response = strip_assistant_heading(response);
    let content = std::fs::read_to_string(file)
        .with_context(|| format!("failed to read {}", file.display()))?;
    let mut content_ours = content.clone();
    if !content_ours.ends_with('\n') {
        content_ours.push('\n');
    }
    content_ours.push_str("## Assistant\n\n");
    content_ours.push_str(&response);
    if !response.ends_with('\n') {
        content_ours.push('\n');
    }
    content_ours.push_str("\n## User\n\n");
    let doc_lock = acquire_doc_lock(file)?;
    // Re-read under the lock to catch concurrent writers.
    let content_current = std::fs::read_to_string(file)
        .with_context(|| format!("failed to re-read {}", file.display()))?;
    let final_content = if content_current == content {
        content_ours.clone()
    } else {
        merge::merge_contents(&content, &content_ours, &content_current)?
    };
    atomic_write(file, &final_content)?;
    // NOTE(review): snapshots content_ours rather than final_content — after
    // a merge the snapshot diverges from what was written to disk. `run`
    // saves final_content at this point; confirm this asymmetry is intentional.
    snapshot::save(file, &content_ours)?;
    drop(doc_lock);
    eprintln!("[write] Response appended to {}", file.display());
    Ok(())
}
/// Programmatic variant of `run_template`: parse patch blocks from `response`
/// (already in memory) and apply them to `file`'s template components,
/// merging if the file changes between the initial read and the locked
/// re-read.
pub fn apply_template_from_string(file: &Path, response: &str) -> Result<()> {
    let content = std::fs::read_to_string(file)
        .with_context(|| format!("failed to read {}", file.display()))?;
    let (mut patches, unmatched) = template::parse_patches(response)
        .context("failed to parse patch blocks from response")?;
    sanitize_patches(&mut patches);
    let content_ours = template::apply_patches(&content, &patches, &unmatched, file)
        .context("failed to apply template patches")?;
    let doc_lock = acquire_doc_lock(file)?;
    // Re-read under the lock to catch concurrent writers.
    let content_current = std::fs::read_to_string(file)
        .with_context(|| format!("failed to re-read {}", file.display()))?;
    let final_content = if content_current == content {
        content_ours.clone()
    } else {
        merge::merge_contents(&content, &content_ours, &content_current)?
    };
    atomic_write(file, &final_content)?;
    // NOTE(review): snapshots content_ours rather than final_content — after
    // a merge the snapshot diverges from what was written to disk.
    // `run_template` saves final_content here; confirm this is intentional.
    snapshot::save(file, &content_ours)?;
    drop(doc_lock);
    eprintln!("[write] Template patches applied to {}", file.display());
    Ok(())
}
/// Read and consume the ack-content sidecar written by the plugin for
/// `patch_id`, if one exists. The sidecar is deleted after a successful read
/// (removal is best-effort).
fn read_ack_content_sidecar(project_root: &Path, patch_id: &str) -> Result<Option<String>> {
    let path = project_root
        .join(".agent-doc/ack-content")
        .join(format!("{patch_id}.md"));
    match path.exists() {
        false => Ok(None),
        true => {
            let body = std::fs::read_to_string(&path)
                .with_context(|| format!("failed to read ack-content sidecar {path:?}"))?;
            // Consume the sidecar so it is not re-applied on a later cycle.
            let _ = std::fs::remove_file(&path);
            Ok(Some(body))
        }
    }
}
/// Try to deliver template patches to a live editor plugin over IPC
/// instead of writing the document directly.
///
/// Delivery order: (1) the project's Unix-socket listener, if active;
/// (2) file-based IPC via `.agent-doc/patches/<doc-hash>.json`, polled
/// by `write_ipc_and_poll`. Returns `Ok(true)` when a plugin accepted
/// the write (the snapshot is refreshed here so later merges see the
/// applied state) and `Ok(false)` when the caller must fall back to a
/// direct disk write.
pub fn try_ipc(
    file: &Path,
    patches: &[crate::template::PatchBlock],
    unmatched: &str,
    frontmatter_yaml: Option<&str>,
    baseline: Option<&str>,
    content_ours: Option<&str>,
    normalize_prefix_lines: Option<&[String]>,
) -> Result<bool> {
    let canonical = file.canonicalize()?;
    let hash = snapshot::doc_hash(file)?;
    let project_root = resolve_ipc_project_root(&canonical);
    if crate::ipc_socket::is_listener_active(&project_root) {
        // A leftover file-IPC patch for the same doc hash would be applied a
        // second time by the plugin's file watcher — remove it first.
        let patches_dir_for_socket = project_root.join(".agent-doc/patches");
        if patches_dir_for_socket.exists() {
            let stale_patch_file = patches_dir_for_socket.join(format!("{}.json", hash));
            if stale_patch_file.exists() {
                eprintln!("[write] cleaning stale patch file before socket send (prevent double-apply)");
                if let Err(e) = std::fs::remove_file(&stale_patch_file) {
                    eprintln!("[write] WARNING: failed to clean stale patch file: {}", e);
                }
            }
        }
        let ipc_patches_json = build_ipc_patches_json(file, patches, unmatched, normalize_prefix_lines)?;
        // If the builder synthesized patches out of `unmatched`, also sending
        // the raw unmatched text would duplicate it on the plugin side.
        let effective_unmatched_socket = if patches.is_empty() && !ipc_patches_json.is_empty() {
            eprintln!("[write] synthesis consumed unmatched content — clearing from socket payload (prevent double-apply)");
            ""
        } else {
            unmatched.trim()
        };
        let patch_id = uuid::Uuid::new_v4().to_string();
        let mut socket_payload = serde_json::json!({
            "type": "patch",
            "file": canonical.to_string_lossy(),
            "patches": ipc_patches_json,
            "unmatched": effective_unmatched_socket,
            "baseline": baseline.unwrap_or(""),
            "reposition_boundary": true,
        });
        socket_payload["patch_id"] = serde_json::Value::String(patch_id.clone());
        if let Some(yaml) = frontmatter_yaml {
            socket_payload["frontmatter"] = serde_json::Value::String(yaml.to_string());
        }
        if let Some(lines) = normalize_prefix_lines
            && !lines.is_empty()
        {
            socket_payload["normalize_prefix_lines"] = serde_json::Value::Array(
                lines.iter().map(|l| serde_json::Value::String(l.clone())).collect()
            );
            // With no component patches there is nothing for the plugin to
            // normalize against, so ship the fully rendered document instead.
            if ipc_patches_json.is_empty() && let Some(ours) = content_ours {
                socket_payload["fullContent"] = serde_json::Value::String(ours.to_string());
            }
        }
        match crate::ipc_socket::send_message(&project_root, &socket_payload) {
            Ok(Some(_ack)) => {
                eprintln!("[write] socket IPC patch delivered");
                // Prefer the exact post-apply content the plugin left in the
                // ack sidecar; otherwise wait briefly for the plugin's write
                // to settle and re-read the file from disk.
                let (snap_source, snap_content) = match read_ack_content_sidecar(&project_root, &patch_id) {
                    Ok(Some(content)) => {
                        eprintln!("[write] snapshot from ack-content sidecar ({} bytes)", content.len());
                        ("ack_content_sidecar", content)
                    }
                    _ => {
                        std::thread::sleep(std::time::Duration::from_millis(200));
                        eprintln!("[write] snapshot from file read (ack-content not available)");
                        let content = std::fs::read_to_string(file)
                            .with_context(|| format!("failed to read {} after socket IPC", file.display()))?;
                        ("file_read", content)
                    }
                };
                crate::ops_log::log_op(file, &format!(
                    "ipc_socket_delivered file={} snap_source={} snap_len={}",
                    file.display(), snap_source, snap_content.len()
                ));
                // Snapshot failure is non-fatal: commit-time divergence
                // detection recovers from a stale snapshot.
                if let Err(e) = snapshot::save(file, &snap_content) {
                    eprintln!(
                        "[write] WARNING: IPC write succeeded but snapshot save failed: {}. \
                        Commit will auto-recover via divergence detection.",
                        e
                    );
                    crate::ops_log::log_op(file, &format!(
                        "snapshot_save_failed_after_ipc file={} error={}",
                        file.display(), e
                    ));
                } else {
                    crate::ops_log::log_op(file, &format!(
                        "snapshot_saved_socket_ipc file={} snap_len={}",
                        file.display(), snap_content.len()
                    ));
                    // Keep the CRDT replica in step with the text snapshot.
                    let crdt_doc = crate::crdt::CrdtDoc::from_text(&snap_content);
                    if let Err(e) = snapshot::save_crdt(file, &crdt_doc.encode_state()) {
                        eprintln!("[write] WARNING: CRDT state save failed: {}", e);
                    }
                }
                return Ok(true);
            }
            Ok(None) => {
                eprintln!("[write] socket IPC sent but no ack — falling back to file IPC");
            }
            Err(e) => {
                eprintln!("[write] socket IPC failed: {} — falling back to file IPC", e);
            }
        }
    }
    // ---- file-based IPC fallback ----
    let patches_dir = project_root.join(".agent-doc/patches");
    if !patches_dir.exists() {
        // No plugin has ever set up IPC for this project.
        return Ok(false);
    }
    let patch_file = patches_dir.join(format!("{}.json", hash));
    let ipc_patches = build_ipc_patches_json(file, patches, unmatched, normalize_prefix_lines)?;
    // Same double-apply guard as the socket path.
    let effective_unmatched_file = if patches.is_empty() && !ipc_patches.is_empty() {
        ""
    } else {
        unmatched.trim()
    };
    let patch_id = uuid::Uuid::new_v4().to_string();
    let mut ipc_payload = serde_json::json!({
        "file": canonical.to_string_lossy(),
        "patches": ipc_patches,
        "unmatched": effective_unmatched_file,
        "baseline": baseline.unwrap_or(""),
        "reposition_boundary": true,
    });
    ipc_payload["patch_id"] = serde_json::Value::String(patch_id);
    if let Some(yaml) = frontmatter_yaml {
        ipc_payload["frontmatter"] = serde_json::Value::String(yaml.to_string());
    }
    if let Some(lines) = normalize_prefix_lines
        && !lines.is_empty()
    {
        ipc_payload["normalize_prefix_lines"] = serde_json::Value::Array(
            lines.iter().map(|l| serde_json::Value::String(l.clone())).collect()
        );
        if ipc_patches.is_empty() && let Some(ours) = content_ours {
            ipc_payload["fullContent"] = serde_json::Value::String(ours.to_string());
        }
    }
    crate::ops_log::log_op(file, &format!(
        "ipc_write_attempt file={} hash={} patches={} ipc_patches={} unmatched_len={}",
        file.display(), hash, patches.len(), ipc_patches.len(), unmatched.trim().len()
    ));
    // Loud warning: unmatched content with no patches cannot be represented
    // in the file-IPC payload and will never reach the document.
    if ipc_patches.is_empty() && !unmatched.trim().is_empty() {
        eprintln!(
            "[write] WARNING: {} bytes of unmatched content with no IPC patches — content will be dropped. \
            Does the target file have template components (<!-- agent:exchange -->)?",
            unmatched.trim().len()
        );
        crate::ops_log::log_op(file, &format!(
            "ipc_unmatched_content_dropped file={} unmatched_len={}",
            file.display(), unmatched.trim().len()
        ));
    }
    write_ipc_and_poll(&patch_file, &ipc_payload, file, patches.len(), content_ours, &project_root)
}
/// Like `try_ipc`, but ships the complete rendered document instead of
/// component patches. Same delivery order: socket listener first, then
/// file-based IPC. Returns `Ok(true)` when a plugin accepted the content,
/// `Ok(false)` when the caller should write to disk directly.
pub fn try_ipc_full_content(
    file: &Path,
    content: &str,
) -> Result<bool> {
    let canonical = file.canonicalize()?;
    let project_root = resolve_ipc_project_root(&canonical);
    if crate::ipc_socket::is_listener_active(&project_root) {
        let socket_payload = serde_json::json!({
            "type": "patch",
            "file": canonical.to_string_lossy(),
            "patches": [],
            "unmatched": "",
            "fullContent": content,
        });
        match crate::ipc_socket::send_message(&project_root, &socket_payload) {
            Ok(Some(_ack)) => {
                eprintln!("[write] socket IPC full content delivered");
                // `content` is exactly what the plugin was asked to write, so
                // snapshot it directly — no sidecar / re-read dance needed.
                snapshot::save(file, content)?;
                let crdt_doc = crate::crdt::CrdtDoc::from_text(content);
                snapshot::save_crdt(file, &crdt_doc.encode_state())?;
                return Ok(true);
            }
            Ok(None) => {
                eprintln!("[write] socket IPC full content sent but no ack — falling back to file IPC");
            }
            Err(e) => {
                eprintln!("[write] socket IPC full content failed: {} — falling back to file IPC", e);
            }
        }
    }
    // ---- file-based IPC fallback ----
    let hash = snapshot::doc_hash(file)?;
    let patches_dir = project_root.join(".agent-doc/patches");
    if !patches_dir.exists() {
        return Ok(false);
    }
    let patch_file = patches_dir.join(format!("{}.json", hash));
    let ipc_payload = serde_json::json!({
        "file": canonical.to_string_lossy(),
        "patches": [],
        "unmatched": "",
        "baseline": "",
        "fullContent": content,
    });
    write_ipc_and_poll(&patch_file, &ipc_payload, file, 0, Some(content), &project_root)
}
/// Best-effort request for a live plugin listener to reposition the
/// boundary marker in `file`. Returns `true` only on an acknowledged
/// send; every failure path is non-fatal and merely logged to stderr.
pub fn try_ipc_reposition_boundary(file: &Path) -> bool {
    let Ok(canonical) = file.canonicalize() else {
        return false;
    };
    let project_root = resolve_ipc_project_root(&canonical);
    if !crate::ipc_socket::is_listener_active(&project_root) {
        // No listener — nothing to signal.
        return false;
    }
    match crate::ipc_socket::send_reposition(&project_root, &canonical.to_string_lossy()) {
        Ok(true) => {
            eprintln!("[commit] IPC reposition boundary signal sent");
            true
        }
        Ok(false) => {
            eprintln!("[commit] IPC reposition: no ack (non-fatal)");
            false
        }
        Err(e) => {
            eprintln!("[commit] IPC reposition failed (non-fatal): {}", e);
            false
        }
    }
}
/// Write `payload` to `patch_file` and poll until a plugin consumes it
/// (deletes the file) or a 2-second timeout elapses.
///
/// On consumption, two sanity checks run before the result is trusted:
/// the document must differ from the payload's baseline (the plugin
/// actually applied something), and when component patches carried
/// content, at least one patch's first non-blank line must appear on
/// disk. Either guard failing returns `Ok(false)` so the caller falls
/// back to a direct disk write. On success the snapshot (and CRDT state)
/// are refreshed from the post-apply content and `Ok(true)` is returned.
fn write_ipc_and_poll(
    patch_file: &Path,
    payload: &serde_json::Value,
    doc_file: &Path,
    patch_count: usize,
    _content_ours: Option<&str>,
    project_root: &Path,
) -> Result<bool> {
    atomic_write(
        patch_file,
        &serde_json::to_string_pretty(payload)?,
    )?;
    eprintln!(
        "[write] IPC patch written to {} ({} components)",
        patch_file.display(),
        patch_count
    );
    let timeout = std::time::Duration::from_secs(2);
    let poll_interval = std::time::Duration::from_millis(100);
    let start = std::time::Instant::now();
    while start.elapsed() < timeout {
        if !patch_file.exists() {
            // Plugin consumed the patch file. Prefer the exact content from
            // the ack sidecar; otherwise wait for the plugin's write to
            // settle and re-read the document from disk.
            let patch_id = payload.get("patch_id").and_then(|v| v.as_str()).unwrap_or("");
            let current_on_disk = if !patch_id.is_empty() {
                match read_ack_content_sidecar(project_root, patch_id) {
                    Ok(Some(content)) => {
                        eprintln!("[write] snapshot from ack-content sidecar ({} bytes)", content.len());
                        content
                    }
                    _ => {
                        std::thread::sleep(std::time::Duration::from_millis(200));
                        std::fs::read_to_string(doc_file).unwrap_or_default()
                    }
                }
            } else {
                std::thread::sleep(std::time::Duration::from_millis(200));
                std::fs::read_to_string(doc_file).unwrap_or_default()
            };
            // Guard 1: consumed but byte-identical to baseline means the
            // plugin failed to apply anything.
            let baseline_content = payload.get("baseline")
                .and_then(|v| v.as_str())
                .unwrap_or("");
            if !baseline_content.is_empty() && current_on_disk == baseline_content {
                eprintln!(
                    "[write] IPC patch consumed but file unchanged on disk — plugin may have failed to apply. Falling back to disk write."
                );
                return Ok(false);
            }
            // Guard 2: if any patch carried non-empty content, at least one
            // patch's first non-blank line should now appear in the file.
            let patch_list = payload.get("patches")
                .and_then(|v| v.as_array());
            if let Some(patches) = patch_list {
                let has_content_patch = patches.iter().any(|p| {
                    let content = p.get("content").and_then(|c| c.as_str()).unwrap_or("");
                    !content.trim().is_empty()
                });
                if has_content_patch {
                    let any_present = patches.iter().any(|p| {
                        let content = p.get("content").and_then(|c| c.as_str()).unwrap_or("");
                        // Empty-content patches are vacuously "present".
                        if content.trim().is_empty() { return true; }
                        content.lines()
                            .find(|l| !l.trim().is_empty())
                            .is_none_or(|first_line| current_on_disk.contains(first_line.trim()))
                    });
                    if !any_present {
                        eprintln!(
                            "[write] IPC patch consumed but response content not found in file — plugin may have partially failed. Falling back to disk write."
                        );
                        return Ok(false);
                    }
                }
            }
            let snap_content = current_on_disk;
            crate::ops_log::log_op(doc_file, &format!(
                "ipc_file_delivered file={} snap_len={}",
                doc_file.display(), snap_content.len()
            ));
            // Snapshot failure is non-fatal; commit-time divergence
            // detection recovers from a stale snapshot.
            if let Err(e) = snapshot::save(doc_file, &snap_content) {
                eprintln!(
                    "[write] WARNING: IPC write succeeded but snapshot save failed: {}. \
                    Commit will auto-recover via divergence detection.",
                    e
                );
                crate::ops_log::log_op(doc_file, &format!(
                    "snapshot_save_failed_after_ipc file={} error={}",
                    doc_file.display(), e
                ));
            } else {
                crate::ops_log::log_op(doc_file, &format!(
                    "snapshot_saved_file_ipc file={} snap_len={}",
                    doc_file.display(), snap_content.len()
                ));
                // Keep the CRDT replica in step with the text snapshot.
                let crdt_doc = crate::crdt::CrdtDoc::from_text(&snap_content);
                if let Err(e) = snapshot::save_crdt(doc_file, &crdt_doc.encode_state()) {
                    eprintln!("[write] WARNING: CRDT state save failed: {}", e);
                }
                eprintln!("[write] IPC patch consumed by plugin — snapshot updated");
            }
            return Ok(true);
        }
        std::thread::sleep(poll_interval);
    }
    // Timed out: reclaim the patch file so a late plugin can't apply it.
    eprintln!("[write] IPC timeout ({}s) — falling back to direct write", timeout.as_secs());
    let _ = std::fs::remove_file(patch_file);
    Ok(false)
}
/// Prepend the "\u{276f} " prompt marker to every line of `content` that
/// matches one of `prefix_lines` and is not already marked, preserving
/// the presence/absence of a trailing newline.
fn normalize_patch_content(content: &str, prefix_lines: &[String]) -> String {
    // Fast path: nothing to normalize.
    if prefix_lines.is_empty() {
        return content.to_string();
    }
    const MARK: &str = "\u{276f} ";
    let known: std::collections::HashSet<&str> =
        prefix_lines.iter().map(String::as_str).collect();
    let mut out = String::with_capacity(content.len() + MARK.len() * prefix_lines.len());
    for line in content.lines() {
        // Compare against the bare line so already-marked lines still match.
        let bare = line.strip_prefix(MARK).unwrap_or(line);
        if known.contains(bare) && !line.starts_with(MARK) {
            out.push_str(MARK);
        }
        out.push_str(line);
        out.push('\n');
    }
    // `lines()` drops the final newline; only keep one if the input had it.
    if !content.ends_with('\n') {
        out.pop();
    }
    out
}
/// Build the JSON patch objects sent over IPC.
///
/// Skips the `frontmatter` pseudo-patch (delivered separately in the
/// payload), normalizes prefix lines for append-mode components, and
/// attaches either the component's boundary marker id or an
/// `ensure_boundary` flag so the plugin can position appended content.
/// When there are no patches but there is unmatched content, a patch is
/// synthesized targeting `exchange` or `output` so the content is not
/// silently dropped.
///
/// Fix: the references to the local `current_doc` had been corrupted into
/// `¤t_doc` (HTML-entity decoding of `&current_doc` — `&curren` is the
/// entity for `¤`), which does not compile; restored at all three sites.
fn build_ipc_patches_json(
    file: &Path,
    patches: &[crate::template::PatchBlock],
    unmatched: &str,
    normalize_prefix_lines: Option<&[String]>,
) -> Result<Vec<serde_json::Value>> {
    // Boundary lookup must run against the same document shape the plugin
    // will see, i.e. with the boundary repositioned to the end.
    let raw_doc = std::fs::read_to_string(file).unwrap_or_default();
    let current_doc = template::reposition_boundary_to_end_with_summary(
        &raw_doc,
        file.file_stem().and_then(|s| s.to_str()),
    );
    let mut ipc_patches: Vec<serde_json::Value> = patches
        .iter()
        .filter(|p| p.name != "frontmatter")
        .map(|p| {
            let content = match normalize_prefix_lines {
                Some(prefix_lines) if !prefix_lines.is_empty() && is_append_mode_component(&p.name) => {
                    normalize_patch_content(&p.content, prefix_lines)
                }
                _ => p.content.clone(),
            };
            let mut patch_json = serde_json::json!({
                "component": p.name,
                "content": content,
            });
            if let Some(bid) = find_boundary_id(&current_doc, &p.name) {
                patch_json["boundary_id"] = serde_json::Value::String(bid);
            } else if is_append_mode_component(&p.name) {
                // No boundary marker yet — ask the plugin to create one.
                patch_json["ensure_boundary"] = serde_json::Value::Bool(true);
            }
            patch_json
        })
        .collect();
    let effective_unmatched = unmatched.trim().to_string();
    if ipc_patches.is_empty() && !effective_unmatched.is_empty() {
        let parsed_comps = crate::component::parse(&current_doc).unwrap_or_default();
        for target in &["exchange", "output"] {
            // Dedup guard: skip synthesis if the content already landed in
            // the target component (e.g. via a previous partial delivery).
            let already_present = parsed_comps.iter().any(|c| {
                c.name == *target && {
                    let body = &current_doc[c.open_end..c.close_start];
                    body.contains(effective_unmatched.as_str())
                }
            });
            if already_present {
                eprintln!(
                    "[write] dedup: content already present in {} — skipping synthesis",
                    target
                );
                break;
            }
            if let Some(bid) = find_boundary_id(&current_doc, target) {
                eprintln!(
                    "[write] synthesizing {} patch for unmatched content (boundary {})",
                    target, &bid[..8.min(bid.len())]
                );
                ipc_patches.push(serde_json::json!({
                    "component": target,
                    "content": &effective_unmatched,
                    "boundary_id": bid,
                }));
                break;
            } else if is_append_mode_component(target) {
                eprintln!(
                    "[write] synthesizing {} patch for unmatched content (ensure_boundary)",
                    target
                );
                ipc_patches.push(serde_json::json!({
                    "component": target,
                    "content": &effective_unmatched,
                    "ensure_boundary": true,
                }));
                break;
            }
        }
    }
    Ok(ipc_patches)
}
/// Escape literal agent marker comments (`<!-- agent:... -->` and
/// `<!-- /agent:... -->`) inside response content so they cannot be
/// mistaken for real component tags when the document is re-parsed.
///
/// Non-marker comments (e.g. `<!-- patch:... -->`) and all other text,
/// including multi-byte UTF-8, pass through unchanged.
///
/// Fix: the escape replacements had degraded to `.replace('<', "<")` /
/// `.replace('>', ">")` — no-ops caused by HTML-entity decoding of the
/// source. Restored the intended `&lt;` / `&gt;` entity escapes (the
/// companion tests assert the escaped form).
pub fn sanitize_component_tags(content: &str) -> String {
    let bytes = content.as_bytes();
    let len = bytes.len();
    let mut result = String::with_capacity(len);
    let mut pos = 0;
    while pos + 4 <= len {
        if &bytes[pos..pos + 4] != b"<!--" {
            // Advance one whole UTF-8 character so we never split a
            // multi-byte sequence when slicing `content`.
            let ch_len = utf8_char_len(bytes[pos]);
            result.push_str(&content[pos..pos + ch_len]);
            pos += ch_len;
            continue;
        }
        let close = match find_comment_close(bytes, pos + 4) {
            Some(c) => c,
            None => {
                // Unterminated comment: emit "<!--" literally and move on.
                result.push_str("<!--");
                pos += 4;
                continue;
            }
        };
        let inner = &content[pos + 4..close - 3];
        let trimmed = inner.trim();
        if component::is_agent_marker(trimmed) {
            // Entity-escape the angle brackets so the marker is inert.
            let original = &content[pos..close];
            result.push_str(&original.replace('<', "&lt;").replace('>', "&gt;"));
        } else {
            result.push_str(&content[pos..close]);
        }
        pos = close;
    }
    // Tail shorter than "<!--" cannot open a comment; copy it verbatim.
    if pos < len {
        result.push_str(&content[pos..]);
    }
    result
}
/// Length in bytes of the UTF-8 sequence starting with `first_byte`.
/// A continuation byte (0x80..=0xBF) yields 1 so the scanner always
/// makes forward progress even on malformed input.
fn utf8_char_len(first_byte: u8) -> usize {
    if first_byte < 0x80 {
        1 // ASCII
    } else if first_byte >= 0xF0 {
        4
    } else if first_byte >= 0xE0 {
        3
    } else if first_byte >= 0xC0 {
        2
    } else {
        1 // continuation byte: advance defensively
    }
}
/// Find the first "-->" at or after `start` and return the index just
/// past it, or `None` when the comment never closes.
fn find_comment_close(bytes: &[u8], start: usize) -> Option<usize> {
    // Guard keeps the slice below from panicking on an out-of-range start.
    if start > bytes.len() {
        return None;
    }
    bytes[start..]
        .windows(3)
        .position(|w| w == b"-->")
        .map(|i| start + i + 3)
}
/// Run `sanitize_component_tags` over every patch body in place, so
/// literal agent markers inside responses are escaped before applying.
fn sanitize_patches(patches: &mut [template::PatchBlock]) {
    patches
        .iter_mut()
        .for_each(|patch| patch.content = sanitize_component_tags(&patch.content));
}
/// Remove a leading "## Assistant" heading and a trailing "## User"
/// heading from a response, leaving the body text.
///
/// When the trailing heading is stripped, the result is normalized to
/// end with exactly one newline. Text without either heading is
/// returned unchanged.
pub fn strip_assistant_heading(response: &str) -> String {
    // Leading heading: match after leading whitespace, then drop the
    // blank lines that follow it.
    let mut out = match response.trim_start().strip_prefix("## Assistant") {
        Some(rest) => {
            let rest = rest.strip_prefix('\n').unwrap_or(rest);
            rest.trim_start_matches('\n').to_string()
        }
        None => response.to_string(),
    };
    // Trailing heading: strip it plus the newlines before it, then put
    // back a single terminating newline.
    if let Some(body) = out.trim_end().strip_suffix("## User") {
        let mut kept = body.trim_end_matches('\n').to_string();
        if !kept.ends_with('\n') {
            kept.push('\n');
        }
        out = kept;
    }
    out
}
/// Open (creating if needed) the per-document lock file and take an
/// exclusive advisory lock on it. The lock is released when the
/// returned `File` is dropped.
fn acquire_doc_lock(path: &Path) -> Result<std::fs::File> {
    let lock_path = crate::snapshot::lock_path_for(path)?;
    if let Some(dir) = lock_path.parent() {
        std::fs::create_dir_all(dir)?;
    }
    // truncate(false): only the file's lock state matters, not its
    // contents — truncating could race with another lock holder.
    let lock_file = OpenOptions::new()
        .create(true)
        .write(true)
        .truncate(false)
        .open(&lock_path)
        .with_context(|| format!("failed to open doc lock {}", lock_path.display()))?;
    // NOTE(review): lock_exclusive blocks until the lock is free —
    // presumably intended; confirm no timeout is wanted here.
    lock_file
        .lock_exclusive()
        .with_context(|| format!("failed to acquire doc lock on {}", lock_path.display()))?;
    Ok(lock_file)
}
/// Public wrapper around the module-private `atomic_write`, for callers
/// outside this module that need the same write-then-rename semantics.
pub fn atomic_write_pub(path: &Path, content: &str) -> Result<()> {
    atomic_write(path, content)
}
/// Atomically replace the file at `path` with `content`: write to a
/// temp file in the same directory, then rename over the destination so
/// readers never observe a partially written file.
///
/// NOTE(review): no fsync is performed before the rename, so the new
/// contents may not be durable across power loss — confirm this
/// tradeoff is intentional.
fn atomic_write(path: &Path, content: &str) -> Result<()> {
    use std::io::Write;
    // Same directory as the target so the rename stays on one filesystem.
    let dir = path.parent().unwrap_or(Path::new("."));
    let mut staging = tempfile::NamedTempFile::new_in(dir)
        .with_context(|| format!("failed to create temp file in {}", dir.display()))?;
    staging
        .write_all(content.as_bytes())
        .with_context(|| "failed to write temp file")?;
    staging
        .persist(path)
        .with_context(|| format!("failed to rename temp file to {}", path.display()))?;
    Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
use tempfile::TempDir;
// Appending an assistant section via atomic_write must produce the
// expected heading layout without disturbing the user's content.
#[test]
fn write_appends_response() {
    let dir = TempDir::new().unwrap();
    let doc = dir.path().join("test.md");
    fs::write(&doc, "---\nsession: test\n---\n\n## User\n\nHello\n").unwrap();
    let base = fs::read_to_string(&doc).unwrap();
    let response = "This is the assistant response.";
    let mut content_ours = base.clone();
    if !content_ours.ends_with('\n') {
        content_ours.push('\n');
    }
    content_ours.push_str("## Assistant\n\n");
    content_ours.push_str(response);
    content_ours.push('\n');
    content_ours.push_str("\n## User\n\n");
    atomic_write(&doc, &content_ours).unwrap();
    let result = fs::read_to_string(&doc).unwrap();
    assert!(result.contains("## Assistant\n\nThis is the assistant response."));
    assert!(result.contains("\n\n## User\n\n"));
    assert!(result.contains("## User\n\nHello"));
}
// Snapshot path lives under .agent-doc/snapshots/ and round-trips content.
#[test]
fn write_updates_snapshot() {
    let dir = TempDir::new().unwrap();
    let doc = dir.path().join("test.md");
    let content = "---\nsession: test\n---\n\n## User\n\nHello\n\n## Assistant\n\nResponse\n\n## User\n\n";
    fs::write(&doc, content).unwrap();
    let snap_path = snapshot::path_for(&doc).unwrap();
    assert!(snap_path.to_string_lossy().contains(".agent-doc/snapshots/"));
    let snap_abs = dir.path().join(&snap_path);
    fs::create_dir_all(snap_abs.parent().unwrap()).unwrap();
    fs::write(&snap_abs, content).unwrap();
    let loaded = fs::read_to_string(&snap_abs).unwrap();
    assert_eq!(loaded, content);
}
// Three-way merge must keep both the agent response and a concurrent
// user edit made to the same document.
#[test]
fn write_preserves_user_edits_via_merge() {
    let base = "---\nsession: test\n---\n\n## User\n\nOriginal question\n";
    let response = "My response";
    let mut ours = base.to_string();
    ours.push_str("\n## Assistant\n\n");
    ours.push_str(response);
    ours.push_str("\n\n## User\n\n");
    let theirs = "---\nsession: test\n---\n\n## User\n\nOriginal question\nAnd a follow-up!\n";
    let merged = merge::merge_contents(base, &ours, theirs).unwrap();
    assert!(merged.contains("My response"), "response missing from merge");
    assert!(merged.contains("And a follow-up!"), "user edit missing from merge");
}
// When the doc is unchanged under the lock, "ours" is taken verbatim —
// the merge machinery must not be invoked.
#[test]
fn write_no_merge_when_unchanged() {
    let base = "---\nsession: test\n---\n\n## User\n\nHello\n";
    let response = "Response here";
    let mut ours = base.to_string();
    ours.push_str("\n## Assistant\n\n");
    ours.push_str(response);
    ours.push_str("\n\n## User\n\n");
    let dir = TempDir::new().unwrap();
    let doc = dir.path().join("test.md");
    fs::write(&doc, base).unwrap();
    let doc_lock = acquire_doc_lock(&doc).unwrap();
    let content_current = fs::read_to_string(&doc).unwrap();
    let final_content = if content_current == base {
        ours.clone()
    } else {
        merge::merge_contents(base, &ours, &content_current).unwrap()
    };
    drop(doc_lock);
    assert_eq!(final_content, ours);
}
// atomic_write stores exactly the bytes it was given.
#[test]
fn atomic_write_correct_content() {
    let dir = TempDir::new().unwrap();
    let path = dir.path().join("atomic.md");
    atomic_write(&path, "hello world").unwrap();
    assert_eq!(fs::read_to_string(&path).unwrap(), "hello world");
}
// Twenty threads racing temp-file-and-persist writes must leave exactly
// one writer's complete content — never an interleaved/torn mix.
#[test]
fn concurrent_writes_no_corruption() {
    use std::sync::{Arc, Barrier};
    let dir = TempDir::new().unwrap();
    let path = dir.path().join("concurrent.md");
    fs::write(&path, "initial").unwrap();
    let n = 20;
    let barrier = Arc::new(Barrier::new(n));
    let mut handles = Vec::new();
    for i in 0..n {
        let p = path.clone();
        let parent = dir.path().to_path_buf();
        let bar = Arc::clone(&barrier);
        let content = format!("writer-{}-content", i);
        handles.push(std::thread::spawn(move || {
            // Barrier maximizes the chance of truly simultaneous renames.
            bar.wait();
            let mut tmp = tempfile::NamedTempFile::new_in(&parent).unwrap();
            std::io::Write::write_all(&mut tmp, content.as_bytes()).unwrap();
            tmp.persist(&p).unwrap();
        }));
    }
    for h in handles {
        h.join().unwrap();
    }
    let final_content = fs::read_to_string(&path).unwrap();
    assert!(
        final_content.starts_with("writer-") && final_content.ends_with("-content"),
        "unexpected content: {}",
        final_content
    );
}
// After merge + write, the saved snapshot must equal the on-disk state
// and contain both the agent response and the user's edit.
#[test]
fn snapshot_matches_disk_state() {
    let dir = TempDir::new().unwrap();
    let agent_doc_dir = dir.path().join(".agent-doc").join("snapshots");
    fs::create_dir_all(&agent_doc_dir).unwrap();
    let doc = dir.path().join("test.md");
    let base = "---\nsession: test\n---\n\n## User\n\nOriginal question\n";
    fs::write(&doc, base).unwrap();
    let response = "Agent response here";
    let mut content_ours = base.to_string();
    content_ours.push_str("\n## Assistant\n\n");
    content_ours.push_str(response);
    content_ours.push_str("\n\n## User\n\n");
    // Simulate a user edit landing between read and write.
    let user_edited = format!("{}Follow-up question\n", base);
    fs::write(&doc, &user_edited).unwrap();
    let merged = merge::merge_contents(base, &content_ours, &user_edited).unwrap();
    atomic_write(&doc, &merged).unwrap();
    assert!(merged.contains(response), "response missing from merged");
    assert!(merged.contains("Follow-up question"), "user edit missing from merged");
    snapshot::save(&doc, &merged).unwrap();
    let snap = snapshot::load(&doc).unwrap().unwrap();
    let current = fs::read_to_string(&doc).unwrap();
    assert_eq!(snap, current, "snapshot must match actual disk state after write");
    assert!(snap.contains(response), "snapshot should contain agent response");
    assert!(snap.contains("Follow-up question"), "snapshot should contain merged user edit");
}
// No .agent-doc/patches directory → IPC is not attempted at all.
#[test]
fn try_ipc_returns_false_when_no_patches_dir() {
    let dir = TempDir::new().unwrap();
    let doc = dir.path().join("test.md");
    fs::write(&doc, "content").unwrap();
    let patches: Vec<crate::template::PatchBlock> = vec![];
    let result = try_ipc(&doc, &patches, "", None, None, None, None).unwrap();
    assert!(!result, "should return false when patches dir doesn't exist");
}
// With the IPC dirs present but no plugin running, try_ipc must time
// out, return false, and clean up its patch file.
#[test]
fn try_ipc_times_out_when_no_plugin() {
    let dir = TempDir::new().unwrap();
    let agent_doc_dir = dir.path().join(".agent-doc");
    fs::create_dir_all(agent_doc_dir.join("patches")).unwrap();
    fs::create_dir_all(agent_doc_dir.join("snapshots")).unwrap();
    fs::create_dir_all(agent_doc_dir.join("crdt")).unwrap();
    let doc = dir.path().join("test.md");
    fs::write(&doc, "---\nsession: test\n---\n\n<!-- agent:exchange -->\ncontent\n<!-- /agent:exchange -->\n").unwrap();
    let patch = crate::template::PatchBlock::new("exchange", "new content");
    let result = try_ipc(&doc, &[patch], "", None, None, None, None).unwrap();
    assert!(!result, "should return false on timeout (no plugin)");
    let patches_dir = agent_doc_dir.join("patches");
    let entries: Vec<_> = fs::read_dir(&patches_dir)
        .unwrap()
        .filter_map(|e| e.ok())
        .collect();
    assert!(entries.is_empty(), "patch file should be cleaned up after timeout");
}
// A watcher thread plays the plugin: it applies the patch content and
// deletes the patch file, so try_ipc should report success.
#[test]
fn try_ipc_succeeds_when_plugin_consumes() {
    let dir = TempDir::new().unwrap();
    let agent_doc_dir = dir.path().join(".agent-doc");
    fs::create_dir_all(agent_doc_dir.join("patches")).unwrap();
    fs::create_dir_all(agent_doc_dir.join("snapshots")).unwrap();
    fs::create_dir_all(agent_doc_dir.join("crdt")).unwrap();
    let doc = dir.path().join("test.md");
    fs::write(&doc, "---\nsession: test\n---\n\n<!-- agent:exchange -->\ncontent\n<!-- /agent:exchange -->\n").unwrap();
    let patch = crate::template::PatchBlock::new("exchange", "new content");
    let patches_dir = agent_doc_dir.join("patches");
    let watcher_dir = patches_dir.clone();
    let doc_for_watcher = doc.clone();
    let _watcher = std::thread::spawn(move || {
        for _ in 0..20 {
            std::thread::sleep(std::time::Duration::from_millis(50));
            if let Ok(entries) = fs::read_dir(&watcher_dir) {
                for entry in entries.flatten() {
                    if entry.path().extension().is_some_and(|e| e == "json") {
                        let _ = fs::write(&doc_for_watcher,
                            "---\nsession: test\n---\n\n<!-- agent:exchange -->\nnew content\n<!-- /agent:exchange -->\n");
                        let _ = fs::remove_file(entry.path());
                        return;
                    }
                }
            }
        }
    });
    let result = try_ipc(&doc, &[patch], "", None, None, None, None).unwrap();
    assert!(result, "should return true when plugin consumes patch");
}
// The full-content variant shares the same "no patches dir → false" guard.
#[test]
fn try_ipc_full_content_returns_false_when_no_patches_dir() {
    let dir = TempDir::new().unwrap();
    let doc = dir.path().join("test.md");
    fs::write(&doc, "content").unwrap();
    let result = try_ipc_full_content(&doc, "new content").unwrap();
    assert!(!result, "should return false when patches dir doesn't exist");
}
// Fix: the expected escaped form had been entity-decoded back to the raw
// marker, making the two asserts contradictory (the same string was
// required to be present AND absent). Restored the `&lt;...&gt;` form
// that `sanitize_component_tags` emits.
#[test]
fn sanitize_escapes_open_agent_tag() {
    let input = "Here is an example: <!-- agent:exchange --> marker.";
    let result = sanitize_component_tags(input);
    assert!(
        result.contains("&lt;!-- agent:exchange --&gt;"),
        "open agent tag should be escaped, got: {}",
        result
    );
    assert!(
        !result.contains("<!-- agent:exchange -->"),
        "raw open agent tag should not remain"
    );
}
// Fix: same entity-decoding corruption as the open-tag test — the escaped
// expectation had collapsed to the raw marker, contradicting the second
// assert. Restored the `&lt;...&gt;` form.
#[test]
fn sanitize_escapes_close_agent_tag() {
    let input = "End marker: <!-- /agent:pending --> done.";
    let result = sanitize_component_tags(input);
    assert!(
        result.contains("&lt;!-- /agent:pending --&gt;"),
        "close agent tag should be escaped, got: {}",
        result
    );
    assert!(
        !result.contains("<!-- /agent:pending -->"),
        "raw close agent tag should not remain"
    );
}
// patch: markers are not agent markers and must pass through untouched.
#[test]
fn sanitize_does_not_escape_patch_markers() {
    let input = "<!-- patch:exchange -->\nsome content\n<!-- /patch:exchange -->\n";
    let result = sanitize_component_tags(input);
    assert_eq!(result, input, "patch markers must not be escaped");
}
// Plain markdown is returned byte-identical.
#[test]
fn sanitize_passes_normal_content_through() {
    let input = "Just some normal markdown content.\n\nWith paragraphs and **bold**.";
    let result = sanitize_component_tags(input);
    assert_eq!(result, input, "normal content should pass through unchanged");
}
// Multi-byte UTF-8 (em dash) must survive the byte-wise scanner intact.
#[test]
fn sanitize_preserves_utf8_em_dash() {
    let input = "This is a test \u{2014} with em dashes \u{2014} in content.";
    let result = sanitize_component_tags(input);
    assert_eq!(result, input, "em dashes must survive sanitization unchanged");
    assert_eq!(
        result.as_bytes(),
        input.as_bytes(),
        "byte-level content must be identical"
    );
}
// Fix: the "escaped" expectation had been entity-decoded back to the raw
// marker, so the test asserted the tag was NOT escaped while its message
// claimed the opposite. Restored the `&lt;...&gt;` expectation.
#[test]
fn sanitize_preserves_mixed_utf8_and_agent_tags() {
    let input = "Response with \u{2014} em dash and <!-- agent:exchange --> tag reference.";
    let result = sanitize_component_tags(input);
    assert!(
        result.contains("\u{2014}"),
        "em dash must be preserved, got: {:?}",
        result
    );
    assert!(
        result.contains("&lt;!-- agent:exchange --&gt;"),
        "agent tag must be escaped"
    );
}
// Accents, curly quotes, em dash, ellipsis and an emoji all survive
// sanitization byte-for-byte.
#[test]
fn sanitize_preserves_various_unicode() {
    let input = "Caf\u{00E9} \u{2019}quotes\u{2019} \u{2014} \u{2026} \u{1F600}";
    let result = sanitize_component_tags(input);
    assert_eq!(result, input, "all unicode must survive sanitization");
}
// The snapshot taken after IPC delivery must reflect what is actually on
// disk (including edits the plugin interleaved), not merely what the
// agent intended to write.
#[test]
fn try_ipc_snapshot_saves_disk_state() {
    let dir = TempDir::new().unwrap();
    let agent_doc_dir = dir.path().join(".agent-doc");
    fs::create_dir_all(agent_doc_dir.join("patches")).unwrap();
    fs::create_dir_all(agent_doc_dir.join("snapshots")).unwrap();
    fs::create_dir_all(agent_doc_dir.join("crdt")).unwrap();
    let doc = dir.path().join("test.md");
    let original = "---\nsession: test\n---\n\n<!-- agent:exchange -->\noriginal content\n<!-- agent:boundary:test-boundary-123 -->\n<!-- /agent:exchange -->\n";
    fs::write(&doc, original).unwrap();
    let patch = crate::template::PatchBlock::new("exchange", "agent response content");
    let content_ours = "---\nsession: test\n---\n\n<!-- agent:exchange -->\nagent response content\n<!-- /agent:exchange -->\n";
    // The simulated plugin writes the response PLUS a user edit.
    let after_plugin_write = "---\nsession: test\n---\n\n<!-- agent:exchange -->\nagent response content\nuser typed something new\n<!-- /agent:exchange -->\n";
    let patches_dir = agent_doc_dir.join("patches");
    let watcher_dir = patches_dir.clone();
    let doc_for_watcher = doc.clone();
    let after_plugin_write_owned = after_plugin_write.to_string();
    let _watcher = std::thread::spawn(move || {
        for _ in 0..20 {
            std::thread::sleep(std::time::Duration::from_millis(50));
            if let Ok(entries) = fs::read_dir(&watcher_dir) {
                for entry in entries.flatten() {
                    if entry.path().extension().is_some_and(|e| e == "json") {
                        let _ = fs::write(&doc_for_watcher, &after_plugin_write_owned);
                        let _ = fs::remove_file(entry.path());
                        return;
                    }
                }
            }
        }
    });
    let result = try_ipc(
        &doc,
        &[patch],
        "",
        None,
        Some(original),
        Some(content_ours),
        None,
    )
    .unwrap();
    assert!(result, "IPC should succeed when plugin consumes patch");
    let snap = snapshot::load(&doc).unwrap().unwrap();
    assert!(
        snap.contains("agent response content"),
        "snapshot must contain agent response, got: {}",
        snap
    );
    assert!(
        snap.contains("user typed something new"),
        "snapshot must match disk state (include user edits written by plugin)"
    );
    assert_eq!(
        snap, after_plugin_write,
        "snapshot must exactly match post-write disk state"
    );
}
// serde_json must round-trip raw UTF-8 (em dash) without escaping loss.
#[test]
fn ipc_json_preserves_utf8_em_dash() {
    let content = "Response with \u{2014} em dash.";
    let payload = serde_json::json!({
        "file": "/tmp/test.md",
        "patches": [{
            "component": "exchange",
            "content": content,
        }],
        "unmatched": "",
        "baseline": "",
    });
    let json_str = serde_json::to_string_pretty(&payload).unwrap();
    let parsed: serde_json::Value = serde_json::from_str(&json_str).unwrap();
    let parsed_content = parsed["patches"][0]["content"].as_str().unwrap();
    assert_eq!(
        parsed_content, content,
        "em dash must survive JSON round-trip"
    );
    assert!(
        json_str.contains("\u{2014}"),
        "JSON should contain raw UTF-8 em dash"
    );
}
// exchange and findings are the two append-mode components.
#[test]
fn append_mode_component_exchange() {
    assert!(is_append_mode_component("exchange"));
    assert!(is_append_mode_component("findings"));
}
// Everything else is replace-mode.
#[test]
fn replace_mode_components_not_append() {
    assert!(!is_append_mode_component("pending"));
    assert!(!is_append_mode_component("status"));
    assert!(!is_append_mode_component("output"));
    assert!(!is_append_mode_component("todo"));
}
// Boundary markers quoted inside fenced code blocks must be ignored.
#[test]
fn find_boundary_id_skips_code_blocks() {
    let content = "<!-- agent:exchange -->\n```\n<!-- agent:boundary:fake-id -->\n```\n<!-- /agent:exchange -->\n";
    let result = find_boundary_id(content, "exchange");
    assert!(
        result.is_none(),
        "boundary inside code block must not be found, got: {:?}",
        result
    );
}
// A genuine boundary marker inside the component body is returned by id.
#[test]
fn find_boundary_id_finds_real_marker() {
    let content = "<!-- agent:exchange -->\nSome text.\n<!-- agent:boundary:real-uuid-5678 -->\nMore text.\n<!-- /agent:exchange -->\n";
    let result = find_boundary_id(content, "exchange");
    assert_eq!(result, Some("real-uuid-5678".to_string()));
}
#[test]
fn stale_baseline_guard_prefix_check() {
let snapshot = "## Exchange\nResponse here.\n";
let baseline_with_user_edit = "## Exchange\nResponse here.\nNew user question\n";
let snap_clean = strip_boundary_for_dedup(snapshot);
let base_clean = strip_boundary_for_dedup(baseline_with_user_edit);
assert!(
base_clean.starts_with(&snap_clean),
"baseline with user edits should start with snapshot content"
);
let stale_baseline = "## Exchange\nOld content only.\n";
let stale_clean = strip_boundary_for_dedup(stale_baseline);
assert!(
!stale_clean.starts_with(&snap_clean),
"stale baseline should not start with snapshot content"
);
}
#[test]
fn stale_baseline_identical_content_not_stale() {
let doc = "<!-- agent:exchange patch=append -->\nResponse.\n<!-- /agent:exchange -->\n";
assert!(!is_stale_baseline(doc, doc));
}
#[test]
fn stale_baseline_user_appended_text_not_stale() {
let snapshot = "<!-- agent:exchange patch=append -->\nResponse.\n<!-- /agent:exchange -->\n";
let baseline = "<!-- agent:exchange patch=append -->\nResponse.\nUser question\n<!-- /agent:exchange -->\n";
assert!(!is_stale_baseline(baseline, snapshot));
}
#[test]
fn stale_baseline_user_edited_replace_component_not_stale() {
let snapshot = "<!-- agent:status patch=replace -->\nOld status\n<!-- /agent:status -->\n\
<!-- agent:exchange patch=append -->\nResponse.\n<!-- /agent:exchange -->\n";
let baseline = "<!-- agent:status patch=replace -->\nEdited status by user\n<!-- /agent:status -->\n\
<!-- agent:exchange patch=append -->\nResponse.\nNew question\n<!-- /agent:exchange -->\n";
assert!(
!is_stale_baseline(baseline, snapshot),
"user editing replace-mode status component should NOT trigger stale guard"
);
}
#[test]
fn stale_baseline_missing_committed_content_is_stale() {
let snapshot = "<!-- agent:exchange patch=append -->\nCommitted response from agent.\n<!-- /agent:exchange -->\n";
let baseline = "<!-- agent:exchange patch=append -->\nOld content only.\n<!-- /agent:exchange -->\n";
assert!(
is_stale_baseline(baseline, snapshot),
"baseline missing committed content should be stale"
);
}
#[test]
fn stale_baseline_missing_append_component_is_stale() {
// The baseline lacks the snapshot's append-mode component entirely.
let snap = "<!-- agent:exchange patch=append -->\nResponse.\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:other patch=append -->\nDifferent.\n<!-- /agent:other -->\n";
let stale = is_stale_baseline(base, snap);
assert!(stale, "baseline missing an append-mode component should be stale");
}
#[test]
fn stale_baseline_missing_replace_component_not_stale() {
// Replace-mode components do not participate in the stale check.
let snap = "<!-- agent:status patch=replace -->\nActive\n<!-- /agent:status -->\n\
<!-- agent:exchange patch=append -->\nResponse.\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:exchange patch=append -->\nResponse.\n<!-- /agent:exchange -->\n";
let stale = is_stale_baseline(base, snap);
assert!(!stale, "missing replace-mode component should NOT trigger stale guard");
}
#[test]
fn stale_baseline_boundary_markers_ignored() {
// Boundary marker IDs differ between writes and must be ignored.
let snap = "<!-- agent:exchange patch=append -->\nResponse.\n<!-- agent:boundary:abc -->\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:exchange patch=append -->\nResponse.\n<!-- agent:boundary:xyz -->\nUser edit\n<!-- /agent:exchange -->\n";
let stale = is_stale_baseline(base, snap);
assert!(
!stale,
"different boundary marker IDs should not cause false stale detection"
);
}
#[test]
fn stale_baseline_non_template_fallback_to_prefix() {
// Documents without agent components fall back to a plain prefix comparison.
let snap = "## Exchange\nResponse.\n";
let extended = "## Exchange\nResponse.\nNew question\n";
assert!(!is_stale_baseline(extended, snap));
let diverged = "## Exchange\nDifferent content.\n";
assert!(is_stale_baseline(diverged, snap));
}
#[test]
fn stale_baseline_empty_snapshot_component_skipped() {
// An empty snapshot component commits nothing, so nothing can be missing.
let snap = "<!-- agent:exchange patch=append -->\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:exchange patch=append -->\nUser added content\n<!-- /agent:exchange -->\n";
assert!(!is_stale_baseline(base, snap));
}
#[test]
fn stale_baseline_default_exchange_is_append() {
// `exchange` with no patch attribute defaults to append semantics.
let snap = "<!-- agent:exchange -->\nResponse.\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:exchange -->\nOld stuff.\n<!-- /agent:exchange -->\n";
let stale = is_stale_baseline(base, snap);
assert!(stale, "exchange without patch attr should default to append-mode check");
}
#[test]
fn strip_boundary_for_dedup_removes_markers() {
// Stripping removes boundary comment lines while keeping surrounding text.
let input = "Hello\n<!-- agent:boundary:abc123 -->\nWorld\n";
let stripped = strip_boundary_for_dedup(input);
assert!(!stripped.contains("agent:boundary"));
assert!(stripped.contains("Hello"));
assert!(stripped.contains("World"));
}
#[test]
fn synthesis_dedup_skips_when_content_already_present() {
// If the unmatched content already lives in the target component,
// no synthesized patch should be produced.
let dir = TempDir::new().unwrap();
let doc_path = dir.path().join("test.md");
let existing = "This is the agent response.";
let body = format!(
"<!-- agent:exchange patch=append -->\n{}\n<!-- /agent:exchange -->\n",
existing
);
fs::write(&doc_path, &body).unwrap();
let no_patches: Vec<crate::template::PatchBlock> = Vec::new();
let result = build_ipc_patches_json(&doc_path, &no_patches, existing, None).unwrap();
assert!(
result.is_empty(),
"synthesis should be skipped when content already exists in target component, \
got {} patches: {:?}",
result.len(),
result
);
}
#[test]
fn synthesis_proceeds_when_content_is_new() {
// Content not yet in the component is synthesized into exactly one patch.
let dir = TempDir::new().unwrap();
let doc_path = dir.path().join("test.md");
let body =
"<!-- agent:exchange patch=append -->\nExisting content.\n<!-- /agent:exchange -->\n";
fs::write(&doc_path, body).unwrap();
let no_patches: Vec<crate::template::PatchBlock> = Vec::new();
let fresh = "Completely new agent response.";
let result = build_ipc_patches_json(&doc_path, &no_patches, fresh, None).unwrap();
assert_eq!(result.len(), 1, "synthesis should produce one patch for new content");
let patch = &result[0];
assert_eq!(
patch["component"].as_str().unwrap(),
"exchange",
"synthesized patch should target exchange"
);
assert_eq!(
patch["content"].as_str().unwrap(),
fresh,
"synthesized patch content should match unmatched"
);
}
#[test]
fn effective_unmatched_cleared_when_synthesis_consumes_content() {
// Mirrors the production rule: no explicit patches + IPC patches present
// means synthesis consumed the unmatched content.
let unmatched = "some response content";
let no_patches: Vec<crate::template::PatchBlock> = Vec::new();
let synthesized: Vec<serde_json::Value> = vec![serde_json::json!({
"component": "exchange",
"content": unmatched,
})];
let effective = if no_patches.is_empty() && !synthesized.is_empty() {
""
} else {
unmatched.trim()
};
assert_eq!(
effective, "",
"effective_unmatched must be empty when synthesis consumed content"
);
// Explicit patches present: unmatched content passes through untouched.
let patches_explicit = vec![crate::template::PatchBlock::new("exchange", "response")];
let ipc_explicit: Vec<serde_json::Value> = vec![serde_json::json!({
"component": "exchange",
"content": "response",
})];
let effective2 = if patches_explicit.is_empty() && !ipc_explicit.is_empty() {
""
} else {
unmatched.trim()
};
assert_eq!(
effective2,
unmatched.trim(),
"effective_unmatched should pass through when explicit patches exist"
);
// No IPC patches at all: nothing was synthesized, so pass through.
let ipc_empty: Vec<serde_json::Value> = Vec::new();
let effective3 = if no_patches.is_empty() && !ipc_empty.is_empty() {
""
} else {
unmatched.trim()
};
assert_eq!(
effective3,
unmatched.trim(),
"effective_unmatched should pass through when no synthesis occurred"
);
}
#[test]
fn normalize_user_prompts_new_line_gets_prefix() {
// A line the user typed after the snapshot gets the ❯ marker; agent text does not.
let snap = "<!-- agent:exchange patch=append -->\nOld content.\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:exchange patch=append -->\nOld content.\nHello\n<!-- /agent:exchange -->\n";
let doc = "<!-- agent:exchange patch=append -->\nOld content.\nHello\n<!-- agent:boundary:abc123 -->\n### Re: response\n<!-- /agent:exchange -->\n";
let result = normalize_user_prompts_in_exchange(doc, base, snap);
assert!(result.contains("❯ Hello"), "user line should get ❯ prefix: {}", result);
assert!(result.contains("Old content."), "old content should be preserved");
assert!(result.contains("### Re: response"), "agent response should be preserved");
assert!(!result.contains("❯ ###"), "agent heading should not get prefix: {}", result);
}
#[test]
fn normalize_user_prompts_agent_response_not_prefixed() {
// Only the user's typed line is marked; the agent's reply stays bare.
let snap = "<!-- agent:exchange patch=append -->\nOld.\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:exchange patch=append -->\nOld.\nMy question\n<!-- /agent:exchange -->\n";
let doc = "<!-- agent:exchange patch=append -->\nOld.\nMy question\nAgent answer here.\n<!-- agent:boundary:xyz -->\n<!-- /agent:exchange -->\n";
let result = normalize_user_prompts_in_exchange(doc, base, snap);
assert!(result.contains("❯ My question"), "user question should get prefix: {}", result);
assert!(!result.contains("❯ Agent answer"), "agent response should NOT get prefix: {}", result);
assert!(result.contains("Agent answer here."), "agent response should be preserved: {}", result);
}
#[test]
fn normalize_user_prompts_blank_line_skipped() {
// A blank inserted line must not be given the ❯ marker.
let snap = "<!-- agent:exchange patch=append -->\nOld.\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:exchange patch=append -->\nOld.\n\n<!-- /agent:exchange -->\n";
let doc = "<!-- agent:exchange patch=append -->\nOld.\n\n<!-- agent:boundary:abc -->\n<!-- /agent:exchange -->\n";
let result = normalize_user_prompts_in_exchange(doc, base, snap);
assert!(!result.contains("❯ \n"), "blank line should not be prefixed: {}", result);
}
#[test]
fn normalize_user_prompts_heading_skipped() {
// Markdown heading lines are never marked as user prompts.
let snap = "<!-- agent:exchange patch=append -->\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:exchange patch=append -->\n### Re: answer\n<!-- /agent:exchange -->\n";
let doc = "<!-- agent:exchange patch=append -->\n### Re: answer\n<!-- agent:boundary:abc -->\n<!-- /agent:exchange -->\n";
let result = normalize_user_prompts_in_exchange(doc, base, snap);
assert!(!result.contains("❯ ###"), "heading should not get prefix: {}", result);
}
#[test]
fn normalize_user_prompts_already_prefixed_skipped() {
// A line that already carries ❯ is left alone — no double marking.
let snap = "<!-- agent:exchange patch=append -->\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:exchange patch=append -->\n❯ Already prefixed\n<!-- /agent:exchange -->\n";
let doc = "<!-- agent:exchange patch=append -->\n❯ Already prefixed\n<!-- agent:boundary:abc -->\n<!-- /agent:exchange -->\n";
let result = normalize_user_prompts_in_exchange(doc, base, snap);
assert!(!result.contains("❯ ❯"), "should not double-prefix: {}", result);
assert!(result.contains("❯ Already prefixed"), "prefix should be preserved");
}
#[test]
fn normalize_user_prompts_existing_content_unchanged() {
// Already-normalized history keeps its markers; only the new line is marked.
let snap = "<!-- agent:exchange patch=append -->\n❯ Previous question\n### Re: answer\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:exchange patch=append -->\n❯ Previous question\n### Re: answer\nNew question\n<!-- /agent:exchange -->\n";
let doc = "<!-- agent:exchange patch=append -->\n❯ Previous question\n### Re: answer\nNew question\n<!-- agent:boundary:abc -->\n<!-- /agent:exchange -->\n";
let result = normalize_user_prompts_in_exchange(doc, base, snap);
assert!(!result.contains("❯ ❯"), "should not double-prefix existing content: {}", result);
assert!(result.contains("❯ New question"), "new line should get prefix: {}", result);
}
#[test]
fn normalize_user_prompts_code_fence_skipped() {
// Backtick fence markers and fence interiors must not be marked.
let snap = "<!-- agent:exchange patch=append -->\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:exchange patch=append -->\nSome text.\n```bash\necho hello\n```\n<!-- /agent:exchange -->\n";
let doc = "<!-- agent:exchange patch=append -->\nSome text.\n```bash\necho hello\n```\n<!-- agent:boundary:abc -->\n<!-- /agent:exchange -->\n";
let result = normalize_user_prompts_in_exchange(doc, base, snap);
assert!(!result.contains("❯ ```"), "code fence marker should not get prefix: {}", result);
assert!(!result.contains("❯ echo hello"), "code fence interior should not get prefix: {}", result);
assert!(result.contains("❯ Some text."), "regular user line should get prefix: {}", result);
}
#[test]
fn normalize_user_prompts_code_fence_interior_skipped() {
// Text around a fence is normalized; every line inside it is untouched.
let snap = "<!-- agent:exchange patch=append -->\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:exchange patch=append -->\nQuestion here.\n```rust\nlet x = 1;\nlet y = 2;\n```\nFollow-up.\n<!-- /agent:exchange -->\n";
let doc = "<!-- agent:exchange patch=append -->\nQuestion here.\n```rust\nlet x = 1;\nlet y = 2;\n```\nFollow-up.\n<!-- agent:boundary:abc -->\n<!-- /agent:exchange -->\n";
let result = normalize_user_prompts_in_exchange(doc, base, snap);
assert!(result.contains("❯ Question here."), "text before fence should get prefix: {}", result);
assert!(result.contains("❯ Follow-up."), "text after fence should get prefix: {}", result);
assert!(!result.contains("❯ let x"), "fence interior should not get prefix: {}", result);
assert!(!result.contains("❯ let y"), "fence interior should not get prefix: {}", result);
assert!(!result.contains("❯ ```"), "fence marker should not get prefix: {}", result);
}
#[test]
fn normalize_user_prompts_tilde_fence_interior_skipped() {
// Tilde fences (~~~) are honored the same way as backtick fences.
let snap = "<!-- agent:exchange patch=append -->\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:exchange patch=append -->\nBefore.\n~~~sh\necho hello\n~~~\nAfter.\n<!-- /agent:exchange -->\n";
let doc = "<!-- agent:exchange patch=append -->\nBefore.\n~~~sh\necho hello\n~~~\nAfter.\n<!-- agent:boundary:abc -->\n<!-- /agent:exchange -->\n";
let result = normalize_user_prompts_in_exchange(doc, base, snap);
assert!(result.contains("❯ Before."), "text before tilde fence should get prefix: {result}");
assert!(result.contains("❯ After."), "text after tilde fence should get prefix: {result}");
assert!(!result.contains("❯ echo hello"), "tilde fence interior should not get prefix: {result}");
assert!(!result.contains("❯ ~~~"), "tilde fence marker should not get prefix: {result}");
}
#[test]
fn normalize_user_prompts_quoted_string_skipped() {
// A line that is a quoted string must not receive the ❯ marker.
let snap = "<!-- agent:exchange patch=append -->\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:exchange patch=append -->\n\"Merge conflict with external write\"\n<!-- /agent:exchange -->\n";
let doc = "<!-- agent:exchange patch=append -->\n\"Merge conflict with external write\"\n<!-- agent:boundary:abc -->\n<!-- /agent:exchange -->\n";
let result = normalize_user_prompts_in_exchange(doc, base, snap);
assert!(!result.contains("❯ \""), "quoted string should not get prefix: {}", result);
}
#[test]
fn normalize_patch_content_applies_prefix_to_matching_lines() {
// Lines listed in prefix_lines get ❯; everything else stays as written.
let input = "transferred line 1\ntransferred line 2\n### Re: Response\nAgent answer\n";
let marks = vec!["transferred line 1".to_string(), "transferred line 2".to_string()];
let got = normalize_patch_content(input, &marks);
let want = "❯ transferred line 1\n❯ transferred line 2\n### Re: Response\nAgent answer\n";
assert_eq!(got, want, "prefix lines should get ❯ in patch content");
}
#[test]
fn normalize_patch_content_idempotent_already_prefixed() {
// Re-running normalization must not stack a second ❯ on marked lines.
let input = "❯ already prefixed\nnot prefixed\n";
let marks = vec!["already prefixed".to_string(), "not prefixed".to_string()];
let got = normalize_patch_content(input, &marks);
let want = "❯ already prefixed\n❯ not prefixed\n";
assert_eq!(got, want, "already-prefixed lines should not get double prefix");
}
#[test]
fn normalize_patch_content_empty_prefix_lines_passthrough() {
// With nothing to mark, the content comes back verbatim.
let input = "some line\nanother line\n";
let got = normalize_patch_content(input, &[]);
assert_eq!(got, input, "empty prefix_lines should leave content unchanged");
}
#[test]
fn normalize_patch_content_non_matching_lines_unchanged() {
// Lines absent from prefix_lines are passed through untouched.
let input = "agent response line\n### heading\n";
let marks = vec!["user line".to_string()];
let got = normalize_patch_content(input, &marks);
assert_eq!(got, input, "non-matching lines should pass through unchanged");
}
#[test]
fn normalize_prefix_lines_skipped_for_replace_mode_components() {
// agent:pending is a replace-mode component; its content must never pick
// up the ❯ user-prompt prefix that append-mode components receive.
let pending_content = "- [ ] Build Gutenberg replacement HTML for home page\n- [ ] Update page content\n";
let prefix_lines = vec!["- [ ] Build Gutenberg replacement HTML for home page".to_string()];
// Compute the mode once and reuse it (the original called
// is_append_mode_component("pending") twice).
let is_append = is_append_mode_component("pending");
assert!(!is_append, "pending should not be an append-mode component");
// Mirror production routing: only append-mode components are normalized.
let result = if is_append {
normalize_patch_content(pending_content, &prefix_lines)
} else {
pending_content.to_string()
};
assert_eq!(result, pending_content, "agent:pending content must NOT receive ❯ prefix");
assert!(!result.contains("❯ "), "no ❯ prefix should appear in pending patches");
}
#[test]
fn normalize_user_prompts_no_exchange_passthrough() {
// Without an exchange component there is nothing to normalize.
let doc = "No exchange here.\n";
let base = "No exchange here.\n";
let result = normalize_user_prompts_in_exchange(doc, base, "");
assert_eq!(result, doc, "document without exchange should pass through unchanged");
}
#[test]
fn normalize_user_prompts_restores_prefix_lost_in_file() {
// The snapshot recorded a ❯ prefix on "do" but the on-disk file lost it;
// normalization must restore the prefix instead of leaving the bare line.
let snap = "<!-- agent:exchange patch=append -->\n❯ done\n❯ do\n- [ ] task\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:exchange patch=append -->\n❯ done\ndo\n- [ ] task\n<!-- /agent:exchange -->\n";
let doc = "<!-- agent:exchange patch=append -->\n❯ done\ndo\n- [ ] task\n<!-- agent:boundary:abc123:doc -->\n<!-- /agent:exchange -->\n";
let result = normalize_user_prompts_in_exchange(doc, base, snap);
assert!(result.contains("❯ do"), "❯ prefix must be restored when snapshot had it but file lost it: {}", result);
assert!(!result.contains("\ndo\n"), "bare do line must not remain without prefix: {}", result);
assert!(!result.contains("❯ ❯"), "no double-prefix: {}", result);
}
#[test]
fn normalize_safe_passes_through_under_threshold() {
// A single new line is well under the safety threshold, so the safe
// wrapper still applies normalization.
let tmp = tempfile::TempDir::new().unwrap();
let doc_path = tmp.path().join("doc.md");
std::fs::write(&doc_path, "").unwrap();
let snap = "<!-- agent:exchange patch=append -->\nOld.\n<!-- /agent:exchange -->\n";
let base = "<!-- agent:exchange patch=append -->\nOld.\nHello\n<!-- /agent:exchange -->\n";
let doc = "<!-- agent:exchange patch=append -->\nOld.\nHello\n<!-- agent:boundary:abc -->\n<!-- /agent:exchange -->\n";
let result = normalize_user_prompts_in_exchange_safe(doc, base, snap, &doc_path);
assert!(result.contains("❯ Hello"), "under threshold, ❯ prefix should still be applied: {result}");
}
#[test]
fn normalize_safe_bails_over_threshold() {
// 60 new lines exceed the safety threshold; the safe wrapper must bail
// and return the content unmodified.
let tmp = tempfile::TempDir::new().unwrap();
let doc_path = tmp.path().join("doc.md");
std::fs::write(&doc_path, "").unwrap();
let mut baseline_lines = String::new();
for i in 0..60 {
baseline_lines.push_str(&format!("user line {i}\n"));
}
let content_lines = baseline_lines.clone();
let snapshot = "<!-- agent:exchange patch=append -->\n<!-- /agent:exchange -->\n";
let baseline = format!("<!-- agent:exchange patch=append -->\n{baseline_lines}<!-- /agent:exchange -->\n");
let content = format!("<!-- agent:exchange patch=append -->\n{content_lines}<!-- agent:boundary:abc -->\n<!-- /agent:exchange -->\n");
let result = normalize_user_prompts_in_exchange_safe(&content, &baseline, snapshot, &doc_path);
assert_eq!(result, content, "over threshold, content should pass through unchanged");
assert!(!result.contains("❯ user line"), "no ❯ prefix should be applied when threshold exceeded");
}
#[test]
fn normalize_safe_threshold_exact_boundary() {
// Exactly 50 lines sits at the threshold boundary and must still normalize.
let tmp = tempfile::TempDir::new().unwrap();
let file = tmp.path().join("doc.md");
std::fs::write(&file, "").unwrap();
let lines: String = (0..50).map(|i| format!("line {i}\n")).collect();
let snapshot = "<!-- agent:exchange patch=append -->\n<!-- /agent:exchange -->\n";
let baseline = format!("<!-- agent:exchange patch=append -->\n{lines}<!-- /agent:exchange -->\n");
let content = format!("<!-- agent:exchange patch=append -->\n{lines}<!-- agent:boundary:abc -->\n<!-- /agent:exchange -->\n");
let result = normalize_user_prompts_in_exchange_safe(&content, &baseline, snapshot, &file);
assert!(result.contains("❯ line 0"), "at threshold, first line should get prefix: {result}");
assert!(result.contains("❯ line 49"), "at threshold, last line should get prefix: {result}");
}
}
#[cfg(test)]
mod ack_content_snapshot_tests {
use super::*;
use tempfile::TempDir;
#[test]
fn test_ack_content_sidecar_read() {
// Reading an ack-content sidecar returns its contents and consumes the file.
let tmp = TempDir::new().unwrap();
let root = tmp.path().to_path_buf();
let patch_id = "test-patch-abc123";
let sidecar_dir = root.join(".agent-doc/ack-content");
std::fs::create_dir_all(&sidecar_dir).unwrap();
let sidecar_path = sidecar_dir.join(format!("{patch_id}.md"));
std::fs::write(&sidecar_path, "applied content from plugin").unwrap();
let content = read_ack_content_sidecar(&root, patch_id).unwrap();
assert_eq!(content, Some("applied content from plugin".to_string()));
assert!(!sidecar_path.exists(), "sidecar should be deleted after read");
}
}
#[cfg(test)]
mod submodule_patch_routing_tests {
use super::*;
use std::process::Command;
use tempfile::TempDir;
/// Run a git command in `dir` with hermetic per-invocation config (test
/// identity, `main` default branch, file-protocol submodules allowed, no
/// commit signing) and panic with git's stderr if it fails.
fn git(dir: &Path, args: &[&str]) {
let out = Command::new("git")
.current_dir(dir)
.args([
"-c", "user.email=test@example.com",
"-c", "user.name=Test",
"-c", "init.defaultBranch=main",
"-c", "protocol.file.allow=always",
"-c", "commit.gpgsign=false",
])
.args(args)
.output()
.expect("git command failed to spawn");
assert!(
out.status.success(),
"git {:?} failed: stderr={}",
args,
String::from_utf8_lossy(&out.stderr)
);
}
#[test]
fn resolve_ipc_project_root_returns_superproject_for_submodule_file() {
let parent_dir = TempDir::new().unwrap();
let sub_src_dir = TempDir::new().unwrap();
// Canonicalize so path comparisons are not confused by symlinked temp
// dirs (e.g. /var -> /private/var on macOS).
let parent = parent_dir.path().canonicalize().unwrap();
let sub_src = sub_src_dir.path().canonicalize().unwrap();
// Build the repo that will be mounted as a submodule.
git(&sub_src, &["init"]);
std::fs::write(sub_src.join("README.md"), "sub").unwrap();
git(&sub_src, &["add", "README.md"]);
git(&sub_src, &["commit", "-m", "init"]);
// Build the superproject and mount the submodule at src/submodule.
git(&parent, &["init"]);
std::fs::write(parent.join("README.md"), "parent").unwrap();
git(&parent, &["add", "README.md"]);
git(&parent, &["commit", "-m", "init"]);
git(&parent, &[
"submodule", "add",
sub_src.to_string_lossy().as_ref(),
"src/submodule",
]);
std::fs::create_dir_all(parent.join(".agent-doc/patches")).unwrap();
let doc = parent.join("src/submodule/test.md");
std::fs::write(&doc, "---\n---\n\n<!-- agent:exchange -->c<!-- /agent:exchange -->\n").unwrap();
let canonical = doc.canonicalize().unwrap();
let project_root = resolve_ipc_project_root(&canonical);
assert_eq!(
project_root, parent,
"submodule file must resolve to parent (superproject) for IPC patch routing"
);
let submodule_toplevel = parent.join("src/submodule");
assert_ne!(
project_root, submodule_toplevel,
"must not return the submodule toplevel — IDE plugin only watches parent"
);
}
#[ignore]
#[test]
fn try_ipc_routes_to_superproject_when_available() {
let parent_dir = TempDir::new().unwrap();
let submodule_dir = TempDir::new().unwrap();
let parent = parent_dir.path();
let submodule = submodule_dir.path();
// Use the checked `git` helper: it asserts success and passes
// `protocol.file.allow=always`, without which `git submodule add <path>`
// is rejected by git >= 2.38 — the previous raw Command calls ignored
// the exit status, so that failure went unnoticed.
git(parent, &["init"]);
git(submodule, &["init"]);
let parent_agent_doc = parent.join(".agent-doc");
std::fs::create_dir_all(parent_agent_doc.join("patches")).unwrap();
std::fs::create_dir_all(parent_agent_doc.join("snapshots")).unwrap();
let test_file = submodule.join("test.txt");
std::fs::write(&test_file, "submodule content").unwrap();
git(submodule, &["add", "test.txt"]);
git(submodule, &["commit", "-m", "initial"]);
git(parent, &["submodule", "add", submodule.to_string_lossy().as_ref(), "src/submodule"]);
let submodule_src = parent.join("src/submodule");
let doc_in_submodule = submodule_src.join("test.md");
std::fs::create_dir_all(&submodule_src).ok();
std::fs::write(&doc_in_submodule, "---\nsession: test\n---\n\nContent\n").unwrap();
std::fs::create_dir_all(parent_agent_doc.join("crdt")).unwrap();
let patch = crate::template::PatchBlock::new("exchange", "test response");
// The boolean outcome is not the contract under test; the on-disk patch
// location asserted below is.
let _ = try_ipc(&doc_in_submodule, &[patch], "", None, None, None, None).unwrap_or(false);
let parent_patches = parent.join(".agent-doc/patches");
let entries: Vec<_> = std::fs::read_dir(&parent_patches)
.ok()
.map(|rd| rd.filter_map(|e| e.ok()).collect())
.unwrap_or_default();
assert!(
!entries.is_empty(),
"patch file should be written to parent's .agent-doc/patches directory for submodule documents"
);
let submodule_patches = submodule_src.join(".agent-doc/patches");
assert!(
!submodule_patches.exists(),
"submodule should NOT have its own .agent-doc/patches directory when parent handles routing"
);
}
#[ignore]
#[test]
fn try_ipc_falls_back_to_find_project_root_when_not_in_git() {
let dir = TempDir::new().unwrap();
let agent_doc_dir = dir.path().join(".agent-doc");
std::fs::create_dir_all(agent_doc_dir.join("patches")).unwrap();
std::fs::create_dir_all(agent_doc_dir.join("snapshots")).unwrap();
let doc = dir.path().join("test.md");
std::fs::write(&doc, "---\nsession: test\n---\n\n<!-- agent:exchange -->content<!-- /agent:exchange -->\n")
.unwrap();
let patch = crate::template::PatchBlock::new("exchange", "response");
// Return value intentionally ignored; the patches directory is the contract.
let _ = try_ipc(&doc, &[patch], "", None, None, None, None).unwrap_or(false);
let patches_dir = agent_doc_dir.join("patches");
let entries: Vec<_> = std::fs::read_dir(&patches_dir)
.ok()
.map(|rd| rd.filter_map(|e| e.ok()).collect())
.unwrap_or_default();
assert!(
!entries.is_empty(),
"patch file should be written via find_project_root fallback when not in git"
);
}
}