use anyhow::{Context, Result};
use fs2::FileExt;
use std::fs::OpenOptions;
use std::io::Read;
use std::path::Path;
use crate::{component, frontmatter, merge, recover, sessions, snapshot, template};
use crate::snapshot::find_project_root;
/// Scans the body of the named component in `doc` for the first
/// `<!-- agent:boundary:<id> -->` marker that is not inside a code range and
/// returns the extracted id.
///
/// Returns `None` when the document fails to parse, the component is absent,
/// or no usable marker is found. Markers that fall inside code ranges (per
/// `component::find_code_ranges`) are skipped; the first marker outside code
/// that is malformed (no closing ` -->`) or has an empty id ends the search.
fn find_boundary_id(doc: &str, component_name: &str) -> Option<String> {
    let components = component::parse(doc).ok()?;
    let comp = components.iter().find(|c| c.name == component_name)?;
    // Only search the component's inner content, between its open and close tags.
    let content = &doc[comp.open_end..comp.close_start];
    let code_ranges = component::find_code_ranges(doc);
    let prefix = "<!-- agent:boundary:";
    let suffix = " -->";
    let mut search_from = 0;
    while let Some(start) = content[search_from..].find(prefix) {
        // Absolute byte offset in `doc`, needed to test against code ranges.
        let abs_start = comp.open_end + search_from + start;
        if code_ranges.iter().any(|&(cs, ce)| abs_start >= cs && abs_start < ce) {
            // Marker sits inside a code range — skip past it and keep looking.
            search_from += start + prefix.len();
            continue;
        }
        // Offset of the id within `content`, just past the prefix.
        let id_start = search_from + start + prefix.len();
        if let Some(end) = content[id_start..].find(suffix) {
            let id = &content[id_start..id_start + end];
            if !id.is_empty() {
                return Some(id.to_string());
            }
        }
        // First non-code marker was malformed or empty — stop searching.
        break;
    }
    None
}
/// Returns `true` for components whose patches append rather than replace.
fn is_append_mode_component(name: &str) -> bool {
    // Keep this list in sync with the plugin's append-mode components.
    const APPEND_MODE_COMPONENTS: [&str; 2] = ["exchange", "findings"];
    APPEND_MODE_COMPONENTS.contains(&name)
}
/// Logs a deduplicated-write event to stderr and appends a timestamped
/// record (including a backtrace) to a debug log in the system temp
/// directory.
///
/// Best-effort: failures opening or writing the log file are ignored, since
/// this path is diagnostic-only.
fn log_dedup(file: &Path, context: &str) {
    let msg = format!("[write] dedup: {} — {}", file.display(), context);
    eprintln!("{}", msg);
    use std::io::Write;
    // Use the platform temp directory instead of a hard-coded "/tmp" so this
    // also works on non-Unix targets.
    let log_path = std::env::temp_dir().join("agent-doc-write-dedup.log");
    if let Ok(mut f) = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(&log_path)
    {
        // Seconds since the Unix epoch; 0 if the clock reads before the epoch.
        let ts = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .map(|d| d.as_secs())
            .unwrap_or(0);
        // force_capture is expensive, but acceptable on this cold path and
        // invaluable for tracking down spurious dedups.
        let bt = std::backtrace::Backtrace::force_capture();
        let _ = writeln!(f, "[{}] {} backtrace:\n{}", ts, msg, bt);
    }
}
/// Ensures the current tmux pane owns the session recorded in `file`'s
/// frontmatter, erroring on a mismatch.
///
/// Deliberately permissive: outside tmux, or whenever the file, frontmatter,
/// session registry entry, or current pane cannot be determined, ownership
/// is assumed to be fine and `Ok(())` is returned.
fn verify_pane_ownership(file: &Path) -> Result<()> {
    if !sessions::in_tmux() {
        return Ok(());
    }
    let Ok(content) = std::fs::read_to_string(file) else {
        return Ok(());
    };
    let Ok((fm, _)) = frontmatter::parse(&content) else {
        return Ok(());
    };
    let Some(session_id) = fm.session else {
        return Ok(());
    };
    let Ok(Some(entry)) = sessions::lookup_entry(&session_id) else {
        return Ok(());
    };
    let Ok(current) = sessions::current_pane() else {
        return Ok(());
    };
    if entry.pane == current {
        return Ok(());
    }
    anyhow::bail!(
        "pane ownership mismatch: session {} owned by pane {}, current pane is {}. \
         Use `agent-doc claim` to reclaim.",
        session_id,
        entry.pane,
        current
    )
}
/// Append-mode write: reads the assistant response from stdin and appends it
/// to `file` as a `## Assistant` section followed by an empty `## User`
/// prompt, three-way-merging if the file changed while the response was
/// being generated.
///
/// `baseline` is the document content the response was generated against;
/// when `None`, the file's current content is used as the merge base.
pub fn run(file: &Path, baseline: Option<&str>) -> Result<()> {
    if !file.exists() {
        anyhow::bail!("file not found: {}", file.display());
    }
    verify_pane_ownership(file)?;
    let mut response = String::new();
    std::io::stdin()
        .read_to_string(&mut response)
        .context("failed to read response from stdin")?;
    if response.trim().is_empty() {
        anyhow::bail!("empty response — nothing to write");
    }
    let response = strip_assistant_heading(&response);
    let content_at_start = std::fs::read_to_string(file)
        .with_context(|| format!("failed to read {}", file.display()))?;
    let base = baseline.unwrap_or(&content_at_start);
    // Persist the response before mutating anything, for crash recovery.
    recover::save_pending(file, &response)?;
    snapshot::save_pre_response(file, base)?;
    // Build "our" version: base + Assistant section + fresh User prompt.
    let mut content_ours = base.to_string();
    if !content_ours.ends_with('\n') {
        content_ours.push('\n');
    }
    content_ours.push_str("## Assistant\n\n");
    content_ours.push_str(&response);
    if !response.ends_with('\n') {
        content_ours.push('\n');
    }
    content_ours.push_str("\n## User\n\n");
    // Take the lock, then re-read: the file may have changed since `base`.
    let doc_lock = acquire_doc_lock(file)?;
    let content_current = std::fs::read_to_string(file)
        .with_context(|| format!("failed to re-read {}", file.display()))?;
    let final_content = if content_current == base {
        content_ours.clone()
    } else {
        eprintln!("[write] File was modified during response generation. Merging...");
        merge::merge_contents(base, &content_ours, &content_current)?
    };
    if final_content == content_current {
        // Merge produced no change — skip the write to avoid churn.
        log_dedup(file, "no changes after merge, skipping write");
        drop(doc_lock);
        recover::clear_pending(file)?;
        return Ok(());
    }
    atomic_write(file, &final_content)?;
    snapshot::save(file, &content_ours)?;
    drop(doc_lock);
    recover::clear_pending(file)?;
    eprintln!("[write] Response appended to {}", file.display());
    Ok(())
}
/// Template-mode write: reads the response from stdin, parses component
/// patch blocks out of it, and applies them to `file`, three-way-merging if
/// the file changed while the response was being generated.
pub fn run_template(file: &Path, baseline: Option<&str>) -> Result<()> {
    if !file.exists() {
        anyhow::bail!("file not found: {}", file.display());
    }
    verify_pane_ownership(file)?;
    let mut response = String::new();
    std::io::stdin()
        .read_to_string(&mut response)
        .context("failed to read response from stdin")?;
    if response.trim().is_empty() {
        anyhow::bail!("empty response — nothing to write");
    }
    // Persist the raw response before mutating anything, for crash recovery.
    recover::save_pending(file, &response)?;
    let (mut patches, unmatched) = template::parse_patches(&response)
        .context("failed to parse patch blocks from response")?;
    sanitize_patches(&mut patches);
    if patches.is_empty() && unmatched.trim().is_empty() {
        anyhow::bail!("no patch blocks or content found in response");
    }
    let content_at_start = std::fs::read_to_string(file)
        .with_context(|| format!("failed to read {}", file.display()))?;
    let base = baseline.unwrap_or(&content_at_start);
    snapshot::save_pre_response(file, base)?;
    let content_ours = template::apply_patches(base, &patches, &unmatched, file)
        .context("failed to apply template patches")?;
    // Take the lock, then re-read: the file may have changed since `base`.
    let doc_lock = acquire_doc_lock(file)?;
    let content_current = std::fs::read_to_string(file)
        .with_context(|| format!("failed to re-read {}", file.display()))?;
    let final_content = if content_current == base {
        content_ours.clone()
    } else {
        eprintln!("[write] File was modified during response generation. Merging...");
        merge::merge_contents(base, &content_ours, &content_current)?
    };
    if final_content == content_current {
        // Merge produced no change — skip the write to avoid churn.
        log_dedup(file, "no changes after merge, skipping write");
        drop(doc_lock);
        recover::clear_pending(file)?;
        return Ok(());
    }
    atomic_write(file, &final_content)?;
    snapshot::save(file, &content_ours)?;
    drop(doc_lock);
    recover::clear_pending(file)?;
    eprintln!(
        "[write] Template patches applied to {} ({} components patched)",
        file.display(),
        patches.len()
    );
    Ok(())
}
/// Stream-mode write: parses patch blocks from the stdin response and
/// applies them, preferring IPC delivery to an editor plugin over a direct
/// disk write.
///
/// Flow:
/// 1. Persist the raw response for crash recovery, then snapshot the
///    pre-response document state.
/// 2. Unless `force_disk` is set, and a `.agent-doc/patches` dir exists,
///    attempt IPC via `try_ipc`; on IPC failure/timeout, leave the patch
///    payload on disk for the plugin and exit with code 75 (EX_TEMPFAIL)
///    WITHOUT clearing the pending response (so it can still be recovered).
/// 3. With `force_disk`, remove any stale IPC patch file (to prevent a
///    double-apply) and perform a locked, CRDT-merged disk write.
///
/// `[perf]` lines are emitted for any stage that takes a measurable (>0ms)
/// amount of time.
pub fn run_stream(file: &Path, baseline: Option<&str>, force_disk: bool) -> Result<()> {
    let t_total = std::time::Instant::now();
    if !file.exists() {
        anyhow::bail!("file not found: {}", file.display());
    }
    verify_pane_ownership(file)?;
    let mut response = String::new();
    std::io::stdin()
        .read_to_string(&mut response)
        .context("failed to read response from stdin")?;
    if response.trim().is_empty() {
        anyhow::bail!("empty response — nothing to write");
    }
    // Persist the raw response before mutating anything.
    recover::save_pending(file, &response)?;
    let (mut patches, unmatched) = template::parse_patches(&response)
        .context("failed to parse patch blocks from response")?;
    sanitize_patches(&mut patches);
    if patches.is_empty() && unmatched.trim().is_empty() {
        anyhow::bail!("no patch blocks or content found in response");
    }
    // Snapshot the on-disk state before any mutation.
    {
        let pre_content = std::fs::read_to_string(file)
            .with_context(|| format!("failed to read {} for pre-response", file.display()))?;
        snapshot::save_pre_response(file, &pre_content)?;
    }
    if !force_disk {
        let canonical = file.canonicalize()?;
        let project_root = find_project_root(&canonical)
            .unwrap_or_else(|| canonical.parent().unwrap_or(Path::new(".")).to_path_buf());
        let patches_dir = project_root.join(".agent-doc/patches");
        if patches_dir.exists() {
            let content_at_start = std::fs::read_to_string(file)
                .with_context(|| format!("failed to read {}", file.display()))?;
            let base = baseline.unwrap_or(&content_at_start);
            let mode_overrides = std::collections::HashMap::new();
            let t_apply = std::time::Instant::now();
            // Compute the post-apply document up front so try_ipc can use it
            // as the snapshot content on success.
            let content_ours = template::apply_patches_with_overrides(
                base, &patches, &unmatched, file, &mode_overrides,
            ).context("failed to apply patches for snapshot")?;
            let elapsed_apply = t_apply.elapsed().as_millis();
            if elapsed_apply > 0 {
                eprintln!("[perf] apply_patches_with_overrides: {}ms", elapsed_apply);
            }
            if content_ours == content_at_start {
                log_dedup(file, "no changes after merge, skipping write");
                recover::clear_pending(file)?;
                return Ok(());
            }
            let t_ipc = std::time::Instant::now();
            // NOTE(review): frontmatter patches are not forwarded separately
            // here (fourth arg is None) — confirm apply_patches_with_overrides
            // already accounts for them in `content_ours`.
            if try_ipc(file, &patches, &unmatched, None, baseline, Some(&content_ours))? {
                let elapsed_ipc = t_ipc.elapsed().as_millis();
                if elapsed_ipc > 0 {
                    eprintln!("[perf] try_ipc: {}ms", elapsed_ipc);
                }
                let elapsed_total = t_total.elapsed().as_millis();
                if elapsed_total > 0 {
                    eprintln!("[perf] run_stream total: {}ms", elapsed_total);
                }
                recover::clear_pending(file)?;
                return Ok(());
            }
            // IPC did not complete: leave a patch file for the plugin to pick
            // up later and signal a temporary failure to the caller.
            let hash = snapshot::doc_hash(file)?;
            let patch_file = patches_dir.join(format!("{}.json", hash));
            let raw_doc = std::fs::read_to_string(file).unwrap_or_default();
            // Resolve boundary ids against the doc as it would look after the
            // boundary marker is repositioned to the end.
            let current_doc_for_boundary = template::reposition_boundary_to_end_with_summary(
                &raw_doc,
                file.file_stem().and_then(|s| s.to_str()),
            );
            let ipc_patches: Vec<serde_json::Value> = patches
                .iter()
                .filter(|p| p.name != "frontmatter")
                .map(|p| {
                    let mut patch_json = serde_json::json!({
                        "component": p.name,
                        "content": p.content,
                    });
                    // Fixed: the argument had been corrupted by HTML-entity
                    // decoding ("&curren" -> "¤"); it must borrow
                    // `current_doc_for_boundary`.
                    if let Some(bid) = find_boundary_id(&current_doc_for_boundary, &p.name) {
                        patch_json["boundary_id"] = serde_json::Value::String(bid);
                    } else if is_append_mode_component(&p.name) {
                        patch_json["ensure_boundary"] = serde_json::Value::Bool(true);
                    }
                    patch_json
                })
                .collect();
            let mut ipc_payload = serde_json::json!({
                "file": canonical.to_string_lossy(),
                "patches": ipc_patches,
                "unmatched": unmatched.trim(),
                "baseline": baseline.unwrap_or(""),
            });
            // Frontmatter travels as a dedicated payload field, not a patch.
            let frontmatter_yaml: Option<String> = patches
                .iter()
                .find(|p| p.name == "frontmatter")
                .map(|p| p.content.trim().to_string());
            if let Some(ref yaml) = frontmatter_yaml {
                ipc_payload["frontmatter"] = serde_json::Value::String(yaml.clone());
            }
            atomic_write(
                &patch_file,
                &serde_json::to_string_pretty(&ipc_payload)?,
            )?;
            eprintln!("[write] IPC timeout — response saved as patch, awaiting plugin");
            // EX_TEMPFAIL. The pending response is deliberately NOT cleared:
            // if the plugin never applies the patch it can still be recovered.
            std::process::exit(75);
        }
    }
    // force_disk: remove any stale IPC patch file so the plugin cannot apply
    // the same response a second time after our direct write.
    if force_disk
        && let Ok(canonical) = file.canonicalize()
    {
        let project_root = find_project_root(&canonical)
            .unwrap_or_else(|| canonical.parent().unwrap_or(Path::new(".")).to_path_buf());
        let patches_dir = project_root.join(".agent-doc/patches");
        if let Ok(hash) = snapshot::doc_hash(file) {
            let patch_file = patches_dir.join(format!("{}.json", hash));
            if patch_file.exists() {
                eprintln!("[write] cleaning stale IPC patch file to prevent double-write");
                let _ = std::fs::remove_file(&patch_file);
            }
        }
    }
    // Direct disk path: apply patches, lock, CRDT-merge, write atomically.
    let t_disk = std::time::Instant::now();
    let content_at_start = std::fs::read_to_string(file)
        .with_context(|| format!("failed to read {}", file.display()))?;
    let base = baseline.unwrap_or(&content_at_start);
    let mode_overrides = std::collections::HashMap::new();
    let t_apply2 = std::time::Instant::now();
    let mut content_ours = template::apply_patches_with_overrides(
        base, &patches, &unmatched, file, &mode_overrides,
    ).context("failed to apply template patches")?;
    let elapsed_apply2 = t_apply2.elapsed().as_millis();
    if elapsed_apply2 > 0 {
        eprintln!("[perf] apply_patches_with_overrides (disk): {}ms", elapsed_apply2);
    }
    // Frontmatter patches are merged field-wise rather than applied as a
    // component patch.
    if let Some(fm_patch) = patches.iter().find(|p| p.name == "frontmatter") {
        content_ours = crate::frontmatter::merge_fields(&content_ours, &fm_patch.content)
            .context("failed to merge frontmatter patch")?;
    }
    let doc_lock = acquire_doc_lock(file)?;
    let content_current = std::fs::read_to_string(file)
        .with_context(|| format!("failed to re-read {}", file.display()))?;
    let (final_content, crdt_state) = if content_current == base {
        let doc = crate::crdt::CrdtDoc::from_text(&content_ours);
        (content_ours.clone(), doc.encode_state())
    } else {
        eprintln!("[write] File was modified during response generation. CRDT merging...");
        let base_state = crate::crdt::CrdtDoc::from_text(base).encode_state();
        merge::merge_contents_crdt(Some(&base_state), &content_ours, &content_current)?
    };
    if final_content == content_current {
        log_dedup(file, "no changes after merge, skipping write");
        drop(doc_lock);
        recover::clear_pending(file)?;
        let elapsed_total = t_total.elapsed().as_millis();
        if elapsed_total > 0 {
            eprintln!("[perf] run_stream total: {}ms", elapsed_total);
        }
        return Ok(());
    }
    atomic_write(file, &final_content)?;
    snapshot::save(file, &content_ours)?;
    snapshot::save_crdt(file, &crdt_state)?;
    drop(doc_lock);
    recover::clear_pending(file)?;
    let elapsed_disk = t_disk.elapsed().as_millis();
    if elapsed_disk > 0 {
        eprintln!("[perf] disk_write_path: {}ms", elapsed_disk);
    }
    let elapsed_total = t_total.elapsed().as_millis();
    if elapsed_total > 0 {
        eprintln!("[perf] run_stream total: {}ms", elapsed_total);
    }
    eprintln!(
        "[write] Stream patches applied to {} ({} components patched, CRDT)",
        file.display(),
        patches.len()
    );
    Ok(())
}
/// IPC-mode write: parses patch blocks from stdin and hands them to the
/// editor plugin via a patch file, polling for consumption.
///
/// If the plugin consumes the patch (deletes the file) within 2s, the text
/// and CRDT snapshots are refreshed from the resulting document; otherwise
/// the patch file is removed and the patches are applied directly to disk
/// under the doc lock with a CRDT merge.
pub fn run_ipc(file: &Path, baseline: Option<&str>) -> Result<()> {
    if !file.exists() {
        anyhow::bail!("file not found: {}", file.display());
    }
    let mut response = String::new();
    std::io::stdin()
        .read_to_string(&mut response)
        .context("failed to read response from stdin")?;
    if response.trim().is_empty() {
        anyhow::bail!("empty response — nothing to write");
    }
    // Persist the raw response before mutating anything, for crash recovery.
    recover::save_pending(file, &response)?;
    let (mut patches, unmatched) = template::parse_patches(&response)
        .context("failed to parse patch blocks from response")?;
    sanitize_patches(&mut patches);
    if patches.is_empty() && unmatched.trim().is_empty() {
        anyhow::bail!("no patch blocks or content found in response");
    }
    let canonical = file.canonicalize()?;
    let hash = snapshot::doc_hash(file)?;
    let project_root = find_project_root(&canonical)
        .unwrap_or_else(|| canonical.parent().unwrap_or(Path::new(".")).to_path_buf());
    let patches_dir = project_root.join(".agent-doc/patches");
    std::fs::create_dir_all(&patches_dir)?;
    let patch_file = patches_dir.join(format!("{}.json", hash));
    let raw_doc = std::fs::read_to_string(file).unwrap_or_default();
    // Resolve boundary ids against the doc as it would look after the
    // boundary marker is repositioned to the end.
    let current_doc_for_boundary = template::reposition_boundary_to_end_with_summary(
        &raw_doc,
        file.file_stem().and_then(|s| s.to_str()),
    );
    // Frontmatter patches travel in a dedicated payload field, not as a
    // component patch; the closure below peels them off.
    let mut frontmatter_yaml: Option<String> = None;
    let ipc_patches: Vec<serde_json::Value> = patches
        .iter()
        .filter_map(|p| {
            if p.name == "frontmatter" {
                frontmatter_yaml = Some(p.content.trim().to_string());
                None
            } else {
                let mut patch_json = serde_json::json!({
                    "component": p.name,
                    "content": p.content,
                });
                // Fixed: the argument had been corrupted by HTML-entity
                // decoding ("&curren" -> "¤"); it must borrow
                // `current_doc_for_boundary`.
                if let Some(bid) = find_boundary_id(&current_doc_for_boundary, &p.name) {
                    patch_json["boundary_id"] = serde_json::Value::String(bid);
                } else if is_append_mode_component(&p.name) {
                    patch_json["ensure_boundary"] = serde_json::Value::Bool(true);
                }
                Some(patch_json)
            }
        })
        .collect();
    let mut ipc_payload = serde_json::json!({
        "file": canonical.to_string_lossy(),
        "patches": ipc_patches,
        "unmatched": unmatched.trim(),
        "baseline": baseline.unwrap_or(""),
    });
    if let Some(ref yaml) = frontmatter_yaml {
        ipc_payload["frontmatter"] = serde_json::Value::String(yaml.clone());
    }
    atomic_write(
        &patch_file,
        &serde_json::to_string_pretty(&ipc_payload)?,
    )?;
    eprintln!(
        "[write] IPC patch written to {} ({} components)",
        patch_file.display(),
        patches.len()
    );
    // Poll for the plugin to consume (delete) the patch file.
    let timeout = std::time::Duration::from_secs(2);
    let poll_interval = std::time::Duration::from_millis(100);
    let start = std::time::Instant::now();
    while start.elapsed() < timeout {
        if !patch_file.exists() {
            let content = std::fs::read_to_string(file)
                .with_context(|| format!("failed to read {} after IPC", file.display()))?;
            snapshot::save(file, &content)?;
            let crdt_doc = crate::crdt::CrdtDoc::from_text(&content);
            snapshot::save_crdt(file, &crdt_doc.encode_state())?;
            recover::clear_pending(file)?;
            eprintln!("[write] IPC patch consumed by plugin — snapshot updated");
            return Ok(());
        }
        std::thread::sleep(poll_interval);
    }
    // Timeout: remove our patch file and apply directly with a CRDT merge.
    eprintln!("[write] IPC timeout ({}s) — falling back to direct write", timeout.as_secs());
    let _ = std::fs::remove_file(&patch_file);
    let content_at_start = std::fs::read_to_string(file)
        .with_context(|| format!("failed to read {}", file.display()))?;
    let base = baseline.unwrap_or(&content_at_start);
    let mut content_ours = template::apply_patches(base, &patches, &unmatched, file)
        .context("failed to apply template patches")?;
    if let Some(ref yaml) = frontmatter_yaml {
        content_ours = crate::frontmatter::merge_fields(&content_ours, yaml)
            .context("failed to apply frontmatter patch")?;
    }
    let doc_lock = acquire_doc_lock(file)?;
    let content_current = std::fs::read_to_string(file)
        .with_context(|| format!("failed to re-read {}", file.display()))?;
    let (final_content, crdt_state) = if content_current == base {
        let doc = crate::crdt::CrdtDoc::from_text(&content_ours);
        (content_ours.clone(), doc.encode_state())
    } else {
        eprintln!("[write] File was modified during response generation. CRDT merging...");
        let crdt_state = snapshot::load_crdt(file)?;
        merge::merge_contents_crdt(crdt_state.as_deref(), &content_ours, &content_current)?
    };
    atomic_write(file, &final_content)?;
    snapshot::save(file, &content_ours)?;
    snapshot::save_crdt(file, &crdt_state)?;
    drop(doc_lock);
    recover::clear_pending(file)?;
    eprintln!(
        "[write] Stream patches applied to {} ({} components patched, CRDT fallback)",
        file.display(),
        patches.len()
    );
    Ok(())
}
/// Applies stream patches from an in-memory `response` to `file`, with a
/// CRDT merge if the file changes between the initial read and the locked
/// re-read. Unlike `run_stream`, this does not manage pending-response
/// state, pre-response snapshots, or IPC.
#[allow(dead_code)]
pub fn apply_stream_from_string(file: &Path, response: &str) -> Result<()> {
    let content = std::fs::read_to_string(file)
        .with_context(|| format!("failed to read {}", file.display()))?;
    let (mut patches, unmatched) = template::parse_patches(response)
        .context("failed to parse patch blocks from response")?;
    sanitize_patches(&mut patches);
    let content_ours = template::apply_patches(&content, &patches, &unmatched, file)
        .context("failed to apply template patches")?;
    // Take the lock, then re-read to detect concurrent modification.
    let doc_lock = acquire_doc_lock(file)?;
    let content_current = std::fs::read_to_string(file)
        .with_context(|| format!("failed to re-read {}", file.display()))?;
    let (final_content, crdt_state) = if content_current == content {
        let doc = crate::crdt::CrdtDoc::from_text(&content_ours);
        (content_ours.clone(), doc.encode_state())
    } else {
        let crdt_state = snapshot::load_crdt(file)?;
        merge::merge_contents_crdt(crdt_state.as_deref(), &content_ours, &content_current)?
    };
    atomic_write(file, &final_content)?;
    snapshot::save(file, &content_ours)?;
    snapshot::save_crdt(file, &crdt_state)?;
    drop(doc_lock);
    eprintln!("[write] Stream patches applied to {}", file.display());
    Ok(())
}
/// Appends an in-memory `response` to `file` as a `## Assistant` section
/// followed by an empty `## User` prompt, merging if the file changes
/// between the initial read and the locked re-read. Unlike `run`, this does
/// not manage pending-response state or pre-response snapshots.
pub fn apply_append_from_string(file: &Path, response: &str) -> Result<()> {
    let response = strip_assistant_heading(response);
    let content = std::fs::read_to_string(file)
        .with_context(|| format!("failed to read {}", file.display()))?;
    // Build "our" version: current content + Assistant section + User prompt.
    let mut content_ours = content.clone();
    if !content_ours.ends_with('\n') {
        content_ours.push('\n');
    }
    content_ours.push_str("## Assistant\n\n");
    content_ours.push_str(&response);
    if !response.ends_with('\n') {
        content_ours.push('\n');
    }
    content_ours.push_str("\n## User\n\n");
    // Take the lock, then re-read to detect concurrent modification.
    let doc_lock = acquire_doc_lock(file)?;
    let content_current = std::fs::read_to_string(file)
        .with_context(|| format!("failed to re-read {}", file.display()))?;
    let final_content = if content_current == content {
        content_ours.clone()
    } else {
        merge::merge_contents(&content, &content_ours, &content_current)?
    };
    atomic_write(file, &final_content)?;
    snapshot::save(file, &content_ours)?;
    drop(doc_lock);
    eprintln!("[write] Response appended to {}", file.display());
    Ok(())
}
/// Applies template patches from an in-memory `response` to `file`, with a
/// three-way text merge if the file changes between the initial read and the
/// locked re-read. Unlike `run_template`, this does not manage
/// pending-response state or pre-response snapshots.
pub fn apply_template_from_string(file: &Path, response: &str) -> Result<()> {
    let content = std::fs::read_to_string(file)
        .with_context(|| format!("failed to read {}", file.display()))?;
    let (mut patches, unmatched) = template::parse_patches(response)
        .context("failed to parse patch blocks from response")?;
    sanitize_patches(&mut patches);
    let content_ours = template::apply_patches(&content, &patches, &unmatched, file)
        .context("failed to apply template patches")?;
    // Take the lock, then re-read to detect concurrent modification.
    let doc_lock = acquire_doc_lock(file)?;
    let content_current = std::fs::read_to_string(file)
        .with_context(|| format!("failed to re-read {}", file.display()))?;
    let final_content = if content_current == content {
        content_ours.clone()
    } else {
        merge::merge_contents(&content, &content_ours, &content_current)?
    };
    atomic_write(file, &final_content)?;
    snapshot::save(file, &content_ours)?;
    drop(doc_lock);
    eprintln!("[write] Template patches applied to {}", file.display());
    Ok(())
}
/// Attempts to deliver patches to the editor plugin: first over the IPC
/// socket (when a listener is active), then via a patch file in
/// `.agent-doc/patches` with a consumption poll.
///
/// Returns `Ok(true)` when the plugin applied the patches (text and CRDT
/// snapshots are refreshed from `content_ours`, or from disk when it is
/// `None`); `Ok(false)` when the caller should fall back to a direct disk
/// write (no patches dir, timeout, or post-consumption verification failed).
pub fn try_ipc(
    file: &Path,
    patches: &[crate::template::PatchBlock],
    unmatched: &str,
    frontmatter_yaml: Option<&str>,
    baseline: Option<&str>,
    content_ours: Option<&str>,
) -> Result<bool> {
    let canonical = file.canonicalize()?;
    let hash = snapshot::doc_hash(file)?;
    let project_root = find_project_root(&canonical)
        .unwrap_or_else(|| canonical.parent().unwrap_or(Path::new(".")).to_path_buf());
    // Fast path: a live socket listener.
    if crate::ipc_socket::is_listener_active(&project_root) {
        let ipc_patches_json = build_ipc_patches_json(file, patches, unmatched)?;
        let mut socket_payload = serde_json::json!({
            "type": "patch",
            "file": canonical.to_string_lossy(),
            "patches": ipc_patches_json,
            "unmatched": unmatched.trim(),
            "baseline": baseline.unwrap_or(""),
            "reposition_boundary": true,
        });
        if let Some(yaml) = frontmatter_yaml {
            socket_payload["frontmatter"] = serde_json::Value::String(yaml.to_string());
        }
        match crate::ipc_socket::send_message(&project_root, &socket_payload) {
            Ok(Some(_ack)) => {
                eprintln!("[write] socket IPC patch delivered");
                // Snapshot our intended content, or whatever the plugin left
                // on disk when we don't have it.
                let snap_content = if let Some(ours) = content_ours {
                    ours.to_string()
                } else {
                    std::fs::read_to_string(file)
                        .with_context(|| format!("failed to read {} after socket IPC", file.display()))?
                };
                snapshot::save(file, &snap_content)?;
                let crdt_doc = crate::crdt::CrdtDoc::from_text(&snap_content);
                snapshot::save_crdt(file, &crdt_doc.encode_state())?;
                return Ok(true);
            }
            Ok(None) => {
                eprintln!("[write] socket IPC sent but no ack — falling back to file IPC");
            }
            Err(e) => {
                eprintln!("[write] socket IPC failed: {} — falling back to file IPC", e);
            }
        }
    }
    // Slow path: drop a patch file and poll for the plugin to consume it.
    let patches_dir = project_root.join(".agent-doc/patches");
    if !patches_dir.exists() {
        return Ok(false);
    }
    let patch_file = patches_dir.join(format!("{}.json", hash));
    let ipc_patches = build_ipc_patches_json(file, patches, unmatched)?;
    let mut ipc_payload = serde_json::json!({
        "file": canonical.to_string_lossy(),
        "patches": ipc_patches,
        "unmatched": unmatched.trim(),
        "baseline": baseline.unwrap_or(""),
        "reposition_boundary": true,
    });
    if let Some(yaml) = frontmatter_yaml {
        ipc_payload["frontmatter"] = serde_json::Value::String(yaml.to_string());
    }
    write_ipc_and_poll(&patch_file, &ipc_payload, file, patches.len(), content_ours)
}
/// Delivers a complete replacement document (`fullContent`) to the plugin:
/// first over the IPC socket, then via a patch file with a consumption poll.
///
/// Returns `Ok(true)` when the plugin took the content (snapshots updated),
/// `Ok(false)` when the caller should write to disk directly.
pub fn try_ipc_full_content(
    file: &Path,
    content: &str,
) -> Result<bool> {
    let canonical = file.canonicalize()?;
    let project_root = find_project_root(&canonical)
        .unwrap_or_else(|| canonical.parent().unwrap_or(Path::new(".")).to_path_buf());
    // Fast path: a live socket listener. Empty `patches`/`unmatched` plus
    // `fullContent` signals a whole-document replacement.
    if crate::ipc_socket::is_listener_active(&project_root) {
        let socket_payload = serde_json::json!({
            "type": "patch",
            "file": canonical.to_string_lossy(),
            "patches": [],
            "unmatched": "",
            "fullContent": content,
        });
        match crate::ipc_socket::send_message(&project_root, &socket_payload) {
            Ok(Some(_ack)) => {
                eprintln!("[write] socket IPC full content delivered");
                snapshot::save(file, content)?;
                let crdt_doc = crate::crdt::CrdtDoc::from_text(content);
                snapshot::save_crdt(file, &crdt_doc.encode_state())?;
                return Ok(true);
            }
            Ok(None) => {
                eprintln!("[write] socket IPC full content sent but no ack — falling back to file IPC");
            }
            Err(e) => {
                eprintln!("[write] socket IPC full content failed: {} — falling back to file IPC", e);
            }
        }
    }
    // Slow path: patch-file IPC with a consumption poll.
    let hash = snapshot::doc_hash(file)?;
    let patches_dir = project_root.join(".agent-doc/patches");
    if !patches_dir.exists() {
        return Ok(false);
    }
    let patch_file = patches_dir.join(format!("{}.json", hash));
    let ipc_payload = serde_json::json!({
        "file": canonical.to_string_lossy(),
        "patches": [],
        "unmatched": "",
        "baseline": "",
        "fullContent": content,
    });
    write_ipc_and_poll(&patch_file, &ipc_payload, file, 0, Some(content))
}
/// Best-effort request for the plugin to reposition the boundary marker to
/// the end of `file` (used at commit time).
///
/// Infallible by design: any error simply yields `false` so the caller can
/// reposition locally instead. Tries socket IPC first, then a patch-file
/// signal polled for up to 500ms.
pub fn try_ipc_reposition_boundary(file: &Path) -> bool {
    let canonical = match file.canonicalize() {
        Ok(c) => c,
        Err(_) => return false,
    };
    let project_root = find_project_root(&canonical)
        .unwrap_or_else(|| canonical.parent().unwrap_or(Path::new(".")).to_path_buf());
    if crate::ipc_socket::is_listener_active(&project_root) {
        match crate::ipc_socket::send_reposition(
            &project_root,
            &canonical.to_string_lossy(),
        ) {
            Ok(true) => {
                eprintln!("[commit] socket IPC reposition boundary sent");
                return true;
            }
            _ => {
                eprintln!("[commit] socket IPC reposition failed — falling back to file IPC");
            }
        }
    }
    let hash = match snapshot::doc_hash(file) {
        Ok(h) => h,
        Err(_) => return false,
    };
    let patches_dir = project_root.join(".agent-doc/patches");
    if !patches_dir.exists() {
        return false;
    }
    let patch_file = patches_dir.join(format!("{}.json", hash));
    // Empty patch list + reposition_boundary flag = reposition-only signal.
    let payload = serde_json::json!({
        "file": canonical.to_string_lossy(),
        "patches": [],
        "unmatched": "",
        "reposition_boundary": true,
    });
    let json = match serde_json::to_string_pretty(&payload) {
        Ok(j) => j,
        Err(_) => return false,
    };
    if atomic_write(&patch_file, &json).is_err() {
        return false;
    }
    eprintln!("[commit] IPC reposition boundary signal sent");
    // Poll briefly for the plugin to consume (delete) the signal file.
    let timeout = std::time::Duration::from_millis(500);
    let poll_interval = std::time::Duration::from_millis(50);
    let start = std::time::Instant::now();
    while start.elapsed() < timeout {
        if !patch_file.exists() {
            eprintln!("[commit] plugin repositioned boundary via IPC");
            return true;
        }
        std::thread::sleep(poll_interval);
    }
    // Not consumed — clean up so a stale signal can't be applied later.
    let _ = std::fs::remove_file(&patch_file);
    eprintln!("[commit] IPC reposition timeout (non-fatal)");
    false
}
/// Writes an IPC payload to `patch_file` and polls (2s total, 100ms
/// interval) for the plugin to consume it, signalled by the file
/// disappearing.
///
/// Returns `Ok(true)` when the plugin applied the patch and the snapshots
/// were refreshed; `Ok(false)` when the caller should fall back to a direct
/// disk write (timeout, or one of the consumption-verification heuristics
/// below failed).
fn write_ipc_and_poll(
    patch_file: &Path,
    payload: &serde_json::Value,
    doc_file: &Path,
    patch_count: usize,
    content_ours: Option<&str>,
) -> Result<bool> {
    atomic_write(
        patch_file,
        &serde_json::to_string_pretty(payload)?,
    )?;
    eprintln!(
        "[write] IPC patch written to {} ({} components)",
        patch_file.display(),
        patch_count
    );
    let timeout = std::time::Duration::from_secs(2);
    let poll_interval = std::time::Duration::from_millis(100);
    let start = std::time::Instant::now();
    while start.elapsed() < timeout {
        if !patch_file.exists() {
            // Give the plugin a moment to finish flushing its own write.
            std::thread::sleep(std::time::Duration::from_millis(200));
            let current_on_disk = std::fs::read_to_string(doc_file).unwrap_or_default();
            // Heuristic 1: patch consumed but the document is still
            // byte-identical to the baseline — assume the apply failed.
            let baseline_content = payload.get("baseline")
                .and_then(|v| v.as_str())
                .unwrap_or("");
            if !baseline_content.is_empty() && current_on_disk == baseline_content {
                eprintln!(
                    "[write] IPC patch consumed but file unchanged on disk — plugin may have failed to apply. Falling back to disk write."
                );
                return Ok(false);
            }
            // Heuristic 2: when any patch carries non-empty content, require
            // at least one patch's first non-blank line to appear in the file
            // (empty patches count as trivially present).
            let patch_list = payload.get("patches")
                .and_then(|v| v.as_array());
            if let Some(patches) = patch_list {
                let has_content_patch = patches.iter().any(|p| {
                    let content = p.get("content").and_then(|c| c.as_str()).unwrap_or("");
                    !content.trim().is_empty()
                });
                if has_content_patch {
                    let any_present = patches.iter().any(|p| {
                        let content = p.get("content").and_then(|c| c.as_str()).unwrap_or("");
                        if content.trim().is_empty() { return true; }
                        content.lines()
                            .find(|l| !l.trim().is_empty())
                            .is_none_or(|first_line| current_on_disk.contains(first_line.trim()))
                    });
                    if !any_present {
                        eprintln!(
                            "[write] IPC patch consumed but response content not found in file — plugin may have partially failed. Falling back to disk write."
                        );
                        return Ok(false);
                    }
                }
            }
            // Success: refresh text + CRDT snapshots from our intended
            // content, or from disk when it wasn't supplied.
            let snap_content = if let Some(ours) = content_ours {
                ours.to_string()
            } else {
                std::fs::read_to_string(doc_file)
                    .with_context(|| format!("failed to read {} after IPC", doc_file.display()))?
            };
            snapshot::save(doc_file, &snap_content)?;
            let crdt_doc = crate::crdt::CrdtDoc::from_text(&snap_content);
            snapshot::save_crdt(doc_file, &crdt_doc.encode_state())?;
            eprintln!("[write] IPC patch consumed by plugin — snapshot updated");
            return Ok(true);
        }
        std::thread::sleep(poll_interval);
    }
    // Timed out — remove our own patch file so it can't be applied later.
    eprintln!("[write] IPC timeout ({}s) — falling back to direct write", timeout.as_secs());
    let _ = std::fs::remove_file(patch_file);
    Ok(false)
}
/// Builds the JSON patch entries for an IPC payload, resolving boundary ids
/// against the document as it would look after the boundary marker is
/// repositioned to the end.
///
/// Frontmatter patches are excluded — they travel in a dedicated payload
/// field. When there are no component patches but unmatched content exists,
/// a synthetic patch targeting `exchange` (or `output`) is created so the
/// content is not dropped.
fn build_ipc_patches_json(
    file: &Path,
    patches: &[crate::template::PatchBlock],
    unmatched: &str,
) -> Result<Vec<serde_json::Value>> {
    let raw_doc = std::fs::read_to_string(file).unwrap_or_default();
    let current_doc = template::reposition_boundary_to_end_with_summary(
        &raw_doc,
        file.file_stem().and_then(|s| s.to_str()),
    );
    let mut ipc_patches: Vec<serde_json::Value> = patches
        .iter()
        .filter(|p| p.name != "frontmatter")
        .map(|p| {
            let mut patch_json = serde_json::json!({
                "component": p.name,
                "content": p.content,
            });
            // Fixed: the argument had been corrupted by HTML-entity decoding
            // ("&curren" -> "¤"); it must borrow `current_doc`.
            if let Some(bid) = find_boundary_id(&current_doc, &p.name) {
                patch_json["boundary_id"] = serde_json::Value::String(bid);
            } else if is_append_mode_component(&p.name) {
                patch_json["ensure_boundary"] = serde_json::Value::Bool(true);
            }
            patch_json
        })
        .collect();
    let effective_unmatched = unmatched.trim().to_string();
    if ipc_patches.is_empty() && !effective_unmatched.is_empty() {
        // Fall back to the first target we can address; `exchange` is
        // append-mode, so in practice one of these branches always fires.
        for target in &["exchange", "output"] {
            if let Some(bid) = find_boundary_id(&current_doc, target) {
                eprintln!(
                    "[write] synthesizing {} patch for unmatched content (boundary {})",
                    target,
                    // NOTE(review): byte-slicing the id prefix assumes ids are
                    // ASCII (e.g. hex) — confirm against the id generator.
                    &bid[..8.min(bid.len())]
                );
                ipc_patches.push(serde_json::json!({
                    "component": target,
                    "content": &effective_unmatched,
                    "boundary_id": bid,
                }));
                break;
            } else if is_append_mode_component(target) {
                eprintln!(
                    "[write] synthesizing {} patch for unmatched content (ensure_boundary)",
                    target
                );
                ipc_patches.push(serde_json::json!({
                    "component": target,
                    "content": &effective_unmatched,
                    "ensure_boundary": true,
                }));
                break;
            }
        }
    }
    Ok(ipc_patches)
}
/// Escapes agent-marker comments embedded in `content` so they are rendered
/// as literal text instead of being re-parsed as component/boundary markers.
///
/// Walks the string byte-wise; any `<!-- ... -->` comment whose trimmed body
/// is an agent marker (per `component::is_agent_marker`) has its angle
/// brackets replaced with `&lt;`/`&gt;`. All other text, including ordinary
/// HTML comments and unterminated `<!--` sequences, passes through
/// unchanged.
pub fn sanitize_component_tags(content: &str) -> String {
    let bytes = content.as_bytes();
    let len = bytes.len();
    let mut result = String::with_capacity(len);
    let mut pos = 0;
    while pos + 4 <= len {
        if &bytes[pos..pos + 4] != b"<!--" {
            // Not a comment opener: copy one UTF-8 character and continue.
            let ch_len = utf8_char_len(bytes[pos]);
            result.push_str(&content[pos..pos + ch_len]);
            pos += ch_len;
            continue;
        }
        let close = match find_comment_close(bytes, pos + 4) {
            Some(c) => c,
            None => {
                // Unterminated comment: emit the opener literally, move on.
                result.push_str("<!--");
                pos += 4;
                continue;
            }
        };
        // Comment body between "<!--" and "-->".
        let inner = &content[pos + 4..close - 3];
        let trimmed = inner.trim();
        if component::is_agent_marker(trimmed) {
            let original = &content[pos..close];
            // Neutralize the marker by HTML-escaping its angle brackets.
            // (Fixed: this previously replaced '<' with "<" and '>' with
            // ">" — self-replacements left markers active; the entity
            // strings had been decoded by an HTML-unescaping pass.)
            result.push_str(&original.replace('<', "&lt;").replace('>', "&gt;"));
        } else {
            result.push_str(&content[pos..close]);
        }
        pos = close;
    }
    // Fewer than 4 bytes remain — they cannot start a comment; copy as-is.
    if pos < len {
        result.push_str(&content[pos..]);
    }
    result
}
/// Returns the byte length of a UTF-8 character from its first byte.
///
/// Continuation bytes (0x80..=0xBF) yield 1 so a caller stepping through
/// valid UTF-8 never stalls; callers are expected to always be positioned on
/// a character boundary.
fn utf8_char_len(first_byte: u8) -> usize {
    if first_byte < 0x80 {
        1 // ASCII
    } else if first_byte < 0xC0 {
        1 // continuation byte — shouldn't be a first byte, advance minimally
    } else if first_byte < 0xE0 {
        2 // 110xxxxx
    } else if first_byte < 0xF0 {
        3 // 1110xxxx
    } else {
        4 // 11110xxx (and invalid 0xF8..=0xFF, treated as 4 like the original)
    }
}
/// Finds the first `-->` at or after `start` and returns the index just past
/// it, or `None` when the comment is unterminated (or `start` is out of
/// range).
fn find_comment_close(bytes: &[u8], start: usize) -> Option<usize> {
    bytes
        .get(start..)?
        .windows(3)
        .position(|w| w == b"-->")
        .map(|offset| start + offset + 3)
}
/// Runs `sanitize_component_tags` over every patch's content in place.
fn sanitize_patches(patches: &mut [template::PatchBlock]) {
    patches
        .iter_mut()
        .for_each(|patch| patch.content = sanitize_component_tags(&patch.content));
}
/// Normalizes a raw assistant response: drops a leading `## Assistant`
/// heading (and the newlines after it) and a trailing `## User` heading
/// (ensuring the remainder ends with exactly one newline when trimmed).
///
/// Responses without either heading are returned unchanged.
pub fn strip_assistant_heading(response: &str) -> String {
    // Leading heading: match against the whitespace-trimmed start, so any
    // indentation before "## Assistant" is discarded along with it.
    let mut body = match response.trim_start().strip_prefix("## Assistant") {
        Some(after_heading) => {
            let after_heading = after_heading.strip_prefix('\n').unwrap_or(after_heading);
            after_heading.trim_start_matches('\n').to_string()
        }
        None => response.to_string(),
    };
    // Trailing heading: strip "## User" plus the blank lines before it, then
    // re-add a single terminating newline.
    if let Some(kept) = body.trim_end().strip_suffix("## User") {
        body = kept.trim_end_matches('\n').to_string();
        if !body.ends_with('\n') {
            body.push('\n');
        }
    }
    body
}
/// Opens (creating if needed) the per-document lock file and takes an
/// exclusive advisory lock on it, blocking until acquired.
///
/// The lock is held for the lifetime of the returned `File`; dropping it
/// releases the lock.
fn acquire_doc_lock(path: &Path) -> Result<std::fs::File> {
    let lock_path = crate::snapshot::lock_path_for(path)?;
    if let Some(parent) = lock_path.parent() {
        std::fs::create_dir_all(parent)?;
    }
    // truncate(false): never clobber the lock file — other processes may be
    // holding or waiting on it.
    let file = OpenOptions::new()
        .create(true)
        .write(true)
        .truncate(false)
        .open(&lock_path)
        .with_context(|| format!("failed to open doc lock {}", lock_path.display()))?;
    // fs2 advisory lock; blocks until the exclusive lock is available.
    file.lock_exclusive()
        .with_context(|| format!("failed to acquire doc lock on {}", lock_path.display()))?;
    Ok(file)
}
/// Public wrapper around `atomic_write` for callers outside this module.
pub fn atomic_write_pub(path: &Path, content: &str) -> Result<()> {
    atomic_write(path, content)
}
/// Atomically replaces the contents of `path`: writes to a temp file created
/// in the same directory (so the final rename stays on one filesystem) and
/// renames it into place via `NamedTempFile::persist`.
fn atomic_write(path: &Path, content: &str) -> Result<()> {
    use std::io::Write;
    let parent = path.parent().unwrap_or(Path::new("."));
    let mut tmp = tempfile::NamedTempFile::new_in(parent)
        .with_context(|| format!("failed to create temp file in {}", parent.display()))?;
    tmp.write_all(content.as_bytes())
        .with_context(|| "failed to write temp file")?;
    tmp.persist(path)
        .with_context(|| format!("failed to rename temp file to {}", path.display()))?;
    Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
use tempfile::TempDir;
#[test]
fn write_appends_response() {
    // Mirrors the append construction in `run`: base document + Assistant
    // section + fresh User prompt, written atomically.
    let dir = TempDir::new().unwrap();
    let doc = dir.path().join("test.md");
    fs::write(&doc, "---\nsession: test\n---\n\n## User\n\nHello\n").unwrap();
    let base = fs::read_to_string(&doc).unwrap();
    let response = "This is the assistant response.";
    let mut content_ours = base.clone();
    if !content_ours.ends_with('\n') {
        content_ours.push('\n');
    }
    content_ours.push_str("## Assistant\n\n");
    content_ours.push_str(response);
    content_ours.push('\n');
    content_ours.push_str("\n## User\n\n");
    atomic_write(&doc, &content_ours).unwrap();
    let result = fs::read_to_string(&doc).unwrap();
    assert!(result.contains("## Assistant\n\nThis is the assistant response."));
    assert!(result.contains("\n\n## User\n\n"));
    assert!(result.contains("## User\n\nHello"));
}
#[test]
fn write_updates_snapshot() {
let dir = TempDir::new().unwrap();
let doc = dir.path().join("test.md");
let content = "---\nsession: test\n---\n\n## User\n\nHello\n\n## Assistant\n\nResponse\n\n## User\n\n";
fs::write(&doc, content).unwrap();
let snap_path = snapshot::path_for(&doc).unwrap();
assert!(snap_path.to_string_lossy().contains(".agent-doc/snapshots/"));
let snap_abs = dir.path().join(&snap_path);
fs::create_dir_all(snap_abs.parent().unwrap()).unwrap();
fs::write(&snap_abs, content).unwrap();
let loaded = fs::read_to_string(&snap_abs).unwrap();
assert_eq!(loaded, content);
}
#[test]
fn write_preserves_user_edits_via_merge() {
let base = "---\nsession: test\n---\n\n## User\n\nOriginal question\n";
let response = "My response";
let mut ours = base.to_string();
ours.push_str("\n## Assistant\n\n");
ours.push_str(response);
ours.push_str("\n\n## User\n\n");
let theirs = "---\nsession: test\n---\n\n## User\n\nOriginal question\nAnd a follow-up!\n";
let merged = merge::merge_contents(base, &ours, theirs).unwrap();
assert!(merged.contains("My response"), "response missing from merge");
assert!(merged.contains("And a follow-up!"), "user edit missing from merge");
}
#[test]
fn write_no_merge_when_unchanged() {
let base = "---\nsession: test\n---\n\n## User\n\nHello\n";
let response = "Response here";
let mut ours = base.to_string();
ours.push_str("\n## Assistant\n\n");
ours.push_str(response);
ours.push_str("\n\n## User\n\n");
let dir = TempDir::new().unwrap();
let doc = dir.path().join("test.md");
fs::write(&doc, base).unwrap();
let doc_lock = acquire_doc_lock(&doc).unwrap();
let content_current = fs::read_to_string(&doc).unwrap();
let final_content = if content_current == base {
ours.clone()
} else {
merge::merge_contents(base, &ours, &content_current).unwrap()
};
drop(doc_lock);
assert_eq!(final_content, ours);
}
#[test]
fn atomic_write_correct_content() {
let dir = TempDir::new().unwrap();
let path = dir.path().join("atomic.md");
atomic_write(&path, "hello world").unwrap();
assert_eq!(fs::read_to_string(&path).unwrap(), "hello world");
}
#[test]
fn concurrent_writes_no_corruption() {
use std::sync::{Arc, Barrier};
let dir = TempDir::new().unwrap();
let path = dir.path().join("concurrent.md");
fs::write(&path, "initial").unwrap();
let n = 20;
let barrier = Arc::new(Barrier::new(n));
let mut handles = Vec::new();
for i in 0..n {
let p = path.clone();
let parent = dir.path().to_path_buf();
let bar = Arc::clone(&barrier);
let content = format!("writer-{}-content", i);
handles.push(std::thread::spawn(move || {
bar.wait();
let mut tmp = tempfile::NamedTempFile::new_in(&parent).unwrap();
std::io::Write::write_all(&mut tmp, content.as_bytes()).unwrap();
tmp.persist(&p).unwrap();
}));
}
for h in handles {
h.join().unwrap();
}
let final_content = fs::read_to_string(&path).unwrap();
assert!(
final_content.starts_with("writer-") && final_content.ends_with("-content"),
"unexpected content: {}",
final_content
);
}
#[test]
fn snapshot_excludes_concurrent_user_edits() {
let dir = TempDir::new().unwrap();
let agent_doc_dir = dir.path().join(".agent-doc").join("snapshots");
fs::create_dir_all(&agent_doc_dir).unwrap();
let doc = dir.path().join("test.md");
let base = "---\nsession: test\n---\n\n## User\n\nOriginal question\n";
fs::write(&doc, base).unwrap();
let response = "Agent response here";
let mut content_ours = base.to_string();
content_ours.push_str("\n## Assistant\n\n");
content_ours.push_str(response);
content_ours.push_str("\n\n## User\n\n");
let user_edited = format!("{}Follow-up question\n", base);
fs::write(&doc, &user_edited).unwrap();
let merged = merge::merge_contents(base, &content_ours, &user_edited).unwrap();
atomic_write(&doc, &merged).unwrap();
assert!(merged.contains(response), "response missing from merged");
assert!(merged.contains("Follow-up question"), "user edit missing from merged");
snapshot::save(&doc, &content_ours).unwrap();
let snap = snapshot::load(&doc).unwrap().unwrap();
assert!(snap.contains(response), "snapshot should have response");
assert!(
!snap.contains("Follow-up question"),
"snapshot must NOT contain concurrent user edit — \
otherwise the next diff won't detect it"
);
let current = fs::read_to_string(&doc).unwrap();
assert_ne!(snap, current, "snapshot and file should differ (user edit not in snapshot)");
assert!(
current.contains("Follow-up question"),
"current file should contain user's edit"
);
}
#[test]
fn try_ipc_returns_false_when_no_patches_dir() {
let dir = TempDir::new().unwrap();
let doc = dir.path().join("test.md");
fs::write(&doc, "content").unwrap();
let patches: Vec<crate::template::PatchBlock> = vec![];
let result = try_ipc(&doc, &patches, "", None, None, None).unwrap();
assert!(!result, "should return false when patches dir doesn't exist");
}
#[test]
fn try_ipc_times_out_when_no_plugin() {
let dir = TempDir::new().unwrap();
let agent_doc_dir = dir.path().join(".agent-doc");
fs::create_dir_all(agent_doc_dir.join("patches")).unwrap();
fs::create_dir_all(agent_doc_dir.join("snapshots")).unwrap();
fs::create_dir_all(agent_doc_dir.join("crdt")).unwrap();
let doc = dir.path().join("test.md");
fs::write(&doc, "---\nsession: test\n---\n\n<!-- agent:exchange -->\ncontent\n<!-- /agent:exchange -->\n").unwrap();
let patch = crate::template::PatchBlock {
name: "exchange".to_string(),
content: "new content".to_string(),
};
let result = try_ipc(&doc, &[patch], "", None, None, None).unwrap();
assert!(!result, "should return false on timeout (no plugin)");
let patches_dir = agent_doc_dir.join("patches");
let entries: Vec<_> = fs::read_dir(&patches_dir)
.unwrap()
.filter_map(|e| e.ok())
.collect();
assert!(entries.is_empty(), "patch file should be cleaned up after timeout");
}
#[test]
fn try_ipc_succeeds_when_plugin_consumes() {
let dir = TempDir::new().unwrap();
let agent_doc_dir = dir.path().join(".agent-doc");
fs::create_dir_all(agent_doc_dir.join("patches")).unwrap();
fs::create_dir_all(agent_doc_dir.join("snapshots")).unwrap();
fs::create_dir_all(agent_doc_dir.join("crdt")).unwrap();
let doc = dir.path().join("test.md");
fs::write(&doc, "---\nsession: test\n---\n\n<!-- agent:exchange -->\ncontent\n<!-- /agent:exchange -->\n").unwrap();
let patch = crate::template::PatchBlock {
name: "exchange".to_string(),
content: "new content".to_string(),
};
let patches_dir = agent_doc_dir.join("patches");
let watcher_dir = patches_dir.clone();
let doc_for_watcher = doc.clone();
let _watcher = std::thread::spawn(move || {
for _ in 0..20 {
std::thread::sleep(std::time::Duration::from_millis(50));
if let Ok(entries) = fs::read_dir(&watcher_dir) {
for entry in entries.flatten() {
if entry.path().extension().is_some_and(|e| e == "json") {
let _ = fs::write(&doc_for_watcher,
"---\nsession: test\n---\n\n<!-- agent:exchange -->\nnew content\n<!-- /agent:exchange -->\n");
let _ = fs::remove_file(entry.path());
return;
}
}
}
}
});
let result = try_ipc(&doc, &[patch], "", None, None, None).unwrap();
assert!(result, "should return true when plugin consumes patch");
}
#[test]
fn try_ipc_full_content_returns_false_when_no_patches_dir() {
let dir = TempDir::new().unwrap();
let doc = dir.path().join("test.md");
fs::write(&doc, "content").unwrap();
let result = try_ipc_full_content(&doc, "new content").unwrap();
assert!(!result, "should return false when patches dir doesn't exist");
}
#[test]
fn sanitize_escapes_open_agent_tag() {
let input = "Here is an example: <!-- agent:exchange --> marker.";
let result = sanitize_component_tags(input);
assert!(
result.contains("<!-- agent:exchange -->"),
"open agent tag should be escaped, got: {}",
result
);
assert!(
!result.contains("<!-- agent:exchange -->"),
"raw open agent tag should not remain"
);
}
#[test]
fn sanitize_escapes_close_agent_tag() {
let input = "End marker: <!-- /agent:pending --> done.";
let result = sanitize_component_tags(input);
assert!(
result.contains("<!-- /agent:pending -->"),
"close agent tag should be escaped, got: {}",
result
);
assert!(
!result.contains("<!-- /agent:pending -->"),
"raw close agent tag should not remain"
);
}
#[test]
fn sanitize_does_not_escape_patch_markers() {
let input = "<!-- patch:exchange -->\nsome content\n<!-- /patch:exchange -->\n";
let result = sanitize_component_tags(input);
assert_eq!(result, input, "patch markers must not be escaped");
}
#[test]
fn sanitize_passes_normal_content_through() {
let input = "Just some normal markdown content.\n\nWith paragraphs and **bold**.";
let result = sanitize_component_tags(input);
assert_eq!(result, input, "normal content should pass through unchanged");
}
#[test]
fn sanitize_preserves_utf8_em_dash() {
let input = "This is a test \u{2014} with em dashes \u{2014} in content.";
let result = sanitize_component_tags(input);
assert_eq!(result, input, "em dashes must survive sanitization unchanged");
assert_eq!(
result.as_bytes(),
input.as_bytes(),
"byte-level content must be identical"
);
}
#[test]
fn sanitize_preserves_mixed_utf8_and_agent_tags() {
let input = "Response with \u{2014} em dash and <!-- agent:exchange --> tag reference.";
let result = sanitize_component_tags(input);
assert!(
result.contains("\u{2014}"),
"em dash must be preserved, got: {:?}",
result
);
assert!(
result.contains("<!-- agent:exchange -->"),
"agent tag must be escaped"
);
}
#[test]
fn sanitize_preserves_various_unicode() {
let input = "Caf\u{00E9} \u{2019}quotes\u{2019} \u{2014} \u{2026} \u{1F600}";
let result = sanitize_component_tags(input);
assert_eq!(result, input, "all unicode must survive sanitization");
}
#[test]
fn try_ipc_snapshot_saves_content_ours() {
let dir = TempDir::new().unwrap();
let agent_doc_dir = dir.path().join(".agent-doc");
fs::create_dir_all(agent_doc_dir.join("patches")).unwrap();
fs::create_dir_all(agent_doc_dir.join("snapshots")).unwrap();
fs::create_dir_all(agent_doc_dir.join("crdt")).unwrap();
let doc = dir.path().join("test.md");
let original = "---\nsession: test\n---\n\n<!-- agent:exchange -->\noriginal content\n<!-- agent:boundary:test-boundary-123 -->\n<!-- /agent:exchange -->\n";
fs::write(&doc, original).unwrap();
let patch = crate::template::PatchBlock {
name: "exchange".to_string(),
content: "agent response content".to_string(),
};
let content_ours = "---\nsession: test\n---\n\n<!-- agent:exchange -->\nagent response content\n<!-- /agent:exchange -->\n";
let user_edited = "---\nsession: test\n---\n\n<!-- agent:exchange -->\noriginal content\nuser typed something new\n<!-- agent:boundary:test-boundary-123 -->\n<!-- /agent:exchange -->\n";
fs::write(&doc, user_edited).unwrap();
let patches_dir = agent_doc_dir.join("patches");
let watcher_dir = patches_dir.clone();
let doc_for_watcher = doc.clone();
let _watcher = std::thread::spawn(move || {
for _ in 0..20 {
std::thread::sleep(std::time::Duration::from_millis(50));
if let Ok(entries) = fs::read_dir(&watcher_dir) {
for entry in entries.flatten() {
if entry.path().extension().is_some_and(|e| e == "json") {
let _ = fs::write(&doc_for_watcher,
"---\nsession: test\n---\n\n<!-- agent:exchange -->\nagent response content\nuser typed something new\n<!-- /agent:exchange -->\n");
let _ = fs::remove_file(entry.path());
return;
}
}
}
}
});
let result = try_ipc(
&doc,
&[patch],
"",
None,
Some(original), Some(content_ours), )
.unwrap();
assert!(result, "IPC should succeed when plugin consumes patch");
let snap = snapshot::load(&doc).unwrap().unwrap();
assert!(
snap.contains("agent response content"),
"snapshot must contain content_ours (agent response), got: {}",
snap
);
assert!(
!snap.contains("user typed something new"),
"snapshot must NOT contain working tree edits — \
it should save content_ours, not the current file"
);
assert_eq!(
snap, content_ours,
"snapshot must exactly match content_ours"
);
let on_disk = fs::read_to_string(&doc).unwrap();
assert!(
on_disk.contains("user typed something new"),
"working tree file should still contain user edits"
);
}
#[test]
fn ipc_json_preserves_utf8_em_dash() {
let content = "Response with \u{2014} em dash.";
let payload = serde_json::json!({
"file": "/tmp/test.md",
"patches": [{
"component": "exchange",
"content": content,
}],
"unmatched": "",
"baseline": "",
});
let json_str = serde_json::to_string_pretty(&payload).unwrap();
let parsed: serde_json::Value = serde_json::from_str(&json_str).unwrap();
let parsed_content = parsed["patches"][0]["content"].as_str().unwrap();
assert_eq!(
parsed_content, content,
"em dash must survive JSON round-trip"
);
assert!(
json_str.contains("\u{2014}"),
"JSON should contain raw UTF-8 em dash"
);
}
#[test]
fn append_mode_component_exchange() {
assert!(is_append_mode_component("exchange"));
assert!(is_append_mode_component("findings"));
}
#[test]
fn replace_mode_components_not_append() {
assert!(!is_append_mode_component("pending"));
assert!(!is_append_mode_component("status"));
assert!(!is_append_mode_component("output"));
assert!(!is_append_mode_component("todo"));
}
#[test]
fn find_boundary_id_skips_code_blocks() {
let content = "<!-- agent:exchange -->\n```\n<!-- agent:boundary:fake-id -->\n```\n<!-- /agent:exchange -->\n";
let result = find_boundary_id(content, "exchange");
assert!(
result.is_none(),
"boundary inside code block must not be found, got: {:?}",
result
);
}
#[test]
fn find_boundary_id_finds_real_marker() {
let content = "<!-- agent:exchange -->\nSome text.\n<!-- agent:boundary:real-uuid-5678 -->\nMore text.\n<!-- /agent:exchange -->\n";
let result = find_boundary_id(content, "exchange");
assert_eq!(result, Some("real-uuid-5678".to_string()));
}
}