use anyhow::{Context, Result};
use serde::Serialize;
use std::collections::{HashMap, HashSet};
use std::path::Path;
use std::process::Command;
use crate::{config, diff, frontmatter, git, recover, resync, sessions, snapshot};
/// A change detected in a document (or URL) listed in the current file's
/// frontmatter `links`. Serialized into the preflight JSON output.
#[derive(Serialize)]
pub struct RelatedDocChange {
    /// The link target exactly as written in frontmatter (relative path or URL).
    pub path: String,
    /// Human-readable description of what changed (empty means "unchanged").
    pub summary: String,
    /// False when the linked file is missing or the URL fetch failed.
    pub exists: bool,
}
/// Machine-readable result of the preflight pipeline, printed as pretty JSON
/// by [`run`]. Empty collections / `None` values are omitted from the output
/// via `skip_serializing_if`.
#[derive(Serialize, Default)]
pub struct PreflightOutput {
    /// tmux layout problems reported by [`check_layout`].
    pub layout_issues: Vec<String>,
    /// Whether `recover::run` restored anything this cycle.
    pub recovered: bool,
    /// Whether the git commit of the document succeeded.
    pub committed: bool,
    /// Lines drained from `.agent-doc/claims.log`.
    pub claims: Vec<String>,
    /// Diff against the snapshot (model-switch directive stripped);
    /// `None` when nothing changed.
    pub diff: Option<String>,
    /// True when diff computation found no changes.
    pub no_changes: bool,
    /// Linked docs/URLs that changed since our snapshot.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub linked_changes: Vec<RelatedDocChange>,
    /// Path of the content-addressed baseline copy saved this cycle.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub baseline_file: Option<String>,
    /// Diff classification label from `diff::classify_diff`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub diff_type: Option<String>,
    /// Explanation accompanying `diff_type`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub diff_type_reason: Option<String>,
    /// Annotated form of the diff, when annotation produced anything.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub annotated_diff: Option<String>,
    /// Skill-defined slash commands parsed out of the diff.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub slash_commands: Vec<String>,
    /// Built-in slash commands parsed out of the diff.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub builtin_commands: Vec<String>,
    /// Tier composed from switch directive, component, frontmatter and the
    /// suggestion (always populated by [`run`]).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub effective_tier: Option<String>,
    /// Tier required by the model component or frontmatter, if any.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub required_tier: Option<String>,
    /// Heuristic tier suggestion derived from the diff size/type.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub suggested_tier: Option<String>,
    /// Model name from an inline model-switch directive found in the diff.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub model_switch: Option<String>,
    /// Tier corresponding to `model_switch`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub model_switch_tier: Option<String>,
    /// Callbacks found by `callback::scan_pending_callbacks`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub pending_callbacks: Vec<crate::callback::PendingCallback>,
    /// Environment variable map declared in frontmatter.
    #[serde(default, skip_serializing_if = "indexmap::IndexMap::is_empty")]
    pub env: indexmap::IndexMap<String, Option<String>>,
    /// True when the `pending` list order differs from the snapshot's.
    #[serde(default, skip_serializing_if = "std::ops::Not::not")]
    pub pending_reordered: bool,
    /// Number of gated items currently in the `pending` list.
    #[serde(default, skip_serializing_if = "is_zero_usize")]
    pub pending_gated_count: usize,
    /// Short model name resolved from the frontmatter `model` field.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub agent_model: Option<String>,
}
/// serde `skip_serializing_if` predicate: omit zero-valued counters.
fn is_zero_usize(n: &usize) -> bool {
    matches!(*n, 0)
}
/// Strips the vendor prefix from a model id, e.g. `claude-opus-4` → `opus-4`.
///
/// Ids without the `claude-` prefix are returned unchanged. Idiomatic
/// `strip_prefix(..).unwrap_or(..)` replaces the manual `if let` + fallthrough.
fn short_model_name(model_id: &str) -> &str {
    model_id.strip_prefix("claude-").unwrap_or(model_id)
}
/// Converts the optional frontmatter `model` value into the short model name
/// reported as `agent_model` in the preflight output.
fn resolve_agent_model(frontmatter_model: Option<&str>) -> Option<String> {
    let model = frontmatter_model?;
    Some(short_model_name(model).to_string())
}
/// Tracks consecutive "session drift" layout findings in a counter file and
/// automatically runs `resync --fix` once drift has been observed on two
/// consecutive preflights. Any drift-free preflight resets the counter.
///
/// Entirely best-effort: all failures are reported on stderr and never abort
/// the preflight cycle.
fn maybe_auto_resync_on_drift(file: &std::path::Path, layout_issues: &[String]) {
    let has_drift = layout_issues
        .iter()
        .any(|i| i.starts_with("session drift:"));
    // Without a canonical path / project root there is nowhere to persist state.
    let Ok(canonical) = file.canonicalize() else { return; };
    let Some(project_root) = snapshot::find_project_root(&canonical) else { return; };
    let state_dir = project_root.join(".agent-doc/state");
    let counter_path = state_dir.join("drift.count");
    if !has_drift {
        // Drift cleared: reset the consecutive-detection counter.
        if counter_path.exists() {
            let _ = std::fs::remove_file(&counter_path);
        }
        return;
    }
    // A missing or unparsable counter file counts as zero.
    let current: u32 = std::fs::read_to_string(&counter_path)
        .ok()
        .and_then(|s| s.trim().parse().ok())
        .unwrap_or(0);
    let next = current + 1;
    if let Err(e) = std::fs::create_dir_all(&state_dir) {
        eprintln!("[preflight] drift state dir create failed: {}", e);
        return;
    }
    if let Err(e) = std::fs::write(&counter_path, next.to_string()) {
        eprintln!("[preflight] drift counter write failed: {}", e);
    }
    if next >= 2 {
        // Second consecutive detection: trigger the automatic fix.
        eprintln!(
            "[preflight] session drift detected {}x consecutively — running `resync --fix`",
            next
        );
        crate::ops_log::log_op(
            file,
            &format!("auto_resync_on_drift consecutive={}", next),
        );
        if let Err(e) = resync::run(true, None) {
            eprintln!("[preflight] auto-resync failed: {}", e);
        } else {
            // Successful resync clears the counter so the cycle restarts.
            let _ = std::fs::remove_file(&counter_path);
        }
    } else {
        eprintln!(
            "[preflight] session drift detected (count={}) — will auto-resync on next detection",
            next
        );
    }
}
/// Inspects the current tmux environment and returns human-readable layout
/// issue strings (empty outside tmux, or when tmux queries fail).
///
/// Checks performed:
/// - window index 0 exists in the current session,
/// - all registry-registered panes live in a single tmux session
///   ("session drift"),
/// - no document is claimed by more than one session.
pub fn check_layout() -> Vec<String> {
    if !sessions::in_tmux() {
        return vec![];
    }
    let mut issues = Vec::new();
    let session_name = match Command::new("tmux")
        .args(["display-message", "-p", "#{session_name}"])
        .output()
    {
        Ok(out) if out.status.success() => {
            String::from_utf8_lossy(&out.stdout).trim().to_string()
        }
        // tmux query failed — report what we have (nothing) rather than error.
        _ => return issues,
    };
    if session_name.is_empty() {
        return issues;
    }
    let window_output = match Command::new("tmux")
        .args([
            "list-windows",
            "-t",
            &format!("{}:", session_name),
            "-F",
            "#{window_index}\t#{window_name}\t#{window_panes}",
        ])
        .output()
    {
        Ok(out) if out.status.success() => {
            String::from_utf8_lossy(&out.stdout).to_string()
        }
        _ => return issues,
    };
    // Only the first (index) column is used; name and pane count are ignored.
    let windows: Vec<u32> = window_output
        .lines()
        .filter_map(|line| {
            let mut parts = line.splitn(3, '\t');
            let index: u32 = parts.next()?.parse().ok()?;
            Some(index)
        })
        .collect();
    if !windows.contains(&0) {
        issues.push(format!(
            "window index 0 missing in session '{}' (base-index compliance)",
            session_name,
        ));
    }
    // Registry checks are best-effort: a missing/corrupt registry is skipped.
    let registry_path = sessions::registry_path();
    let registry: Option<tmux_router::Registry> = std::fs::read_to_string(&registry_path)
        .ok()
        .and_then(|s| serde_json::from_str(&s).ok());
    if let Some(registry) = registry {
        // Resolve each registered pane to its owning tmux session.
        let mut pane_sessions: HashSet<String> = HashSet::new();
        for entry in registry.values() {
            let pane = &entry.pane;
            let pane_sess = Command::new("tmux")
                .args(["display-message", "-t", pane, "-p", "#{session_name}"])
                .output()
                .ok()
                .filter(|o| o.status.success())
                .map(|o| String::from_utf8_lossy(&o.stdout).trim().to_string())
                .unwrap_or_default();
            if !pane_sess.is_empty() {
                pane_sessions.insert(pane_sess);
            }
        }
        // More than one distinct session among registered panes is drift.
        if pane_sessions.len() > 1 {
            let mut sessions_vec: Vec<&str> = pane_sessions.iter().map(|s| s.as_str()).collect();
            sessions_vec.sort();
            issues.push(format!(
                "session drift: registered panes span {} tmux sessions: {}",
                pane_sessions.len(),
                sessions_vec.join(", "),
            ));
        }
        issues.extend(detect_duplicate_claims(&registry));
    }
    issues
}
/// Scans the session registry for documents claimed by more than one session
/// and returns one issue string per multiply-claimed file.
///
/// Fix: the grouping map is a `HashMap`, whose iteration order is randomized,
/// so the *order* of the returned issues varied between runs. The issues are
/// now sorted before returning so output is deterministic (the session ids
/// inside each message were already sorted).
fn detect_duplicate_claims(registry: &tmux_router::Registry) -> Vec<String> {
    // Group claiming session ids by file; entries with no file can't conflict.
    let mut file_sessions: HashMap<String, Vec<String>> = HashMap::new();
    for (session_id, entry) in registry {
        if entry.file.is_empty() {
            continue;
        }
        file_sessions
            .entry(entry.file.clone())
            .or_default()
            .push(session_id.clone());
    }
    let mut issues = Vec::new();
    for (file, session_ids) in &file_sessions {
        if session_ids.len() > 1 {
            let mut sorted = session_ids.clone();
            sorted.sort();
            issues.push(format!(
                "duplicate claims: {} sessions claim '{}': {}",
                session_ids.len(),
                file,
                sorted.join(", "),
            ));
        }
    }
    // HashMap iteration order is randomized; sort for deterministic output.
    issues.sort();
    issues
}
/// Runs the full preflight cycle for `file` and prints the resulting
/// [`PreflightOutput`] as pretty JSON on stdout.
///
/// Pipeline (step numbers mirror the stderr progress messages):
/// 0a. daily auto-gc; 0. tmux layout check (+auto-resync on repeated drift);
/// 1. crash recovery, snapshot init, pending-list maintenance; 2. git commit,
/// baseline capture; 2c. optional auto-compact; sweep-commit of other tracked
/// docs; 3. claims drain and mtime/typing debounce; 3c. linked-doc scan;
/// 4. diff computation, classification and model-tier resolution.
///
/// Errors only on a missing file, diff failure, or JSON serialization
/// failure; everything else is best-effort with stderr warnings.
pub fn run(file: &Path) -> Result<()> {
    if !file.exists() {
        anyhow::bail!("file not found: {}", file.display());
    }
    // Step 0a: run gc at most once per day, tracked by the gc.stamp mtime.
    {
        let canonical = std::fs::canonicalize(file).unwrap_or_else(|_| file.to_path_buf());
        if let Some(root) = snapshot::find_project_root(&canonical) {
            let stamp = root.join(".agent-doc/gc.stamp");
            // Missing stamp or unreadable mtime means gc is due.
            let needs_gc = match std::fs::metadata(&stamp) {
                Ok(meta) => meta.modified().ok()
                    .and_then(|t| t.elapsed().ok())
                    .map(|age| age > std::time::Duration::from_secs(86400))
                    .unwrap_or(true),
                Err(_) => true,
            };
            if needs_gc {
                eprintln!("[preflight] step 0a: auto-gc");
                match crate::gc::run(Some(&root), false) {
                    Ok(result) => {
                        if result.deleted > 0 {
                            eprintln!("[preflight] gc: {} files cleaned", result.deleted);
                        }
                        let _ = std::fs::write(&stamp, "");
                    }
                    Err(e) => eprintln!("[preflight] gc warning: {}", e),
                }
            }
        }
    }
    // If the ops log ends at the diff-start marker, the previous cycle never
    // wrote a result — surface the interruption before starting a new cycle.
    if let Ok(Some(last)) = crate::session_check::last_ops_event(file)
        && last.starts_with(crate::session_check::PREFLIGHT_START_EVENT)
    {
        eprintln!(
            "[preflight] WARNING: previous cycle ended at `preflight_diff_start` without a write — interrupted cycle detected"
        );
        crate::ops_log::log_op(file, "interrupted_cycle_detected file=");
    }
    eprintln!("[preflight] step 0: layout check");
    let layout_issues = check_layout();
    for issue in &layout_issues {
        eprintln!("[preflight] layout issue: {}", issue);
    }
    maybe_auto_resync_on_drift(file, &layout_issues);
    eprintln!("[preflight] step 1: recover");
    let recovered = recover::run(file).unwrap_or_else(|e| {
        eprintln!("[preflight] recover warning: {}", e);
        false
    });
    if let Err(e) = snapshot::ensure_initialized(file) {
        eprintln!("[preflight] warning: auto-init failed: {}", e);
    }
    let (pending_reordered, pending_gated_count) =
        run_pending_maintenance(file).unwrap_or_else(|e| {
            eprintln!("[preflight] pending maintenance warning: {}", e);
            (false, 0)
        });
    eprintln!("[preflight] step 2: commit");
    let committed = match git::commit(file) {
        Ok(()) => true,
        Err(e) => {
            eprintln!("[preflight] commit warning: {}", e);
            false
        }
    };
    // Save a content-addressed copy of the doc so later steps can refer to
    // exactly what preflight saw.
    let baseline_file = {
        let canonical = std::fs::canonicalize(file).unwrap_or_else(|_| file.to_path_buf());
        let hash = snapshot::doc_hash(&canonical).unwrap_or_else(|_| "unknown".to_string());
        let baseline_dir = snapshot::find_project_root(&canonical)
            .unwrap_or_else(|| file.parent().unwrap_or(Path::new(".")).to_path_buf())
            .join(".agent-doc/baselines");
        let _ = std::fs::create_dir_all(&baseline_dir);
        let baseline_path = baseline_dir.join(format!("{}.md", hash));
        match std::fs::read_to_string(file) {
            Ok(content) => {
                let _ = std::fs::write(&baseline_path, &content);
                eprintln!("[preflight] baseline saved: {}", baseline_path.display());
                Some(baseline_path.to_string_lossy().to_string())
            }
            Err(e) => {
                eprintln!("[preflight] failed to save baseline: {}", e);
                None
            }
        }
    };
    // Step 2c: when frontmatter enables auto_compact and the `exchange`
    // component exceeds the configured line threshold, compact it in place.
    {
        if let Ok(content) = std::fs::read_to_string(file)
            && let Ok((fm, _)) = frontmatter::parse(&content)
            && let Some(threshold) = fm.auto_compact
            && threshold > 0
            && fm.resolve_mode().is_template()
            && let Some(comp) = crate::component::parse(&content).ok().and_then(|comps| comps.into_iter().find(|c| c.name == "exchange"))
        {
            let comp_content = &content[comp.open_end..comp.close_start];
            let line_count = comp_content.lines().count();
            if line_count > threshold {
                eprintln!(
                    "[preflight] step 2c: auto-compact (exchange={} lines > threshold={})",
                    line_count, threshold
                );
                if let Err(e) = crate::compact::run(file, None, Some("exchange"), None, None) {
                    eprintln!("[preflight] auto-compact warning: {}", e);
                }
            }
        }
    }
    // Sweep: commit other registry-tracked docs whose snapshot is at least as
    // new as the doc itself (i.e. no unresponded user edits are outstanding).
    {
        let canonical = std::fs::canonicalize(file).unwrap_or_else(|_| file.to_path_buf());
        if let Some(root) = snapshot::find_project_root(&canonical) {
            let sessions_path = root.join(".agent-doc/sessions.json");
            if let Ok(content) = std::fs::read_to_string(&sessions_path)
                && let Ok(registry) = serde_json::from_str::<std::collections::HashMap<String, serde_json::Value>>(&content)
            {
                for entry in registry.values() {
                    let tracked_file = entry.get("file").and_then(|v| v.as_str()).unwrap_or("");
                    if tracked_file.is_empty() { continue; }
                    let doc_path = root.join(tracked_file);
                    // Never sweep the doc being preflighted; skip missing docs.
                    if doc_path == canonical { continue; }
                    if !doc_path.exists() { continue; }
                    let snap_rel = match snapshot::path_for(&doc_path) {
                        Ok(rel) => rel,
                        Err(_) => continue,
                    };
                    let snap_abs = root.join(&snap_rel);
                    // Unreadable mtimes default to "snapshot newer" so the
                    // content comparison below still gets a chance to veto.
                    let snap_is_newer = (|| {
                        let snap_mtime = std::fs::metadata(&snap_abs).ok()?.modified().ok()?;
                        let doc_mtime = std::fs::metadata(&doc_path).ok()?.modified().ok()?;
                        Some(snap_mtime > doc_mtime)
                    })()
                    .unwrap_or(true);
                    if snap_is_newer {
                        // Veto the sweep if the doc content (comments stripped)
                        // differs from the snapshot — unresponded user content.
                        if let (Ok(snap_content), Ok(doc_content)) =
                            (std::fs::read_to_string(&snap_abs), std::fs::read_to_string(&doc_path))
                            && !crate::diff::is_stale_snapshot(&snap_content, &doc_content)
                        {
                            let snap_stripped = crate::diff::strip_comments(&snap_content);
                            let doc_stripped = crate::diff::strip_comments(&doc_content);
                            if snap_stripped.trim() != doc_stripped.trim() {
                                eprintln!(
                                    "[preflight] sweep: skipping {} (unresponded user content)",
                                    doc_path.display()
                                );
                                continue;
                            }
                        }
                        // Skip docs committed moments ago to avoid commit churn.
                        let fresh = git::last_commit_mtime(&doc_path)
                            .ok()
                            .flatten()
                            .and_then(|t| t.elapsed().ok())
                            .is_some_and(|e| e.as_secs() < 5);
                        if fresh {
                            eprintln!("[preflight] sweep: skipping {} (committed <5s ago)", doc_path.display());
                            continue;
                        }
                        match git::commit(&doc_path) {
                            Ok(()) => eprintln!("[preflight] sweep: committed {}", doc_path.display()),
                            Err(e) => eprintln!("[preflight] sweep: warning for {}: {}", doc_path.display(), e),
                        }
                    }
                }
            }
        }
    }
    eprintln!("[preflight] step 3: claims");
    let claims = read_and_truncate_claims(file);
    // Debounce: wait until the file mtime has been stable for `debounce_ms`
    // (frontmatter override, default 2000ms) and no typing indicator is
    // active, bounded by `max_wait` so preflight can never hang.
    {
        let debounce_ms = std::fs::read_to_string(file)
            .ok()
            .and_then(|content| {
                frontmatter::parse(&content).ok().and_then(|(fm, _)| fm.debounce_ms)
            })
            .unwrap_or(2000);
        let debounce = std::time::Duration::from_millis(debounce_ms);
        // Cap the wait slightly above the debounce window (minimum 3s).
        let max_wait = std::time::Duration::from_secs(if debounce_ms > 3000 { (debounce_ms / 1000) + 1 } else { 3 });
        let poll = std::time::Duration::from_millis(100);
        let start = std::time::Instant::now();
        let file_str = file.to_string_lossy();
        tracing::debug!(debounce_ms, file = %file.display(), "preflight debounce starting");
        loop {
            // An unreadable mtime counts as fully idle.
            let idle_for = std::fs::metadata(file)
                .and_then(|m| m.modified())
                .ok()
                .and_then(|t| t.elapsed().ok())
                .unwrap_or(debounce);
            let typing_active = agent_doc::debounce::is_typing_via_file(&file_str, 1500);
            tracing::trace!(
                idle_ms = idle_for.as_millis() as u64,
                typing_active,
                elapsed_ms = start.elapsed().as_millis() as u64,
                "preflight debounce poll"
            );
            if idle_for >= debounce && !typing_active {
                tracing::debug!(
                    idle_ms = idle_for.as_millis() as u64,
                    waited_ms = start.elapsed().as_millis() as u64,
                    "preflight debounce settled"
                );
                break;
            }
            if start.elapsed() >= max_wait {
                if typing_active {
                    tracing::warn!(waited_ms = start.elapsed().as_millis() as u64, "preflight debounce timeout (typing still active)");
                    eprintln!("[preflight] typing indicator active but timeout after {:.1}s — proceeding", start.elapsed().as_secs_f64());
                } else {
                    tracing::warn!(waited_ms = start.elapsed().as_millis() as u64, "preflight debounce timeout (mtime not settled)");
                    eprintln!("[preflight] mtime debounce timeout after {:.1}s — proceeding", start.elapsed().as_secs_f64());
                }
                break;
            }
            std::thread::sleep(poll);
        }
    }
    eprintln!("[preflight] step 3c: related docs");
    let linked_changes = check_linked_docs(file);
    for change in &linked_changes {
        eprintln!("[preflight] related doc change: {} — {}", change.path, change.summary);
    }
    eprintln!("[preflight] step 4: diff");
    // Log a start marker so an interrupted cycle is detectable next run
    // (see the `interrupted_cycle_detected` check at the top).
    {
        let snap_len = crate::snapshot::load(file).unwrap_or(None).map(|s| s.len()).unwrap_or(0);
        let file_len = std::fs::metadata(file).map(|m| m.len() as usize).unwrap_or(0);
        crate::ops_log::log_op(file, &format!(
            "preflight_diff_start file={} snap_len={} file_len={}",
            file.display(), snap_len, file_len
        ));
    }
    let raw_diff = diff::compute(file)?;
    let no_changes = raw_diff.is_none();
    let global_config = config::load().unwrap_or_default();
    let harness = agent_doc::model_tier::detect_harness();
    // Scan the diff for an inline model-switch directive; the directive is
    // stripped from the diff that gets reported downstream.
    let model_scan = raw_diff.as_ref().map(|d| {
        agent_doc::model_tier::scan_model_switch(d, &harness, &global_config.model)
    });
    let diff_result: Option<String> = if let Some(scan) = model_scan.as_ref() {
        Some(scan.stripped_diff.clone())
    } else {
        raw_diff.clone()
    };
    let classification = diff_result.as_ref().map(|d| diff::classify_diff(d));
    let annotated_diff = diff_result.as_ref().and_then(|d| diff::annotate_diff(d));
    let parsed_commands = diff_result
        .as_ref()
        .map(|d| diff::parse_slash_commands_classified(d))
        .unwrap_or_else(|| diff::ParsedSlashCommands {
            skill_commands: vec![],
            builtin_commands: vec![],
        });
    let slash_commands = parsed_commands.skill_commands;
    let builtin_commands = parsed_commands.builtin_commands;
    // Tier/model hints can come from frontmatter or a `model` component.
    let (frontmatter_tier, component_tier_value, frontmatter_env, frontmatter_model) = match std::fs::read_to_string(file) {
        Ok(content) => {
            let (fm_tier, env_map, fm_model) = frontmatter::parse(&content)
                .ok()
                .map(|(fm, _)| (fm.model_tier, fm.env, fm.model))
                .unwrap_or_default();
            let comp_value = agent_doc::model_tier::extract_model_component(&content);
            (fm_tier, comp_value, env_map, fm_model)
        }
        Err(_) => (None, None, Default::default(), None),
    };
    let component_tier = component_tier_value.as_deref().and_then(|v| {
        agent_doc::model_tier::component_value_to_tier(v, &harness, &global_config.model)
    });
    // Added-line count (excluding diff headers) feeds the tier suggestion.
    let lines_added = diff_result
        .as_ref()
        .map(|d| {
            d.lines()
                .filter(|l| l.starts_with('+') && !l.starts_with("+++"))
                .count()
        })
        .unwrap_or(0);
    // Round-trip through serde to get the diff_type's serialized string form.
    let diff_type_str: Option<String> = classification.as_ref().and_then(|c| {
        serde_json::to_value(&c.diff_type)
            .ok()
            .and_then(|v| v.as_str().map(|s| s.to_string()))
    });
    let suggested = agent_doc::model_tier::suggested_tier(
        diff_type_str.as_deref(),
        lines_added,
        file,
    );
    let model_switch_name = model_scan.as_ref().and_then(|s| s.model_switch.clone());
    let model_switch_tier = model_scan.as_ref().and_then(|s| s.model_switch_tier);
    // Component tier takes precedence over frontmatter for the "required" tier.
    let required_tier_value = component_tier.or(frontmatter_tier);
    let effective_tier_value = agent_doc::model_tier::compose_effective_tier(
        model_switch_tier,
        component_tier,
        frontmatter_tier,
        suggested,
    );
    let pending_callbacks = crate::callback::scan_pending_callbacks(None)
        .unwrap_or_default();
    if !pending_callbacks.is_empty() {
        eprintln!("[preflight] found {} pending callback(s)", pending_callbacks.len());
    }
    let agent_model = resolve_agent_model(frontmatter_model.as_deref());
    let output = PreflightOutput {
        layout_issues,
        recovered,
        committed,
        claims,
        diff: diff_result,
        no_changes,
        linked_changes,
        baseline_file,
        diff_type: diff_type_str.clone(),
        diff_type_reason: classification.map(|c| c.diff_type_reason),
        annotated_diff,
        slash_commands,
        builtin_commands,
        effective_tier: Some(effective_tier_value.to_string()),
        required_tier: required_tier_value.map(|t| t.to_string()),
        suggested_tier: Some(suggested.to_string()),
        model_switch: model_switch_name,
        model_switch_tier: model_switch_tier.map(|t| t.to_string()),
        pending_callbacks,
        env: frontmatter_env,
        pending_reordered,
        pending_gated_count,
        agent_model,
    };
    let json = serde_json::to_string_pretty(&output)
        .context("failed to serialize preflight output")?;
    println!("{}", json);
    Ok(())
}
/// Housekeeping for the `pending` component: backfills missing hash ids and
/// checkboxes, reaps completed items into the `pending-done` archive, mirrors
/// the edits into the snapshot, and returns
/// `(reorder_detected, gated_item_count)`.
///
/// A missing file, unparsable components, or an absent `pending` component
/// all mean "nothing to do" (`Ok((false, 0))`). Errors only when rewriting
/// the document on disk fails.
fn run_pending_maintenance(file: &Path) -> Result<(bool, usize)> {
    let content = match std::fs::read_to_string(file) {
        Ok(c) => c,
        Err(_) => return Ok((false, 0)),
    };
    let components = match crate::component::parse(&content) {
        Ok(cs) => cs,
        Err(_) => return Ok((false, 0)),
    };
    let comp = match components.into_iter().find(|c| c.name == "pending") {
        Some(c) => c,
        None => return Ok((false, 0)),
    };
    let body = &content[comp.open_end..comp.close_start];
    let canonical = std::fs::canonicalize(file).unwrap_or_else(|_| file.to_path_buf());
    let doc_id = snapshot::doc_hash(&canonical).unwrap_or_else(|_| file.display().to_string());
    let mut current_body = body.to_string();
    let mut mutated = false;
    // Pass 1: ensure every pending item has a hash id and a checkbox.
    let (after_backfill, changed) =
        crate::pending::backfill(&current_body, &doc_id, &std::collections::HashSet::new());
    if changed {
        eprintln!("[preflight] pending: backfilled missing hash ids / checkboxes");
        current_body = after_backfill;
        mutated = true;
    }
    // Pass 2: remove completed items (archived into `pending-done` below).
    let (after_reap, removed_items) = crate::pending::reap_with_items(&current_body);
    if !removed_items.is_empty() {
        let removed_ids: Vec<String> =
            removed_items.iter().map(|i| i.id.clone()).collect();
        eprintln!(
            "[preflight] pending: reaped {} item(s): {}",
            removed_items.len(),
            removed_ids.join(", ")
        );
        current_body = after_reap;
        mutated = true;
    }
    if mutated {
        let mut new_file_content = comp.replace_content(&content, &current_body);
        if !removed_items.is_empty()
            && let Some(archived) =
                archive_pending_done(&new_file_content, &removed_items)
        {
            new_file_content = archived;
        }
        std::fs::write(file, &new_file_content)
            .with_context(|| format!("failed to write pending updates to {}", file.display()))?;
        // Mirror the same edits into the snapshot so the next diff does not
        // report this maintenance as a user change.
        if let Ok(Some(snap_content)) = snapshot::load(file) {
            let snap_comps = crate::component::parse(&snap_content).ok();
            if let Some(snap_pending) = snap_comps
                .and_then(|cs| cs.into_iter().find(|c| c.name == "pending"))
            {
                let mut new_snap = snap_pending.replace_content(&snap_content, &current_body);
                if !removed_items.is_empty()
                    && let Some(archived) = archive_pending_done(&new_snap, &removed_items)
                {
                    new_snap = archived;
                }
                if let Err(e) = snapshot::save(file, &new_snap) {
                    eprintln!("[preflight] pending: snapshot sync warning: {}", e);
                }
            }
        }
    }
    // Compare the pending list against the snapshot's to detect a reorder.
    let reordered = match snapshot::load(file).unwrap_or(None) {
        Some(snap) => {
            let snap_comp = crate::component::parse(&snap)
                .ok()
                .and_then(|comps| comps.into_iter().find(|c| c.name == "pending"));
            if let Some(sc) = snap_comp {
                let snap_body = &snap[sc.open_end..sc.close_start];
                crate::pending::detect_reorder(snap_body, &current_body).is_some()
            } else {
                false
            }
        }
        None => false,
    };
    if reordered {
        eprintln!("[preflight] pending: reorder detected (skill must not reorder this cycle)");
    }
    let (_, items, _) = crate::pending::parse_items(&current_body);
    let gated_count = items
        .iter()
        .filter(|i| matches!(i.state, crate::pending::PendingState::Gated))
        .count();
    if gated_count > 0 {
        eprintln!("[preflight] pending: {} gated item(s)", gated_count);
    }
    Ok((reordered, gated_count))
}
fn archive_pending_done(
content: &str,
removed: &[crate::pending::PendingItem],
) -> Option<String> {
if removed.is_empty() {
return None;
}
let components = crate::component::parse(content).ok()?;
let archive = components.into_iter().find(|c| c.name == "pending-done")?;
let existing_body = &content[archive.open_end..archive.close_start];
let today = std::process::Command::new("date")
.args(["+%Y-%m-%d"])
.output()
.ok()
.map(|o| String::from_utf8_lossy(&o.stdout).trim().to_string())
.filter(|s| !s.is_empty())
.unwrap_or_else(|| "unknown-date".to_string());
let mut new_body = existing_body.to_string();
if !new_body.is_empty() && !new_body.ends_with('\n') {
new_body.push('\n');
}
for item in removed {
new_body.push_str(&format!("- {} [#{}] {}\n", today, item.id, item.text));
}
Some(archive.replace_content(content, &new_body))
}
/// Drains `.agent-doc/claims.log` in the project root of `file`: returns all
/// non-blank lines and truncates the log so each claim is reported only once.
/// Any resolution or read failure yields an empty list.
fn read_and_truncate_claims(file: &Path) -> Vec<String> {
    let Ok(canonical) = file.canonicalize() else {
        return Vec::new();
    };
    let Some(root) = snapshot::find_project_root(&canonical) else {
        return Vec::new();
    };
    let log_path = root.join(".agent-doc/claims.log");
    let Ok(contents) = std::fs::read_to_string(&log_path) else {
        return Vec::new();
    };
    if contents.is_empty() {
        return Vec::new();
    }
    let claims: Vec<String> = contents
        .lines()
        .filter(|line| !line.trim().is_empty())
        .map(str::to_string)
        .collect();
    // Truncate the log so the same claims are not re-reported next cycle.
    if let Err(e) = std::fs::write(&log_path, "") {
        eprintln!("[preflight] failed to truncate claims log: {}", e);
    }
    claims
}
/// True when a frontmatter link is a web URL rather than a relative path.
fn is_url(link: &str) -> bool {
    ["http://", "https://"]
        .iter()
        .any(|scheme| link.starts_with(scheme))
}
/// Walks up from `file`'s parent to the nearest ancestor containing an
/// `.agent-doc` directory and returns its `links_cache` subdirectory,
/// creating it on demand. `None` when no `.agent-doc` ancestor exists or
/// the cache directory cannot be created.
fn links_cache_dir(file: &Path) -> Option<std::path::PathBuf> {
    // `ancestors()` yields `file` itself first; skip it to start at the parent.
    for dir in file.ancestors().skip(1) {
        let agent_dir = dir.join(".agent-doc");
        if agent_dir.is_dir() {
            let cache = agent_dir.join("links_cache");
            // A create failure aborts the whole lookup, as in a `?` early return.
            return std::fs::create_dir_all(&cache).ok().map(|_| cache);
        }
    }
    None
}
/// Cache file path for a URL: hex-encoded SHA-256 of the URL bytes, stored
/// as `<hash>.txt` under the links cache directory.
fn url_cache_path(cache_dir: &Path, url: &str) -> std::path::PathBuf {
    use sha2::{Digest, Sha256};
    let hash = format!("{:x}", Sha256::digest(url.as_bytes()));
    cache_dir.join(format!("{}.txt", hash))
}
/// Converts fetched HTML to markdown so cosmetic markup changes don't show
/// up as content changes; non-content tags are skipped. Falls back to the
/// raw HTML when conversion fails.
fn html_to_markdown(html: &str) -> String {
    use htmd::HtmlToMarkdown;
    let converter = HtmlToMarkdown::builder()
        .skip_tags(vec!["script", "style", "nav", "footer", "noscript", "svg"])
        .build();
    converter.convert(html).unwrap_or_else(|_| html.to_string())
}
/// True when an HTTP `content-type` header denotes an HTML(-ish) document.
fn is_html_content(content_type: &str) -> bool {
    ["text/html", "application/xhtml"]
        .iter()
        .any(|marker| content_type.contains(marker))
}
/// Fetches `url` (10s timeout) and compares its markdown-normalized body
/// against the cached copy. Returns a change record: empty `summary` means
/// "unchanged" (callers filter these out); `exists: false` on request or
/// body-read failure. The cache is written on first fetch and on change.
fn check_url_link(url: &str, cache_dir: &Path) -> RelatedDocChange {
    let cache_path = url_cache_path(cache_dir, url);
    let cached = std::fs::read_to_string(&cache_path).ok();
    let agent = ureq::AgentBuilder::new()
        .timeout(std::time::Duration::from_secs(10))
        .build();
    let response = agent.get(url).call();
    match response {
        Ok(resp) => {
            // Grab content-type before `into_string` consumes the response.
            let content_type = resp
                .header("content-type")
                .unwrap_or("")
                .to_string();
            let body = match resp.into_string() {
                Ok(b) => b,
                Err(e) => {
                    return RelatedDocChange {
                        path: url.to_string(),
                        summary: format!("fetch error: {}", e),
                        exists: false,
                    };
                }
            };
            // Normalize HTML to markdown for stable change detection.
            let content = if is_html_content(&content_type) {
                html_to_markdown(&body)
            } else {
                body
            };
            match cached {
                // Unchanged: empty summary signals "nothing to report".
                Some(ref old) if old == &content => {
                    RelatedDocChange {
                        path: url.to_string(),
                        summary: String::new(),
                        exists: true,
                    }
                }
                Some(_) => {
                    let _ = std::fs::write(&cache_path, &content);
                    RelatedDocChange {
                        path: url.to_string(),
                        summary: format!("content changed ({} bytes)", content.len()),
                        exists: true,
                    }
                }
                None => {
                    let _ = std::fs::write(&cache_path, &content);
                    RelatedDocChange {
                        path: url.to_string(),
                        summary: format!("initial fetch ({} bytes)", content.len()),
                        exists: true,
                    }
                }
            }
        }
        Err(e) => RelatedDocChange {
            path: url.to_string(),
            summary: format!("fetch failed: {}", e),
            exists: false,
        },
    }
}
/// Inspects every frontmatter `links` entry of `file` and returns the ones
/// that changed: URLs are fetched and diffed against the local cache;
/// relative paths are flagged when missing or when their last git commit is
/// newer than this document's snapshot.
fn check_linked_docs(file: &Path) -> Vec<RelatedDocChange> {
    let content = match std::fs::read_to_string(file) {
        Ok(c) => c,
        Err(_) => return vec![],
    };
    let fm = match frontmatter::parse(&content) {
        Ok((fm, _)) => fm,
        Err(_) => return vec![],
    };
    if fm.links.is_empty() {
        return vec![];
    }
    // Our snapshot mtime is the "last seen" reference point for file links.
    let our_snapshot_mtime = snapshot::path_for(file)
        .ok()
        .and_then(|p| std::fs::metadata(&p).ok())
        .and_then(|m| m.modified().ok());
    let doc_dir = match file.parent() {
        Some(d) => d,
        None => return vec![],
    };
    let cache_dir = links_cache_dir(file);
    let mut changes = Vec::new();
    for link in &fm.links {
        if is_url(link) {
            if let Some(ref cache) = cache_dir {
                let change = check_url_link(link, cache);
                // Empty summary means "unchanged" — not reported.
                if !change.summary.is_empty() {
                    changes.push(change);
                }
            } else {
                eprintln!("[preflight] warning: cannot resolve links cache for URL: {}", link);
            }
            continue;
        }
        // Relative paths resolve against the document's own directory.
        let resolved = doc_dir.join(link);
        if !resolved.exists() {
            changes.push(RelatedDocChange {
                path: link.clone(),
                summary: "file not found".to_string(),
                exists: false,
            });
            continue;
        }
        // No git history for the link target — nothing to compare against.
        let related_mtime = match git::last_commit_mtime(&resolved) {
            Ok(Some(t)) => t,
            _ => continue,
        };
        // No snapshot of our own means we've never "seen" the link: report it.
        let is_newer = match our_snapshot_mtime {
            Some(snap_time) => related_mtime > snap_time,
            None => true,
        };
        if !is_newer {
            continue;
        }
        let summary = recent_commit_summary(&resolved, our_snapshot_mtime);
        changes.push(RelatedDocChange {
            path: link.clone(),
            summary,
            exists: true,
        });
    }
    changes
}
/// Builds a short "what changed" summary for `file` from `git log --oneline`
/// (up to 5 commits), optionally limited to commits after `since`. Falls back
/// to generic text when git is unavailable or the log command fails.
///
/// Cleanup: the original cloned `since_arg` into a second local and made an
/// extra `to_string()` copy of the relative path / git output; both borrows
/// live long enough to be pushed directly, so those allocations are dropped.
fn recent_commit_summary(file: &Path, since: Option<std::time::SystemTime>) -> String {
    // `git log --since=` accepts a unix timestamp in seconds.
    let since_arg = since.and_then(|t| {
        t.duration_since(std::time::UNIX_EPOCH)
            .ok()
            .map(|d| format!("--since={}", d.as_secs()))
    });
    let (git_root, resolved) = match git::resolve_to_git_root(file) {
        Ok(pair) => pair,
        Err(_) => return "changed (git unavailable)".to_string(),
    };
    // git wants the path relative to the repository root.
    let rel_path = resolved
        .strip_prefix(&git_root)
        .unwrap_or(&resolved);
    let rel_str = rel_path.to_string_lossy();
    let mut args = vec!["log", "--oneline", "-5"];
    if let Some(ref s) = since_arg {
        args.push(s);
    }
    args.push("--");
    args.push(&rel_str);
    let output = std::process::Command::new("git")
        .current_dir(&git_root)
        .args(&args)
        .output();
    match output {
        Ok(out) if out.status.success() => {
            let text = String::from_utf8_lossy(&out.stdout);
            // `-5` already limits the log; take(5) guards against surprises.
            let lines: Vec<&str> = text.lines().take(5).collect();
            if lines.is_empty() {
                "changed".to_string()
            } else {
                lines.join("; ")
            }
        }
        _ => "changed (git log failed)".to_string(),
    }
}
#[cfg(test)]
mod tests {
use super::*;
use std::process::Command;
use tempfile::TempDir;
    /// Builds a throwaway project: the `.agent-doc` subdirectory layout plus
    /// an initialized git repo with a test identity so commits can succeed.
    fn setup_project() -> TempDir {
        let dir = TempDir::new().unwrap();
        std::fs::create_dir_all(dir.path().join(".agent-doc/snapshots")).unwrap();
        std::fs::create_dir_all(dir.path().join(".agent-doc/pending")).unwrap();
        std::fs::create_dir_all(dir.path().join(".agent-doc/locks")).unwrap();
        // git failures are ignored here; tests that need git will surface them.
        Command::new("git")
            .current_dir(dir.path())
            .args(["init"])
            .output()
            .ok();
        Command::new("git")
            .current_dir(dir.path())
            .args(["config", "user.email", "test@test.com"])
            .output()
            .ok();
        Command::new("git")
            .current_dir(dir.path())
            .args(["config", "user.name", "Test"])
            .output()
            .ok();
        dir
    }
#[test]
fn preflight_produces_valid_json() {
let dir = setup_project();
let doc = dir.path().join("session.md");
std::fs::write(
&doc,
"---\nsession: test\n---\n\n## User\n\nHello\n",
)
.unwrap();
snapshot::save(&doc, &std::fs::read_to_string(&doc).unwrap()).unwrap();
run(&doc).unwrap();
}
#[test]
fn preflight_file_not_found() {
let err = run(Path::new("/nonexistent/missing.md")).unwrap_err();
assert!(err.to_string().contains("file not found"));
}
#[test]
fn preflight_detects_diff() {
let dir = setup_project();
let doc = dir.path().join("session.md");
let original = "---\nsession: test\n---\n\n## User\n\nHello\n";
std::fs::write(&doc, original).unwrap();
snapshot::save(&doc, original).unwrap();
std::fs::write(
&doc,
"---\nsession: test\n---\n\n## User\n\nHello\n\nNew question here.\n",
)
.unwrap();
let diff_result = diff::compute(&doc).unwrap();
assert!(diff_result.is_some(), "diff should detect new content");
}
#[test]
fn preflight_claims_read_and_truncated() {
let dir = setup_project();
let doc = dir.path().join("session.md");
std::fs::write(&doc, "# Doc\n").unwrap();
snapshot::save(&doc, "# Doc\n").unwrap();
let log_path = dir.path().join(".agent-doc/claims.log");
std::fs::write(&log_path, "claim A\nclaim B\n").unwrap();
let claims = read_and_truncate_claims(&doc);
assert_eq!(claims, vec!["claim A", "claim B"]);
let after = std::fs::read_to_string(&log_path).unwrap();
assert!(after.is_empty(), "claims log should be empty after read");
}
#[test]
fn preflight_no_claims_log_returns_empty() {
let dir = setup_project();
let doc = dir.path().join("session.md");
std::fs::write(&doc, "# Doc\n").unwrap();
let claims = read_and_truncate_claims(&doc);
assert!(claims.is_empty());
}
#[test]
fn preflight_output_serializes_correctly() {
let output = PreflightOutput {
layout_issues: vec![],
recovered: false,
committed: true,
claims: vec!["foo".to_string()],
diff: Some("+new line\n".to_string()),
no_changes: false,
linked_changes: vec![],
baseline_file: None,
diff_type: None,
diff_type_reason: None,
annotated_diff: None,
slash_commands: vec![],
builtin_commands: vec![],
..Default::default()
};
let json = serde_json::to_string(&output).unwrap();
let parsed: serde_json::Value = serde_json::from_str(&json).unwrap();
assert_eq!(parsed["recovered"], false);
assert_eq!(parsed["committed"], true);
assert_eq!(parsed["claims"][0], "foo");
assert_eq!(parsed["no_changes"], false);
assert!(parsed["diff"].as_str().is_some());
assert!(parsed.get("document").is_none(), "document field must be absent");
}
#[test]
fn preflight_output_null_diff_when_no_changes() {
let output = PreflightOutput {
layout_issues: vec![],
recovered: false,
committed: false,
claims: vec![],
diff: None,
no_changes: true,
linked_changes: vec![],
baseline_file: None,
diff_type: None,
diff_type_reason: None,
annotated_diff: None,
slash_commands: vec![],
builtin_commands: vec![],
..Default::default()
};
let json = serde_json::to_string(&output).unwrap();
let parsed: serde_json::Value = serde_json::from_str(&json).unwrap();
assert!(parsed["diff"].is_null());
assert_eq!(parsed["no_changes"], true);
}
#[test]
fn check_layout_returns_empty_outside_tmux() {
let saved = std::env::var("TMUX").ok();
unsafe { std::env::remove_var("TMUX") };
let issues = check_layout();
if let Some(val) = saved {
unsafe { std::env::set_var("TMUX", val) };
}
assert!(issues.is_empty(), "expected no issues outside tmux, got: {:?}", issues);
}
#[test]
fn preflight_output_includes_layout_issues() {
let output = PreflightOutput {
layout_issues: vec!["window index 0 missing".to_string()],
recovered: false,
committed: false,
claims: vec![],
diff: None,
no_changes: true,
linked_changes: vec![],
baseline_file: None,
diff_type: None,
diff_type_reason: None,
annotated_diff: None,
slash_commands: vec![],
builtin_commands: vec![],
..Default::default()
};
let json = serde_json::to_string(&output).unwrap();
let parsed: serde_json::Value = serde_json::from_str(&json).unwrap();
assert_eq!(parsed["layout_issues"].as_array().unwrap().len(), 1);
assert_eq!(parsed["layout_issues"][0], "window index 0 missing");
}
    /// An empty registry can't contain duplicate claims.
    #[test]
    fn detect_duplicate_claims_empty_registry() {
        let registry = tmux_router::Registry::new();
        assert!(detect_duplicate_claims(&registry).is_empty());
    }
    /// Two sessions claiming different files is not a conflict.
    #[test]
    fn detect_duplicate_claims_no_duplicates() {
        let mut registry = tmux_router::Registry::new();
        registry.insert(
            "session-a".to_string(),
            tmux_router::RegistryEntry {
                pane: "%1".to_string(),
                pid: 100,
                cwd: "/work".to_string(),
                started: "2026-01-01".to_string(),
                file: "tasks/foo.md".to_string(),
                window: "@1".to_string(),
            },
        );
        registry.insert(
            "session-b".to_string(),
            tmux_router::RegistryEntry {
                pane: "%2".to_string(),
                pid: 101,
                cwd: "/work".to_string(),
                started: "2026-01-01".to_string(),
                file: "tasks/bar.md".to_string(),
                window: "@1".to_string(),
            },
        );
        assert!(detect_duplicate_claims(&registry).is_empty());
    }
    /// Two sessions claiming the same file must produce one issue naming
    /// the file and both session ids.
    #[test]
    fn detect_duplicate_claims_two_sessions_same_file() {
        let mut registry = tmux_router::Registry::new();
        registry.insert(
            "session-a".to_string(),
            tmux_router::RegistryEntry {
                pane: "%1".to_string(),
                pid: 100,
                cwd: "/work".to_string(),
                started: "2026-01-01".to_string(),
                file: "tasks/shared.md".to_string(),
                window: "@1".to_string(),
            },
        );
        registry.insert(
            "session-b".to_string(),
            tmux_router::RegistryEntry {
                pane: "%2".to_string(),
                pid: 101,
                cwd: "/work".to_string(),
                started: "2026-01-01".to_string(),
                file: "tasks/shared.md".to_string(),
                window: "@1".to_string(),
            },
        );
        let issues = detect_duplicate_claims(&registry);
        assert_eq!(issues.len(), 1);
        assert!(issues[0].contains("duplicate claims"));
        assert!(issues[0].contains("tasks/shared.md"));
        assert!(issues[0].contains("session-a"));
        assert!(issues[0].contains("session-b"));
    }
// Entries with an empty `file` field are not "claims" and must never be
// reported as duplicates of each other.
//
// Fix: `®istry` was a mangled `&registry` (HTML entity `&reg`) — restored.
#[test]
fn detect_duplicate_claims_skips_empty_file_entries() {
    let mut registry = tmux_router::Registry::new();
    registry.insert(
        "session-a".to_string(),
        tmux_router::RegistryEntry {
            pane: "%1".to_string(),
            pid: 100,
            cwd: "/work".to_string(),
            started: "2026-01-01".to_string(),
            file: String::new(),
            window: "@1".to_string(),
        },
    );
    registry.insert(
        "session-b".to_string(),
        tmux_router::RegistryEntry {
            pane: "%2".to_string(),
            pid: 101,
            cwd: "/work".to_string(),
            started: "2026-01-01".to_string(),
            file: String::new(),
            window: "@1".to_string(),
        },
    );
    assert!(detect_duplicate_claims(&registry).is_empty());
}
// http(s) schemes are URLs; relative paths and the empty string are not.
#[test]
fn is_url_detects_http() {
    for url in ["http://example.com", "https://example.com/path"] {
        assert!(is_url(url));
    }
    for non_url in ["../relative/path.md", "tasks/software/agent-doc.md", ""] {
        assert!(!is_url(non_url));
    }
}
// HTML-ish content types (with or without charset, plus XHTML) are detected;
// JSON and plain text are not.
#[test]
fn is_html_content_detects_html() {
    for content_type in ["text/html; charset=utf-8", "text/html", "application/xhtml+xml"] {
        assert!(is_html_content(content_type));
    }
    for content_type in ["application/json", "text/plain"] {
        assert!(!is_html_content(content_type));
    }
}
// Heading text survives conversion and <strong> becomes ** markers.
#[test]
fn html_to_markdown_converts_basic_html() {
    let markdown = html_to_markdown("<h1>Title</h1><p>Hello <strong>world</strong>.</p>");
    assert!(markdown.contains("Title"), "should contain heading text");
    assert!(markdown.contains("**world**"), "should convert bold");
}
// <script> and <style> bodies must be dropped entirely while normal paragraph
// text is kept.
#[test]
fn html_to_markdown_strips_script_and_style() {
    let source =
        "<p>Visible</p><script>alert('xss')</script><style>.foo{}</style><p>Also visible</p>";
    let markdown = html_to_markdown(source);
    for kept in ["Visible", "Also visible"] {
        assert!(markdown.contains(kept));
    }
    assert!(!markdown.contains("alert"), "script content should be stripped");
    assert!(!markdown.contains(".foo"), "style content should be stripped");
}
// Chrome elements (<nav>, <footer>) are removed; <main> content is kept.
#[test]
fn html_to_markdown_strips_nav_and_footer() {
    let source =
        "<nav><a href='/'>Home</a></nav><main><p>Content</p></main><footer>Copyright</footer>";
    let markdown = html_to_markdown(source);
    assert!(markdown.contains("Content"));
    assert!(!markdown.contains("Home"), "nav content should be stripped");
    assert!(!markdown.contains("Copyright"), "footer content should be stripped");
}
// The cache path derived from a URL is stable across calls, distinct across
// URLs, and uses a .txt extension.
//
// Fix: the last check used `assert!(a == b)`, which on failure prints nothing
// useful; `assert_eq!` prints both sides.
#[test]
fn url_cache_path_is_deterministic() {
    let dir = TempDir::new().unwrap();
    let p1 = url_cache_path(dir.path(), "https://example.com");
    let p2 = url_cache_path(dir.path(), "https://example.com");
    assert_eq!(p1, p2, "same URL should produce same cache path");
    let p3 = url_cache_path(dir.path(), "https://other.com");
    assert_ne!(p1, p3, "different URLs should produce different cache paths");
    assert_eq!(p1.extension().unwrap(), "txt");
}
// Resolving the links cache dir for a doc creates the directory on disk and
// names it `links_cache`.
#[test]
fn links_cache_dir_creates_directory() {
    let project = setup_project();
    let doc = project.path().join("session.md");
    std::fs::write(&doc, "# Doc\n").unwrap();
    let cache = links_cache_dir(&doc);
    assert!(cache.is_some());
    let cache_path = cache.unwrap();
    assert!(cache_path.exists());
    assert!(cache_path.ends_with("links_cache"));
}
// A Some(...) baseline_file must appear verbatim in the serialized JSON.
#[test]
fn preflight_output_includes_baseline_file() {
    // Non-default fields only; Default supplies the rest.
    let output = PreflightOutput {
        committed: true,
        no_changes: true,
        baseline_file: Some("/tmp/baseline.md".to_string()),
        ..Default::default()
    };
    let parsed: serde_json::Value =
        serde_json::from_str(&serde_json::to_string(&output).unwrap()).unwrap();
    assert_eq!(parsed["baseline_file"], "/tmp/baseline.md");
}
// skip_serializing_if must drop the baseline_file key entirely when None.
#[test]
fn preflight_output_omits_baseline_file_when_none() {
    let output = PreflightOutput { no_changes: true, ..Default::default() };
    let parsed: serde_json::Value =
        serde_json::from_str(&serde_json::to_string(&output).unwrap()).unwrap();
    assert!(parsed.get("baseline_file").is_none(), "baseline_file should be omitted when None");
}
// diff_type / diff_type_reason appear in the JSON when set.
#[test]
fn preflight_output_includes_diff_type_when_set() {
    let output = PreflightOutput {
        committed: true,
        diff: Some("+go\n".to_string()),
        diff_type: Some("approval".to_string()),
        diff_type_reason: Some("single approval word: \"go\"".to_string()),
        ..Default::default()
    };
    let parsed: serde_json::Value =
        serde_json::from_str(&serde_json::to_string(&output).unwrap()).unwrap();
    assert_eq!(parsed["diff_type"], "approval");
    assert!(parsed["diff_type_reason"].as_str().unwrap().contains("go"));
}
// Both diff_type keys are skipped entirely when None.
#[test]
fn preflight_output_omits_diff_type_when_none() {
    let output = PreflightOutput { no_changes: true, ..Default::default() };
    let parsed: serde_json::Value =
        serde_json::from_str(&serde_json::to_string(&output).unwrap()).unwrap();
    assert!(parsed.get("diff_type").is_none(), "diff_type should be omitted when None");
    assert!(parsed.get("diff_type_reason").is_none(), "diff_type_reason should be omitted when None");
}
// annotated_diff appears verbatim in the JSON when set.
#[test]
fn preflight_output_includes_annotated_diff_when_set() {
    let output = PreflightOutput {
        committed: true,
        diff: Some("+line\n".to_string()),
        annotated_diff: Some("[user+] line".to_string()),
        ..Default::default()
    };
    let parsed: serde_json::Value =
        serde_json::from_str(&serde_json::to_string(&output).unwrap()).unwrap();
    assert_eq!(parsed["annotated_diff"], "[user+] line");
}
// annotated_diff is skipped entirely when None.
#[test]
fn preflight_output_omits_annotated_diff_when_none() {
    let output = PreflightOutput { no_changes: true, ..Default::default() };
    let parsed: serde_json::Value =
        serde_json::from_str(&serde_json::to_string(&output).unwrap()).unwrap();
    assert!(parsed.get("annotated_diff").is_none(), "annotated_diff should be omitted when None");
}
// A "+/clear" line in the diff classifies as a builtin command, leaving the
// skill-command list empty (and therefore absent from the JSON).
#[test]
fn preflight_output_slash_commands_from_diff() {
    let diff = "--- snapshot\n+++ document\n@@ -1 +1,2 @@\n ctx\n+/clear\n";
    let classified = crate::diff::parse_slash_commands_classified(diff);
    let output = PreflightOutput {
        diff: Some(diff.to_string()),
        slash_commands: classified.skill_commands,
        builtin_commands: classified.builtin_commands,
        ..Default::default()
    };
    let parsed: serde_json::Value =
        serde_json::from_str(&serde_json::to_string(&output).unwrap()).unwrap();
    assert_eq!(parsed["builtin_commands"][0], "/clear");
    let skills = &parsed["slash_commands"];
    assert!(skills.is_null() || skills.as_array().map_or(true, |a| a.is_empty()));
}
// Guards against anyone adding a full-document field to the preflight payload.
#[test]
fn preflight_output_no_document_field() {
    let output = PreflightOutput { no_changes: true, ..Default::default() };
    let parsed: serde_json::Value =
        serde_json::from_str(&serde_json::to_string(&output).unwrap()).unwrap();
    assert!(
        parsed.get("document").is_none(),
        "document key must be absent from preflight JSON — it would waste tokens on every cycle"
    );
}
// No field other than `diff` may embed large document content; every other
// serialized field must stay small.
#[test]
fn preflight_output_no_large_content() {
    let large_content = "x".repeat(10_000);
    let output = PreflightOutput {
        diff: Some(format!("+{large_content}")),
        baseline_file: Some("/tmp/baseline.md".to_string()),
        ..Default::default()
    };
    let parsed: serde_json::Value =
        serde_json::from_str(&serde_json::to_string(&output).unwrap()).unwrap();
    for (key, val) in parsed.as_object().unwrap() {
        if key == "diff" {
            continue;
        }
        let rendered = val.to_string();
        assert!(
            !rendered.contains(&large_content),
            "field `{key}` contains large content — this would waste tokens on every preflight cycle"
        );
        assert!(
            rendered.len() < 1_000 || key == "annotated_diff",
            "field `{key}` is suspiciously large ({} bytes) — preflight should not embed document content",
            rendered.len()
        );
    }
    // The diff itself is the one field allowed (and expected) to carry it.
    assert!(parsed["diff"].as_str().unwrap_or("").contains(&large_content));
}
// Preflight sweep: running preflight on one document should also auto-commit
// OTHER tracked session docs whose snapshot has advanced since the last git
// commit and whose content is in a committable state.
#[test]
fn preflight_sweep_commits_other_tracked_docs() {
use std::fs;
// Fresh repo with an initial commit so subsequent diffs are well-defined.
let dir = setup_project();
let root = dir.path();
let readme = root.join("README.md");
fs::write(&readme, "# project\n").unwrap();
Command::new("git").current_dir(root).args(["add", "README.md"]).output().unwrap();
Command::new("git").current_dir(root).args(["commit", "-m", "initial", "--no-verify"]).output().unwrap();
// Primary doc — the one preflight is invoked on. Ends with an empty
// "## User" section, i.e. no unresponded user content.
let primary = root.join("primary.md");
let primary_content = "---\nagent_doc_session: primary\n---\n\n## User\n\nHello\n\n## Assistant\n\nReply\n\n## User\n\n";
fs::write(&primary, primary_content).unwrap();
snapshot::save(&primary, primary_content).unwrap();
Command::new("git").current_dir(root).args(["add", "primary.md"]).output().unwrap();
Command::new("git").current_dir(root).args(["commit", "-m", "add primary", "--no-verify"]).output().unwrap();
// Secondary doc — tracked by a different session. Committed with backdated
// git timestamps so the snapshot update below is clearly newer than the
// last commit touching it.
let secondary = root.join("secondary.md");
let secondary_content = "---\nagent_doc_session: secondary\n---\n\n## User\n\nHi\n\n## Assistant\n\nResponse\n\n## User\n\n";
fs::write(&secondary, secondary_content).unwrap();
snapshot::save(&secondary, secondary_content).unwrap();
Command::new("git").current_dir(root).args(["add", "secondary.md"]).output().unwrap();
Command::new("git").current_dir(root)
.args(["commit", "-m", "add secondary", "--no-verify"])
.env("GIT_COMMITTER_DATE", "2026-01-01T00:00:00Z")
.env("GIT_AUTHOR_DATE", "2026-01-01T00:00:00Z")
.output().unwrap();
// Simulate an agent-side update: rewrite secondary's snapshot so it differs
// from what was last committed.
let snap_rel = snapshot::path_for(&secondary).unwrap();
let snap_abs = root.join(&snap_rel);
let new_snap = format!("{}\n<!-- agent updated -->", secondary_content);
fs::write(&snap_abs, &new_snap).unwrap();
// Register secondary.md in the sessions registry so the sweep considers it
// a tracked doc belonging to another live session.
let sessions_path = root.join(".agent-doc/sessions.json");
let sessions = serde_json::json!({
"secondary-session": {
"pane": "%1",
"pid": 9999,
"cwd": root.to_string_lossy(),
"started": "2026-01-01",
"file": "secondary.md",
"window": "@1"
}
});
fs::write(&sessions_path, serde_json::to_string_pretty(&sessions).unwrap()).unwrap();
// Run preflight against the PRIMARY doc only; the sweep should pick up
// secondary.md as a side effect.
run(&primary).unwrap();
// The sweep commit message format is "agent-doc(<session>): ..." — look for
// the secondary session's commit in recent history.
let log = Command::new("git")
.current_dir(root)
.args(["log", "--oneline", "-4"])
.output()
.unwrap();
let log_str = String::from_utf8_lossy(&log.stdout);
assert!(
log_str.contains("agent-doc(secondary):"),
"preflight sweep should have committed secondary.md, got:\n{log_str}"
);
}
// Preflight sweep must NOT commit another tracked doc whose document text has
// user content the agent has not yet responded to — committing it would bury
// the pending question.
#[test]
fn preflight_sweep_skips_doc_with_unresponded_user_content() {
use std::fs;
// Fresh repo with an initial commit.
let dir = setup_project();
let root = dir.path();
let readme = root.join("README.md");
fs::write(&readme, "# project\n").unwrap();
Command::new("git").current_dir(root).args(["add", "README.md"]).output().unwrap();
Command::new("git").current_dir(root).args(["commit", "-m", "initial", "--no-verify"]).output().unwrap();
// Primary doc — the one preflight runs on; fully responded.
let primary = root.join("primary.md");
let primary_content = "---\nagent_doc_session: primary\n---\n\n## User\n\nHello\n\n## Assistant\n\nReply\n\n## User\n\n";
fs::write(&primary, primary_content).unwrap();
snapshot::save(&primary, primary_content).unwrap();
Command::new("git").current_dir(root).args(["add", "primary.md"]).output().unwrap();
Command::new("git").current_dir(root).args(["commit", "-m", "add primary", "--no-verify"]).output().unwrap();
// Secondary doc: the on-disk document has a NEW trailing user question that
// the snapshot does not — i.e. unresponded user content.
let secondary = root.join("secondary.md");
let snap_content = "---\nagent_doc_session: secondary\n---\n\n## User\n\nHi\n\n## Assistant\n\nResponse\n\n## User\n\n";
let doc_content = "---\nagent_doc_session: secondary\n---\n\n## User\n\nHi\n\n## Assistant\n\nResponse\n\n## User\n\nNew question from user\n";
fs::write(&secondary, doc_content).unwrap();
snapshot::save(&secondary, snap_content).unwrap();
Command::new("git").current_dir(root).args(["add", "secondary.md"]).output().unwrap();
// Backdated commit so the snapshot rewrite below is unambiguously newer.
Command::new("git").current_dir(root)
.args(["commit", "-m", "add secondary", "--no-verify"])
.env("GIT_COMMITTER_DATE", "2026-01-01T00:00:00Z")
.env("GIT_AUTHOR_DATE", "2026-01-01T00:00:00Z")
.output().unwrap();
let snap_rel = snapshot::path_for(&secondary).unwrap();
let snap_abs = root.join(&snap_rel);
// Brief sleep so the snapshot rewrite gets a later mtime than the commit —
// presumably the sweep uses timestamps to detect "snapshot newer than
// commit"; TODO confirm against the sweep implementation.
std::thread::sleep(std::time::Duration::from_millis(50));
fs::write(&snap_abs, snap_content).unwrap();
// Register secondary.md as belonging to another live session.
let sessions_path = root.join(".agent-doc/sessions.json");
let sessions = serde_json::json!({
"secondary-session": {
"pane": "%1",
"pid": 9999,
"cwd": root.to_string_lossy(),
"started": "2026-01-01",
"file": "secondary.md",
"window": "@1"
}
});
fs::write(&sessions_path, serde_json::to_string_pretty(&sessions).unwrap()).unwrap();
// Record commit count before running preflight so we can bound how many new
// commits appeared.
let log_before = Command::new("git")
.current_dir(root)
.args(["log", "--oneline"])
.output()
.unwrap();
let count_before = String::from_utf8_lossy(&log_before.stdout).lines().count();
run(&primary).unwrap();
let log_after = Command::new("git")
.current_dir(root)
.args(["log", "--oneline"])
.output()
.unwrap();
let log_str = String::from_utf8_lossy(&log_after.stdout);
// No "agent-doc(secondary):" commit may exist — the pending user question
// must block the sweep for that doc.
assert!(
!log_str.contains("agent-doc(secondary):"),
"preflight sweep should NOT have committed secondary.md (has unresponded user content), got:\n{log_str}"
);
// At most one new commit (for the primary doc itself) is allowed.
let count_after = log_str.lines().count();
assert!(
count_after <= count_before + 1,
"expected at most one new commit (primary), got {} new commits",
count_after - count_before
);
}
// A leading "claude-" vendor prefix is stripped from model identifiers.
#[test]
fn short_model_name_strips_claude_prefix() {
    let cases = [
        ("claude-sonnet-4-6", "sonnet-4-6"),
        ("claude-opus-4", "opus-4"),
        ("claude-haiku-4-5", "haiku-4-5"),
    ];
    for (input, expected) in cases {
        assert_eq!(short_model_name(input), expected);
    }
}
// Names without the "claude-" prefix (including the empty string) pass through
// unchanged.
#[test]
fn short_model_name_returns_as_is_without_prefix() {
    for name in ["sonnet-4-6", "gpt-4o", ""] {
        assert_eq!(short_model_name(name), name);
    }
}
// A frontmatter model value is resolved (with the vendor prefix stripped).
#[test]
fn resolve_agent_model_uses_frontmatter_only() {
    assert_eq!(resolve_agent_model(Some("claude-opus-4")), Some("opus-4".to_string()));
}
// The "claude-" prefix in a frontmatter value is stripped during resolution.
#[test]
fn resolve_agent_model_strips_claude_prefix_from_frontmatter() {
    assert_eq!(resolve_agent_model(Some("claude-haiku-4-5")), Some("haiku-4-5".to_string()));
}
// With no frontmatter model, resolution yields None rather than a default.
#[test]
fn resolve_agent_model_none_when_no_frontmatter() {
    assert_eq!(resolve_agent_model(None), None);
}
}