use crate::ai::prompt::{
ExistingHooks, build_followup_prompt, build_initial_prompt, collect_plugins_tree,
};
use crate::ai::{
Backend, HookChoice, HookWriteDecisions, HookWriteResult, Proposal, ProposalSection,
ensure_cli_installed, invoke_oneshot, parse_proposal, run_handoff, should_emit_merged,
validate_proposal_toml, write_hook_files,
};
use anyhow::{Context, Result};
use std::path::Path;
/// Final disposition of one AI proposal session.
pub enum ChatOutcome {
    /// The user applied the proposal; `hook_changes` records what was written.
    Applied { hook_changes: HookWriteResult },
    /// The user discarded the proposal; nothing was written.
    Skipped,
    /// The session was handed off to the backend's native CLI; rvpm wrote nothing.
    HandedOff,
}
/// Runs the interactive "AI add" flow for a plugin that has no config entry yet:
/// collects the user's current config as context, builds the initial prompt,
/// asks the backend for a proposal, then drives the review loop
/// (apply / chat / hand off / skip).
///
/// # Errors
/// Fails if the backend CLI is missing, the user config cannot be read,
/// prompt building fails, or the AI invocation / file writes error out.
#[allow(clippy::too_many_arguments)]
pub async fn run_ai_add(
    backend: Backend,
    plugin_url: &str,
    plugin_root: &Path,
    plugin_config_dir: &Path,
    config_root: &Path,
    user_config_toml_path: &Path,
    ai_language: &str,
    chezmoi_enabled: bool,
) -> Result<AiAddOutcome> {
    // Fail fast before doing any work if the backend's CLI binary is absent.
    ensure_cli_installed(backend)?;
    let (user_config_toml, user_plugins_tree) =
        collect_user_context(user_config_toml_path, config_root)?;
    let existing_hooks = read_existing_hooks(plugin_config_dir);
    let has_existing_to_merge_against = !existing_hooks.is_empty();
    // `_merged` proposal variants only make sense when there is something on
    // disk to merge into, and some backends disable them outright.
    let merged_supported = should_emit_merged(backend) && has_existing_to_merge_against;
    announce_merged_decision(backend, merged_supported, has_existing_to_merge_against);
    let initial_prompt = build_initial_prompt(
        plugin_url,
        plugin_root,
        user_config_toml_path,
        plugin_config_dir,
        &user_config_toml,
        &user_plugins_tree,
        &existing_hooks,
        merged_supported,
        ai_language,
    )?;
    // Debug escape hatch: RVPM_AI_DUMP_PROMPT dumps the prompt and skips the AI call.
    if let Some(out) = maybe_dump_and_skip(&initial_prompt)? {
        return Ok(out);
    }
    eprintln!(
        "\u{1f916} Asking {} about {} (this may take a moment)...",
        backend.label(),
        plugin_url
    );
    run_chat_loop(
        backend,
        initial_prompt,
        plugin_config_dir,
        user_config_toml_path,
        existing_hooks,
        // No existing [[plugins]] entry: add mode, not tune mode.
        None,
        chezmoi_enabled,
    )
    .await
}
/// Runs the interactive "AI tune" flow for a plugin that already has a config
/// entry (`current_entry_toml`): same review loop as [`run_ai_add`], but the
/// prompt includes the current entry and the user can keep/merge against it.
///
/// # Errors
/// Fails if the backend CLI is missing, the user config cannot be read,
/// prompt building fails, or the AI invocation / file writes error out.
#[allow(clippy::too_many_arguments)]
pub async fn run_ai_tune(
    backend: Backend,
    plugin_url: &str,
    plugin_root: &Path,
    plugin_config_dir: &Path,
    config_root: &Path,
    user_config_toml_path: &Path,
    current_entry_toml: &str,
    ai_language: &str,
    chezmoi_enabled: bool,
) -> Result<AiAddOutcome> {
    // Fail fast before doing any work if the backend's CLI binary is absent.
    ensure_cli_installed(backend)?;
    let (user_config_toml, user_plugins_tree) =
        collect_user_context(user_config_toml_path, config_root)?;
    let existing_hooks = read_existing_hooks(plugin_config_dir);
    // In tune mode an entry always exists, so "has existing" is treated as true
    // and merged support depends only on the backend.
    let merged_supported = should_emit_merged(backend);
    announce_merged_decision(backend, merged_supported, true);
    let initial_prompt = crate::ai::prompt::build_tune_prompt(
        plugin_url,
        plugin_root,
        user_config_toml_path,
        plugin_config_dir,
        current_entry_toml,
        &user_config_toml,
        &user_plugins_tree,
        &existing_hooks,
        merged_supported,
        ai_language,
    )?;
    // Debug escape hatch: RVPM_AI_DUMP_PROMPT dumps the prompt and skips the AI call.
    if let Some(out) = maybe_dump_and_skip(&initial_prompt)? {
        return Ok(out);
    }
    eprintln!(
        "\u{1f916} Asking {} to tune {} (this may take a moment)...",
        backend.label(),
        plugin_url
    );
    run_chat_loop(
        backend,
        initial_prompt,
        plugin_config_dir,
        user_config_toml_path,
        existing_hooks,
        // Passing the current entry switches the loop into tune mode.
        Some(current_entry_toml.to_string()),
        chezmoi_enabled,
    )
    .await
}
/// Gathers the context fed verbatim into the AI prompt: the user's
/// `config.toml` contents and a tree listing of their plugin config directory.
fn collect_user_context(
    user_config_toml_path: &Path,
    config_root: &Path,
) -> Result<(String, String)> {
    let config_text = std::fs::read_to_string(user_config_toml_path)
        .with_context(|| format!("failed to read {}", user_config_toml_path.display()))?;
    let plugins_tree = collect_plugins_tree(config_root);
    Ok((config_text, plugins_tree))
}
/// Tells the user when `_merged` proposal variants were deliberately disabled
/// and how to override that. Stays silent when merged output is enabled, when
/// there is nothing on disk to merge against, or when the user already forced
/// the behavior through an environment variable.
fn announce_merged_decision(backend: Backend, merged_supported: bool, has_existing: bool) {
    // Nothing worth explaining: merged output is on, or there is nothing to merge.
    if merged_supported || !has_existing {
        return;
    }
    // The user chose this behavior themselves; don't second-guess them.
    let user_overrode = std::env::var_os("RVPM_AI_NO_MERGED").is_some()
        || std::env::var_os("RVPM_AI_FORCE_MERGED").is_some();
    if user_overrode {
        return;
    }
    if matches!(backend, Backend::Gemini) {
        eprintln!(
            "\u{2139}\u{fe0f} Disabled `_merged` variants for Gemini (gemini-cli's \
loop detector aborts on near-duplicate fresh+merged XML output)."
        );
        eprintln!(
            " Set RVPM_AI_FORCE_MERGED=1 to override and try the full 2-variant \
prompt anyway."
        );
    }
}
/// Debug hook: when `RVPM_AI_DUMP_PROMPT` names a file, writes the prompt
/// there and short-circuits the flow with a `Skipped` outcome instead of
/// calling the AI. Returns `Ok(None)` when the variable is unset or blank.
fn maybe_dump_and_skip(prompt_text: &str) -> Result<Option<AiAddOutcome>> {
    // Only act on a non-blank path; anything else means "run normally".
    let dump_path = match std::env::var("RVPM_AI_DUMP_PROMPT") {
        Ok(path) if !path.trim().is_empty() => path,
        _ => return Ok(None),
    };
    std::fs::write(&dump_path, prompt_text)
        .with_context(|| format!("failed to write prompt dump to {dump_path}"))?;
    let byte_count = prompt_text.len();
    let line_count = prompt_text.lines().count();
    eprintln!(
        "\u{1f4dd} Prompt dumped to {dump_path} ({byte_count} bytes / {line_count} lines). \
Skipping AI call (RVPM_AI_DUMP_PROMPT was set)."
    );
    Ok(Some(AiAddOutcome {
        outcome: ChatOutcome::Skipped,
        plugin_entry_toml: None,
    }))
}
/// Loads whichever hook files already exist in the plugin's config directory.
/// Missing or unreadable files simply become `None` — absence is normal here.
fn read_existing_hooks(plugin_dir: &Path) -> ExistingHooks {
    ExistingHooks {
        init_lua: std::fs::read_to_string(plugin_dir.join("init.lua")).ok(),
        before_lua: std::fs::read_to_string(plugin_dir.join("before.lua")).ok(),
        after_lua: std::fs::read_to_string(plugin_dir.join("after.lua")).ok(),
    }
}
/// Drives the proposal review loop: show the current proposal, then let the
/// user apply it, refine it via chat, hand off to the backend's native CLI,
/// or skip. `existing_plugin_entry` is `Some` in tune mode and `None` in add
/// mode; it changes how the `[[plugins]]` entry decision is resolved on Apply.
#[allow(clippy::too_many_arguments)]
async fn run_chat_loop(
    backend: Backend,
    initial_prompt: String,
    plugin_config_dir: &Path,
    user_config_toml_path: &Path,
    existing_hooks: ExistingHooks,
    existing_plugin_entry: Option<String>,
    chezmoi_enabled: bool,
) -> Result<AiAddOutcome> {
    // Tracks the most recent prompt sent, so a hand-off can replay full context.
    let mut last_prompt = initial_prompt.clone();
    let first_response = invoke_oneshot(backend, &initial_prompt).await?;
    let mut proposal = parse_and_validate(&first_response)?;
    // The raw response can be large; release it before entering the loop.
    drop(first_response);
    loop {
        print_proposal_preview(
            &proposal,
            plugin_config_dir,
            user_config_toml_path,
            &existing_hooks,
            existing_plugin_entry.as_deref(),
        );
        match prompt_chat_action().await? {
            // Apply: walk the user through per-file choices, then write.
            ChatAction::Apply => {
                let tune_mode = existing_plugin_entry.is_some();
                let (decisions, plugin_entry_toml) =
                    resolve_user_decisions(&proposal, &existing_hooks, tune_mode).await?;
                let hook_changes =
                    write_hook_files(plugin_config_dir, &decisions, chezmoi_enabled).await?;
                return Ok(AiAddOutcome {
                    outcome: ChatOutcome::Applied { hook_changes },
                    plugin_entry_toml,
                });
            }
            // Skip: discard the proposal entirely; nothing is written.
            ChatAction::Skip => {
                return Ok(AiAddOutcome {
                    outcome: ChatOutcome::Skipped,
                    plugin_entry_toml: None,
                });
            }
            // Hand off: exit rvpm and continue the conversation in the
            // backend's own CLI, seeded with the full prompt + proposal.
            ChatAction::HandOff => {
                let proposal_xml = proposal_to_xml(&proposal);
                let handoff_prompt = format!(
                    "{last_prompt}\n\n\
                    ---\n\n\
                    # rvpm's latest proposal (already shown to the user)\n\n\
                    The user just picked **Hand off** in rvpm after seeing the \
                    proposal below.\n\n\
                    **Do NOT apply this proposal automatically.** The user \
                    handed off precisely so they can discuss it with you \
                    before anything is written. They may want to apply only \
                    specific parts, ask for refinements, or revise the \
                    proposal entirely. Wait for the user's explicit \
                    instruction before running any Edit / Write tools or \
                    touching `config.toml` / hook files.\n\n\
                    When the user does ask you to apply something, use the \
                    absolute paths from the \"On-disk paths\" section above.\n\n\
                    {proposal_xml}\n"
                );
                run_handoff(backend, &handoff_prompt).await?;
                return Ok(AiAddOutcome {
                    outcome: ChatOutcome::HandedOff,
                    plugin_entry_toml: None,
                });
            }
            // Chat: collect feedback and request a revised proposal. Follow-up
            // prompts are always rebuilt from the initial prompt plus the
            // latest proposal, not chained off each other.
            ChatAction::Chat => {
                let followup = ask_followup().await?;
                if followup.trim().is_empty() {
                    eprintln!("(empty feedback, returning to action menu)");
                    continue;
                }
                let prior_xml = proposal_to_xml(&proposal);
                last_prompt = build_followup_prompt(&initial_prompt, &prior_xml, &followup);
                eprintln!(
                    "\u{1f916} Asking {} for an updated proposal...",
                    backend.label()
                );
                let next_response = invoke_oneshot(backend, &last_prompt).await?;
                proposal = parse_and_validate(&next_response)?;
            }
        }
    }
}
/// Result of one AI add/tune session.
pub struct AiAddOutcome {
    /// How the session ended (applied / skipped / handed off).
    pub outcome: ChatOutcome,
    /// The `[[plugins]]` TOML snippet the caller should write into
    /// `config.toml`, when the user chose one; `None` means leave it alone.
    pub plugin_entry_toml: Option<String>,
}
/// Top-level action the user picks after seeing a proposal.
#[derive(Clone, Copy)]
enum ChatAction {
    /// Apply the proposal (with per-file choices).
    Apply,
    /// Send feedback and request a revised proposal.
    Chat,
    /// Exit rvpm and continue in the backend's native CLI.
    HandOff,
    /// Discard the proposal without writing anything.
    Skip,
}
/// Shows the main action menu and maps the selected row to a [`ChatAction`].
/// The blocking `dialoguer` prompt runs on the blocking thread pool so the
/// async runtime is not stalled.
async fn prompt_chat_action() -> Result<ChatAction> {
    use dialoguer::{Select, theme::ColorfulTheme};
    const CHOICES: [&str; 4] = [
        "Apply (pick fresh / merged / keep per-file, then write)",
        "Chat (refine with feedback)",
        "Hand off to native CLI (rvpm exits, AI continues directly)",
        "Skip (discard proposal, no changes)",
    ];
    let picked = tokio::task::spawn_blocking(|| {
        Select::with_theme(&ColorfulTheme::default())
            .with_prompt("How should we proceed?")
            .items(&CHOICES)
            .default(0)
            .interact()
    })
    .await
    .context("failed to join blocking dialoguer task")??;
    // Index order mirrors CHOICES above; anything unexpected means Skip.
    let action = match picked {
        0 => ChatAction::Apply,
        1 => ChatAction::Chat,
        2 => ChatAction::HandOff,
        _ => ChatAction::Skip,
    };
    Ok(action)
}
/// Reads one line of free-form feedback from stdin (prompting on stderr),
/// with the trailing newline stripped. Runs on the blocking pool so the
/// async runtime is not stalled by the read.
async fn ask_followup() -> Result<String> {
    use std::io::{self, BufRead, Write};
    tokio::task::spawn_blocking(|| -> Result<String> {
        eprint!("? Your feedback for the AI: ");
        // Best-effort flush so the prompt is visible before we block on stdin.
        let _ = io::stderr().flush();
        let mut line = String::new();
        io::stdin()
            .lock()
            .read_line(&mut line)
            .context("failed to read user input")?;
        let trimmed = line.trim_end_matches(['\r', '\n']);
        Ok(trimmed.to_string())
    })
    .await
    .context("failed to join blocking input task")?
}
/// Parses the raw AI response and sanitizes the proposal:
/// - validates both `[[plugins]]` entry variants (fresh / merged) as TOML,
///   dropping whichever is invalid;
/// - errors out when neither variant is valid, surfacing the concrete
///   validation error when one exists;
/// - drops `merged` variants that are textual duplicates of `fresh`.
///
/// Each variant is validated exactly once; the `Result` is kept so the error
/// path reuses the already-computed error instead of re-validating (the
/// original re-ran `validate_proposal_toml` just to call `unwrap_err()`).
fn parse_and_validate(response: &str) -> Result<Proposal> {
    let mut proposal = parse_proposal(response)?;
    let fresh_res = proposal
        .plugin_entry
        .fresh
        .as_deref()
        .map(validate_proposal_toml);
    let merged_res = proposal
        .plugin_entry
        .merged
        .as_deref()
        .map(validate_proposal_toml);
    let fresh_ok = matches!(&fresh_res, Some(Ok(_)));
    let merged_ok = matches!(&merged_res, Some(Ok(_)));
    if !fresh_ok && !merged_ok {
        // Prefer reporting the fresh variant's error, then merged's, matching
        // the original precedence; fall back when neither variant was present.
        if let Some(Err(e)) = fresh_res {
            return Err(e);
        }
        if let Some(Err(e)) = merged_res {
            return Err(e);
        }
        return Err(anyhow::anyhow!("AI proposal had no valid plugin entry"));
    }
    // At least one variant is valid; discard any invalid sibling.
    if !fresh_ok {
        proposal.plugin_entry.fresh = None;
    }
    if !merged_ok {
        proposal.plugin_entry.merged = None;
    }
    // A merged variant identical to fresh adds nothing — drop it everywhere.
    drop_duplicate_merged(&mut proposal.plugin_entry);
    drop_duplicate_merged(&mut proposal.init_lua);
    drop_duplicate_merged(&mut proposal.before_lua);
    drop_duplicate_merged(&mut proposal.after_lua);
    Ok(proposal)
}
/// Clears `section.merged` when it is textually equivalent to `section.fresh`
/// (ignoring blank lines and surrounding whitespace) — a duplicate merged
/// variant gives the user nothing to choose between.
fn drop_duplicate_merged(section: &mut ProposalSection) {
    if let (Some(fresh), Some(merged)) = (section.fresh.as_deref(), section.merged.as_deref()) {
        if essentially_same(fresh, merged) {
            section.merged = None;
        }
    }
}
/// Compares two text blobs line-by-line, ignoring blank lines and each line's
/// leading/trailing whitespace. Used to detect merged variants that are
/// effectively identical to the fresh variant.
fn essentially_same(a: &str, b: &str) -> bool {
    // Yields only the meaningful lines: trimmed and non-empty.
    fn significant(s: &str) -> impl Iterator<Item = &str> {
        s.lines().map(str::trim).filter(|line| !line.is_empty())
    }
    significant(a).eq(significant(b))
}
/// Serializes a proposal back into the compact `<rvpm:...>` XML form used in
/// follow-up and hand-off prompts. Section order matches the prompt contract:
/// plugin_entry, init_lua, before_lua, after_lua, then the explanation.
fn proposal_to_xml(p: &Proposal) -> String {
    let mut xml = String::new();
    for (tag, section) in [
        ("plugin_entry", &p.plugin_entry),
        ("init_lua", &p.init_lua),
        ("before_lua", &p.before_lua),
        ("after_lua", &p.after_lua),
    ] {
        push_section_xml(&mut xml, tag, section);
    }
    xml.push_str("<rvpm:explanation>\n");
    xml.push_str(&p.explanation);
    xml.push_str("\n</rvpm:explanation>\n");
    xml
}
/// Appends one section as `<rvpm:NAME>` XML. A missing fresh variant renders
/// as the literal placeholder `(none)`; the `_merged` tag is emitted only
/// when a merged variant exists.
fn push_section_xml(out: &mut String, name: &str, section: &ProposalSection) {
    let fresh_body = section.fresh.as_deref().unwrap_or("(none)");
    out.push_str(&format!("<rvpm:{name}>\n{fresh_body}\n</rvpm:{name}>\n"));
    if let Some(merged_body) = section.merged.as_deref() {
        out.push_str(&format!(
            "<rvpm:{name}_merged>\n{merged_body}\n</rvpm:{name}_merged>\n"
        ));
    }
}
/// Walks the user through the per-item decisions for an accepted proposal:
/// the `[[plugins]]` entry first, then each hook file in turn. Returns the
/// hook write decisions plus the entry TOML to write (`None` = keep as-is).
async fn resolve_user_decisions(
    proposal: &Proposal,
    existing_hooks: &ExistingHooks,
    tune_mode: bool,
) -> Result<(HookWriteDecisions, Option<String>)> {
    let plugin_entry_toml = pick_plugin_entry_decision(&proposal.plugin_entry, tune_mode).await?;
    let init_lua = pick_hook_decision(
        "init.lua",
        &proposal.init_lua,
        existing_hooks.init_lua.as_deref(),
    )
    .await?;
    let before_lua = pick_hook_decision(
        "before.lua",
        &proposal.before_lua,
        existing_hooks.before_lua.as_deref(),
    )
    .await?;
    let after_lua = pick_hook_decision(
        "after.lua",
        &proposal.after_lua,
        existing_hooks.after_lua.as_deref(),
    )
    .await?;
    Ok((
        HookWriteDecisions {
            init_lua,
            before_lua,
            after_lua,
        },
        plugin_entry_toml,
    ))
}
/// Decides which `[[plugins]]` entry TOML to write. In add mode the fresh
/// variant is taken without prompting; in tune mode the user picks between
/// whichever variants exist and keeping the current entry (`None`).
async fn pick_plugin_entry_decision(
    section: &ProposalSection,
    tune_mode: bool,
) -> Result<Option<String>> {
    // Add mode: there is nothing to merge with or keep, so take fresh as-is.
    if !tune_mode {
        return Ok(section.fresh.clone());
    }
    // Offer only the variants the AI actually produced, plus "keep".
    let mut choices: Vec<(&str, EntryChoiceKind)> = Vec::new();
    if section.fresh.is_some() {
        choices.push((
            "Use FRESH (clean redesign — overwrite existing entry)",
            EntryChoiceKind::Fresh,
        ));
    }
    if section.merged.is_some() {
        choices.push((
            "Use MERGED (preserves your fields, adjusts triggers etc.)",
            EntryChoiceKind::Merged,
        ));
    }
    choices.push(("Keep existing entry (no change)", EntryChoiceKind::Keep));
    // Only "keep" available — no real choice; leave the entry untouched.
    if choices.len() == 1 {
        return Ok(None);
    }
    let labels: Vec<String> = choices.iter().map(|(label, _)| (*label).to_string()).collect();
    let selected = pick_index("[[plugins]] entry — choose:", labels).await?;
    match choices[selected].1 {
        EntryChoiceKind::Fresh => Ok(section.fresh.clone()),
        EntryChoiceKind::Merged => Ok(section.merged.clone()),
        EntryChoiceKind::Keep => Ok(None),
    }
}
/// Which `[[plugins]]` entry variant the user selected in tune mode.
#[derive(Clone, Copy)]
enum EntryChoiceKind {
    /// Overwrite with the AI's fresh (from-scratch) entry.
    Fresh,
    /// Overwrite with the AI's merged (existing-fields-preserving) entry.
    Merged,
    /// Leave the current entry untouched.
    Keep,
}
/// Asks the user what to do with one proposed hook file (`init.lua` /
/// `before.lua` / `after.lua`), offering only the options that make sense
/// given what the AI proposed and whether the file already exists on disk.
async fn pick_hook_decision(
    name: &str,
    section: &ProposalSection,
    existing: Option<&str>,
) -> Result<HookChoice> {
    // Nothing proposed and nothing on disk: there is no decision to make.
    if section.is_empty() && existing.is_none() {
        return Ok(HookChoice::Keep);
    }
    // AI omitted this hook but a file exists: offer keep-or-delete only.
    if section.is_empty() {
        let labels = vec![
            format!("Keep existing {name} (no change)"),
            format!("Remove existing {name} (AI did not propose anything for it)"),
        ];
        let pick = pick_index(
            &format!(
                "{name} — AI omitted this hook. Your existing file is left as-is unless you choose Remove:",
            ),
            labels,
        )
        .await?;
        return Ok(if pick == 0 {
            HookChoice::Keep
        } else {
            HookChoice::Remove
        });
    }
    // Build the menu dynamically: only present variants the AI actually produced.
    let mut choices: Vec<(String, HookChoiceKind)> = Vec::new();
    if section.fresh.is_some() {
        let label = if existing.is_some() {
            format!("Use FRESH proposal (overwrite existing {name})")
        } else {
            format!("Use FRESH proposal (create {name})")
        };
        choices.push((label, HookChoiceKind::Fresh));
    }
    if section.merged.is_some() {
        choices.push((
            "Use MERGED proposal (preserves your edits, adds AI suggestions)".to_string(),
            HookChoiceKind::Merged,
        ));
    }
    // "Keep" doubles as "skip creation" when there is no file on disk yet.
    let keep_label = if existing.is_some() {
        format!("Keep existing {name} (no change)")
    } else {
        format!("Skip — don't create {name}")
    };
    choices.push((keep_label, HookChoiceKind::Keep));
    if existing.is_some() {
        choices.push((
            format!("Remove existing {name} (delete the file)"),
            HookChoiceKind::Remove,
        ));
    }
    let labels: Vec<String> = choices.iter().map(|(l, _)| l.clone()).collect();
    let pick = pick_index(&format!("{name} — choose:"), labels).await?;
    // The unwraps are safe: each variant is only offered when it is Some above.
    Ok(match choices[pick].1 {
        HookChoiceKind::Fresh => HookChoice::Write(section.fresh.clone().unwrap()),
        HookChoiceKind::Merged => HookChoice::Write(section.merged.clone().unwrap()),
        HookChoiceKind::Keep => HookChoice::Keep,
        HookChoiceKind::Remove => HookChoice::Remove,
    })
}
/// Which action the user selected for a single hook file.
#[derive(Clone, Copy)]
enum HookChoiceKind {
    /// Write the AI's fresh variant.
    Fresh,
    /// Write the AI's merged variant.
    Merged,
    /// Keep the file as-is (or don't create it).
    Keep,
    /// Delete the existing file.
    Remove,
}
/// Shows a selection menu with the given labels and returns the chosen index.
/// The blocking `dialoguer` prompt runs on the blocking thread pool so the
/// async runtime is not stalled.
async fn pick_index(prompt: &str, labels: Vec<String>) -> Result<usize> {
    use dialoguer::{Select, theme::ColorfulTheme};
    // Move an owned copy into the blocking closure ('static requirement).
    let owned_prompt = prompt.to_owned();
    let join = tokio::task::spawn_blocking(move || -> Result<usize> {
        Select::with_theme(&ColorfulTheme::default())
            .with_prompt(owned_prompt)
            .items(&labels)
            .default(0)
            .interact()
            .context("failed to read user choice")
    });
    join.await.context("failed to join blocking dialoguer task")?
}
/// Renders the whole proposal to stderr for review: the `[[plugins]]` entry,
/// each hook file (with its on-disk target path), and the AI's explanation.
/// In tune mode `existing_plugin_entry` is shown alongside the new variants.
fn print_proposal_preview(
    p: &Proposal,
    plugin_config_dir: &Path,
    config_toml_path: &Path,
    existing: &ExistingHooks,
    existing_plugin_entry: Option<&str>,
) {
    // Horizontal divider between sections.
    let rule = "\u{2500}".repeat(60);
    eprintln!();
    eprintln!(
        "\u{1f4dd} [[plugins]] entry to write into {}:",
        config_toml_path.display()
    );
    print_section_block(
        "plugin_entry",
        &p.plugin_entry,
        existing_plugin_entry,
        &rule,
    );
    print_hook_section_block(
        "init.lua",
        &p.init_lua,
        plugin_config_dir,
        existing.init_lua.as_deref(),
        &rule,
    );
    print_hook_section_block(
        "before.lua",
        &p.before_lua,
        plugin_config_dir,
        existing.before_lua.as_deref(),
        &rule,
    );
    print_hook_section_block(
        "after.lua",
        &p.after_lua,
        plugin_config_dir,
        existing.after_lua.as_deref(),
        &rule,
    );
    eprintln!();
    eprintln!("\u{1f4ad} AI explanation:");
    for line in p.explanation.lines() {
        eprintln!(" {line}");
    }
    eprintln!();
}
/// Prints one proposal section to stderr: the existing on-disk content (when
/// present, truncated to 20 lines), then the FRESH variant (or a "(none)"
/// marker), then the MERGED variant when one exists (each truncated to 40
/// lines). The truncation logic previously appeared three times inline and
/// recounted `body.lines().count()` redundantly; it now lives in
/// `print_truncated`, which counts once.
fn print_section_block(name: &str, section: &ProposalSection, existing: Option<&str>, rule: &str) {
    use console::style;
    eprintln!("{rule}");
    // Lighter dashed divider under each sub-heading.
    let sub_rule = "\u{2504}".repeat(40);
    if let Some(body) = existing {
        eprintln!(" {}", style(format!("[EXISTING {name}]")).yellow().bold());
        eprintln!(" {sub_rule}");
        print_truncated(body, 20);
        eprintln!();
    }
    if let Some(body) = section.fresh.as_deref() {
        eprintln!(" {}", style(format!("[FRESH {name}]")).cyan().bold());
        eprintln!(" {sub_rule}");
        print_truncated(body, 40);
    } else {
        eprintln!(
            " {} {}",
            style(format!("[FRESH {name}]")).cyan().bold(),
            style("(none)").dim()
        );
    }
    if let Some(body) = section.merged.as_deref() {
        eprintln!();
        eprintln!(" {}", style(format!("[MERGED {name}]")).magenta().bold());
        eprintln!(" {sub_rule}");
        print_truncated(body, 40);
    }
}

/// Prints at most `limit` lines of `body` to stderr, followed by a
/// "... (N more lines)" marker when the body was truncated.
fn print_truncated(body: &str, limit: usize) {
    for line in body.lines().take(limit) {
        eprintln!(" {line}");
    }
    let total = body.lines().count();
    if total > limit {
        eprintln!(" ... ({} more lines)", total - limit);
    }
}
/// Prints one hook file's section of the preview, including its on-disk
/// target path. Skipped entirely when the AI proposed nothing and no file
/// exists; when the AI omitted a hook that does exist, a warning explains
/// how to delete it in the upcoming prompt.
fn print_hook_section_block(
    name: &str,
    section: &ProposalSection,
    plugin_dir: &Path,
    existing: Option<&str>,
    rule: &str,
) {
    let omitted_by_ai = section.is_empty();
    if omitted_by_ai && existing.is_none() {
        return;
    }
    eprintln!();
    eprintln!(
        "\u{1f4c4} {} (target: {}):",
        name,
        plugin_dir.join(name).display()
    );
    // Reaching here while omitted implies a file exists (see early return).
    if omitted_by_ai {
        use console::style;
        eprintln!(
            " {} AI did not propose anything for this hook. Pick {} in the next prompt to delete it.",
            style("[OMITTED BY AI]").yellow().bold(),
            style("Remove existing").yellow()
        );
    }
    print_section_block(name, section, existing, rule);
}
// Unit tests for the pure helpers in this module: XML round-tripping,
// hook-file discovery, proposal validation, and the duplicate-merged filter.
#[cfg(test)]
mod tests {
    use super::*;

    // All four section tags appear, an absent section renders as "(none)",
    // and no `_merged` tag is emitted without a merged variant.
    #[test]
    fn proposal_to_xml_emits_all_tags_with_present_lua() {
        let p = Proposal {
            plugin_entry: ProposalSection {
                fresh: Some("[[plugins]]\nurl = \"o/r\"".to_string()),
                merged: None,
            },
            init_lua: ProposalSection {
                fresh: Some("vim.g.x = 1".to_string()),
                merged: None,
            },
            before_lua: ProposalSection::default(),
            after_lua: ProposalSection {
                fresh: Some("require('o').setup({})".to_string()),
                merged: None,
            },
            explanation: "two sentence explanation here.".to_string(),
        };
        let xml = proposal_to_xml(&p);
        assert!(xml.contains("<rvpm:plugin_entry>"));
        assert!(xml.contains("</rvpm:plugin_entry>"));
        assert!(xml.contains("<rvpm:init_lua>"));
        assert!(xml.contains("vim.g.x = 1"));
        assert!(xml.contains("<rvpm:before_lua>\n(none)\n</rvpm:before_lua>"));
        assert!(xml.contains("require('o').setup"));
        assert!(xml.contains("two sentence explanation here."));
        assert!(!xml.contains("<rvpm:before_lua_merged>"));
        assert!(!xml.contains("<rvpm:after_lua_merged>"));
    }

    // `_merged` tags are emitted whenever a merged variant is present.
    #[test]
    fn proposal_to_xml_emits_merged_tag_when_present() {
        let p = Proposal {
            plugin_entry: ProposalSection {
                fresh: Some("[[plugins]]\nurl = \"o/r\"".to_string()),
                merged: Some(
                    r#"[[plugins]]
url = "o/r"
on_cmd = ["Foo"]"#
                        .to_string(),
                ),
            },
            init_lua: ProposalSection::default(),
            before_lua: ProposalSection::default(),
            after_lua: ProposalSection {
                fresh: Some("FRESH".to_string()),
                merged: Some("MERGED".to_string()),
            },
            explanation: "expl".to_string(),
        };
        let xml = proposal_to_xml(&p);
        assert!(xml.contains("<rvpm:plugin_entry_merged>"));
        assert!(xml.contains("on_cmd = [\"Foo\"]"));
        assert!(xml.contains("<rvpm:after_lua_merged>"));
        assert!(xml.contains("MERGED"));
    }

    // Serializing and re-parsing preserves every field (used for hand-off
    // and follow-up prompts, which must not lose content).
    #[test]
    fn proposal_to_xml_round_trips_through_parse_proposal() {
        let original = Proposal {
            plugin_entry: ProposalSection {
                fresh: Some("[[plugins]]\nurl = \"a/b\"\non_cmd = [\"X\"]".to_string()),
                merged: Some("[[plugins]]\nurl = \"a/b\"\non_cmd = [\"X\", \"Y\"]".to_string()),
            },
            init_lua: ProposalSection {
                fresh: Some("a = 1".to_string()),
                merged: None,
            },
            before_lua: ProposalSection::default(),
            after_lua: ProposalSection::default(),
            explanation: "expl".to_string(),
        };
        let xml = proposal_to_xml(&original);
        let reparsed = crate::ai::parse_proposal(&xml).unwrap();
        assert_eq!(reparsed.plugin_entry.fresh, original.plugin_entry.fresh);
        assert_eq!(reparsed.plugin_entry.merged, original.plugin_entry.merged);
        assert_eq!(reparsed.init_lua.fresh, original.init_lua.fresh);
        assert_eq!(reparsed.init_lua.merged, None);
        assert!(reparsed.before_lua.fresh.is_none());
        assert!(reparsed.after_lua.fresh.is_none());
        assert_eq!(reparsed.explanation, original.explanation);
    }

    // Sanity check that the compact XML stays small enough to embed in prompts.
    #[test]
    fn proposal_to_xml_is_smaller_than_typical_raw_response() {
        let p = Proposal {
            plugin_entry: ProposalSection {
                fresh: Some("[[plugins]]\nurl = \"o/r\"".to_string()),
                merged: None,
            },
            init_lua: ProposalSection::default(),
            before_lua: ProposalSection::default(),
            after_lua: ProposalSection::default(),
            explanation: "Brief".to_string(),
        };
        let compact = proposal_to_xml(&p);
        assert!(
            compact.len() < 1024,
            "compact xml unexpectedly large: {} bytes",
            compact.len()
        );
    }

    #[test]
    fn read_existing_hooks_returns_none_for_missing_files() {
        let tmp = tempfile::tempdir().unwrap();
        let plugin_dir = tmp.path().join("p");
        std::fs::create_dir_all(&plugin_dir).unwrap();
        let hooks = read_existing_hooks(&plugin_dir);
        assert!(hooks.is_empty());
    }

    #[test]
    fn read_existing_hooks_returns_some_when_files_exist() {
        let tmp = tempfile::tempdir().unwrap();
        let plugin_dir = tmp.path().join("p");
        std::fs::create_dir_all(&plugin_dir).unwrap();
        std::fs::write(plugin_dir.join("after.lua"), "USER CONTENT\n").unwrap();
        let hooks = read_existing_hooks(&plugin_dir);
        assert!(hooks.init_lua.is_none());
        assert!(hooks.before_lua.is_none());
        assert_eq!(hooks.after_lua.as_deref(), Some("USER CONTENT\n"));
    }

    // One invalid variant is dropped rather than failing the whole proposal.
    #[test]
    fn parse_and_validate_drops_invalid_fresh_keeps_valid_merged() {
        let response = r#"
<rvpm:plugin_entry>
this is not valid TOML at all
</rvpm:plugin_entry>
<rvpm:plugin_entry_merged>
[[plugins]]
url = "owner/repo"
</rvpm:plugin_entry_merged>
<rvpm:explanation>fresh broken, merged ok</rvpm:explanation>
"#;
        let p = parse_and_validate(response).unwrap();
        assert!(
            p.plugin_entry.fresh.is_none(),
            "invalid fresh must be dropped to None"
        );
        assert!(
            p.plugin_entry.merged.is_some(),
            "valid merged must be retained"
        );
    }

    #[test]
    fn parse_and_validate_drops_invalid_merged_keeps_valid_fresh() {
        let response = r#"
<rvpm:plugin_entry>
[[plugins]]
url = "owner/repo"
</rvpm:plugin_entry>
<rvpm:plugin_entry_merged>
also not valid TOML
</rvpm:plugin_entry_merged>
<rvpm:explanation>fresh ok, merged broken</rvpm:explanation>
"#;
        let p = parse_and_validate(response).unwrap();
        assert!(p.plugin_entry.fresh.is_some());
        assert!(p.plugin_entry.merged.is_none());
    }

    // With no valid entry at all, the whole proposal is rejected.
    #[test]
    fn parse_and_validate_errors_when_both_variants_invalid() {
        let response = r#"
<rvpm:plugin_entry>
not toml
</rvpm:plugin_entry>
<rvpm:plugin_entry_merged>
also not toml
</rvpm:plugin_entry_merged>
<rvpm:explanation>both broken</rvpm:explanation>
"#;
        assert!(parse_and_validate(response).is_err());
    }

    // A merged variant identical to fresh is redundant and gets dropped,
    // for the plugin entry and for hook sections alike.
    #[test]
    fn parse_and_validate_drops_merged_when_identical_to_fresh() {
        let response = r#"
<rvpm:plugin_entry>
[[plugins]]
url = "owner/repo"
on_cmd = ["Foo"]
</rvpm:plugin_entry>
<rvpm:plugin_entry_merged>
[[plugins]]
url = "owner/repo"
on_cmd = ["Foo"]
</rvpm:plugin_entry_merged>
<rvpm:after_lua>
require('foo').setup({})
</rvpm:after_lua>
<rvpm:after_lua_merged>
require('foo').setup({})
</rvpm:after_lua_merged>
<rvpm:explanation>identical fresh+merged</rvpm:explanation>
"#;
        let p = parse_and_validate(response).unwrap();
        assert!(p.plugin_entry.fresh.is_some());
        assert!(
            p.plugin_entry.merged.is_none(),
            "duplicate plugin_entry_merged should be dropped"
        );
        assert!(p.after_lua.fresh.is_some());
        assert!(
            p.after_lua.merged.is_none(),
            "duplicate after_lua_merged should be dropped"
        );
    }

    #[test]
    fn parse_and_validate_keeps_merged_when_meaningfully_different() {
        let response = r#"
<rvpm:plugin_entry>
[[plugins]]
url = "owner/repo"
on_cmd = ["Foo"]
</rvpm:plugin_entry>
<rvpm:plugin_entry_merged>
[[plugins]]
url = "owner/repo"
on_cmd = ["Foo", "Bar"]
rev = "v1.0"
</rvpm:plugin_entry_merged>
<rvpm:explanation>genuinely different</rvpm:explanation>
"#;
        let p = parse_and_validate(response).unwrap();
        assert!(p.plugin_entry.fresh.is_some());
        assert!(p.plugin_entry.merged.is_some());
    }

    #[test]
    fn essentially_same_ignores_whitespace_only_differences() {
        assert!(essentially_same(
            "  vim.g.x = 1\n  vim.g.y = 2\n",
            "vim.g.x = 1\n\n\nvim.g.y = 2",
        ));
    }

    #[test]
    fn essentially_same_distinguishes_real_differences() {
        assert!(!essentially_same("vim.g.x = 1", "vim.g.x = 2",));
        assert!(!essentially_same(
            "require('foo').setup({})",
            "require('foo').setup({ enabled = true })",
        ));
    }
}