use crate::ai::prompt::{build_followup_prompt, build_initial_prompt, collect_plugins_tree};
use crate::ai::{
Backend, Proposal, ensure_cli_installed, invoke_oneshot, parse_proposal, run_handoff,
validate_proposal_toml, write_hook_files,
};
use anyhow::{Context, Result};
use std::path::{Path, PathBuf};
/// Terminal state of an interactive AI proposal session.
///
/// `Debug` is derived so outcomes can appear in logs and error context.
#[derive(Debug)]
pub enum ChatOutcome {
    /// The user accepted the proposal; `written_hooks` lists the hook files
    /// that were actually created (pre-existing files are skipped upstream).
    Applied { written_hooks: Vec<PathBuf> },
    /// The user discarded the proposal; nothing was written.
    Skipped,
    /// The session was handed off to the backend's native CLI; rvpm made no
    /// changes itself.
    HandedOff,
}
/// Runs the "AI add" flow: builds the initial prompt from the user's current
/// config, asks the backend for a proposal, then drives the interactive
/// review loop until the user applies, skips, or hands off.
// The CLI wiring passes every resolved path/flag through in one call.
#[allow(clippy::too_many_arguments)]
pub async fn run_ai_add(
    backend: Backend,
    plugin_url: &str,
    plugin_root: &Path,
    plugin_config_dir: &Path,
    config_root: &Path,
    user_config_toml_path: &Path,
    ai_language: &str,
    chezmoi_enabled: bool,
) -> Result<AiAddOutcome> {
    ensure_cli_installed(backend)?;
    let (config_toml, plugins_tree) =
        collect_user_context(user_config_toml_path, config_root)?;
    let prompt = build_initial_prompt(
        plugin_url,
        plugin_root,
        &config_toml,
        &plugins_tree,
        ai_language,
    )?;
    // Let the user know a (potentially slow) model call is in flight.
    eprintln!(
        "\u{1f916} Asking {} about {} (this may take a moment)...",
        backend.label(),
        plugin_url
    );
    run_chat_loop(
        backend,
        prompt,
        plugin_config_dir,
        user_config_toml_path,
        chezmoi_enabled,
    )
    .await
}
/// Runs the "AI tune" flow: like `run_ai_add`, but the prompt also carries
/// the plugin's current `[[plugins]]` entry so the model refines it instead
/// of proposing from scratch.
// The CLI wiring passes every resolved path/flag through in one call.
#[allow(clippy::too_many_arguments)]
pub async fn run_ai_tune(
    backend: Backend,
    plugin_url: &str,
    plugin_root: &Path,
    plugin_config_dir: &Path,
    config_root: &Path,
    user_config_toml_path: &Path,
    current_entry_toml: &str,
    ai_language: &str,
    chezmoi_enabled: bool,
) -> Result<AiAddOutcome> {
    ensure_cli_installed(backend)?;
    let (config_toml, plugins_tree) =
        collect_user_context(user_config_toml_path, config_root)?;
    let prompt = crate::ai::prompt::build_tune_prompt(
        plugin_url,
        plugin_root,
        current_entry_toml,
        &config_toml,
        &plugins_tree,
        ai_language,
    )?;
    // Let the user know a (potentially slow) model call is in flight.
    eprintln!(
        "\u{1f916} Asking {} to tune {} (this may take a moment)...",
        backend.label(),
        plugin_url
    );
    run_chat_loop(
        backend,
        prompt,
        plugin_config_dir,
        user_config_toml_path,
        chezmoi_enabled,
    )
    .await
}
/// Gathers the context embedded into prompts: the raw contents of the user's
/// `config.toml` plus a textual snapshot of the plugin config tree.
///
/// Returns `(config_toml, plugins_tree)`; fails with path context when the
/// config file cannot be read.
fn collect_user_context(
    user_config_toml_path: &Path,
    config_root: &Path,
) -> Result<(String, String)> {
    let config_toml = std::fs::read_to_string(user_config_toml_path)
        .with_context(|| format!("failed to read {}", user_config_toml_path.display()))?;
    Ok((config_toml, collect_plugins_tree(config_root)))
}
/// Drives the interactive review loop: preview the proposal, then let the
/// user Apply, Skip, hand off to the native CLI, or iterate with feedback.
///
/// Returns the terminal `ChatOutcome`; the accepted `Proposal` is included
/// only for `Applied`.
async fn run_chat_loop(
    backend: Backend,
    initial_prompt: String,
    plugin_config_dir: &Path,
    user_config_toml_path: &Path,
    chezmoi_enabled: bool,
) -> Result<AiAddOutcome> {
    let first_response = invoke_oneshot(backend, &initial_prompt).await?;
    let mut proposal = parse_and_validate(&first_response)?;
    // Free the raw model response as soon as the proposal is extracted.
    drop(first_response);
    // Most recent prompt actually sent. `None` means no follow-up has been
    // issued yet, so the initial prompt is still current — this avoids the
    // up-front clone of `initial_prompt` the old code paid unconditionally.
    let mut last_prompt: Option<String> = None;
    loop {
        print_proposal_preview(&proposal, plugin_config_dir, user_config_toml_path);
        match prompt_chat_action().await? {
            ChatAction::Apply => {
                let written =
                    write_hook_files(plugin_config_dir, &proposal, chezmoi_enabled).await?;
                return Ok(AiAddOutcome {
                    outcome: ChatOutcome::Applied {
                        written_hooks: written,
                    },
                    proposal: Some(proposal),
                });
            }
            ChatAction::Skip => {
                return Ok(AiAddOutcome {
                    outcome: ChatOutcome::Skipped,
                    proposal: None,
                });
            }
            ChatAction::HandOff => {
                // Hand the backend whatever prompt it last saw so the native
                // CLI session resumes from the same context.
                run_handoff(backend, last_prompt.as_deref().unwrap_or(&initial_prompt))
                    .await?;
                return Ok(AiAddOutcome {
                    outcome: ChatOutcome::HandedOff,
                    proposal: None,
                });
            }
            ChatAction::Chat => {
                let followup = ask_followup().await?;
                if followup.trim().is_empty() {
                    eprintln!("(empty feedback, returning to action menu)");
                    continue;
                }
                // Follow-ups are always grounded in the initial prompt plus a
                // compact XML rendering of the prior proposal, not in the
                // previous follow-up, to keep prompt size bounded.
                let prior_xml = proposal_to_xml(&proposal);
                let next_prompt = build_followup_prompt(&initial_prompt, &prior_xml, &followup);
                eprintln!(
                    "\u{1f916} Asking {} for an updated proposal...",
                    backend.label()
                );
                let next_response = invoke_oneshot(backend, &next_prompt).await?;
                proposal = parse_and_validate(&next_response)?;
                last_prompt = Some(next_prompt);
            }
        }
    }
}
/// Result returned by `run_ai_add` / `run_ai_tune`: how the chat session
/// ended, plus the accepted proposal when one was applied.
pub struct AiAddOutcome {
    // Terminal state of the interactive session.
    pub outcome: ChatOutcome,
    // `Some` only when `outcome` is `Applied`; `None` for Skipped/HandedOff.
    pub proposal: Option<Proposal>,
}
/// The action the user picks from the post-preview menu.
///
/// NOTE(review): variant meaning must stay in sync with the selection menu
/// built in `prompt_chat_action`.
#[derive(Clone, Copy)]
enum ChatAction {
    Apply,
    Chat,
    HandOff,
    Skip,
}
/// Shows the action menu and returns the user's choice.
///
/// dialoguer blocks on terminal input, so the prompt runs on tokio's
/// blocking thread pool to keep the async runtime responsive.
async fn prompt_chat_action() -> Result<ChatAction> {
    use dialoguer::{Select, theme::ColorfulTheme};
    // Label and action live side by side so the menu text and the returned
    // variant cannot drift out of sync (the old code kept a separate index
    // `match` that a new entry could silently break).
    const CHOICES: [(&str, ChatAction); 4] = [
        (
            "Apply (write to config.toml + create hook files)",
            ChatAction::Apply,
        ),
        ("Chat (refine with feedback)", ChatAction::Chat),
        (
            "Hand off to native CLI (rvpm exits, AI continues directly)",
            ChatAction::HandOff,
        ),
        ("Skip (discard proposal, no changes)", ChatAction::Skip),
    ];
    let selection = tokio::task::spawn_blocking(|| {
        let labels: Vec<&str> = CHOICES.iter().map(|(label, _)| *label).collect();
        Select::with_theme(&ColorfulTheme::default())
            .with_prompt("How should we proceed?")
            .items(&labels)
            .default(0)
            .interact()
    })
    .await
    .context("failed to join blocking dialoguer task")??;
    // `Select` only yields in-range indices; fall back to Skip defensively,
    // preserving the old catch-all behavior.
    Ok(CHOICES
        .get(selection)
        .map(|&(_, action)| action)
        .unwrap_or(ChatAction::Skip))
}
/// Reads one line of free-form feedback for the AI from stdin.
///
/// The blocking read runs on tokio's blocking pool; trailing CR/LF are
/// stripped so the caller sees just the feedback text (empty on EOF).
async fn ask_followup() -> Result<String> {
    use std::io::{self, BufRead, Write};
    tokio::task::spawn_blocking(|| -> Result<String> {
        eprint!("? Your feedback for the AI: ");
        // Best-effort flush so the prompt appears before we block on stdin.
        let _ = io::stderr().flush();
        let mut line = String::new();
        io::stdin()
            .lock()
            .read_line(&mut line)
            .context("failed to read user input")?;
        let feedback = line.trim_end_matches(['\r', '\n']);
        Ok(feedback.to_string())
    })
    .await
    .context("failed to join blocking input task")?
}
/// Extracts a `Proposal` from the raw model response and rejects it early
/// when its embedded `[[plugins]]` TOML entry fails validation.
fn parse_and_validate(response: &str) -> Result<Proposal> {
    let candidate = parse_proposal(response)?;
    validate_proposal_toml(&candidate.plugin_entry_toml)?;
    Ok(candidate)
}
/// Serializes a `Proposal` back into the compact `<rvpm:...>` tag format so
/// follow-up prompts can reference the prior proposal without resending the
/// full raw model response. Absent hook bodies are emitted as `(none)`.
fn proposal_to_xml(p: &Proposal) -> String {
    let hooks_len: usize = [&p.init_lua, &p.before_lua, &p.after_lua]
        .into_iter()
        .filter_map(|hook| hook.as_deref())
        .map(str::len)
        .sum();
    // Reserve for every section up front; the old estimate ignored the hook
    // bodies, forcing reallocations on large proposals.
    let mut out =
        String::with_capacity(p.plugin_entry_toml.len() + p.explanation.len() + hooks_len + 256);
    out.push_str("<rvpm:plugin_entry>\n");
    out.push_str(&p.plugin_entry_toml);
    out.push_str("\n</rvpm:plugin_entry>\n");
    for (tag, body) in [
        ("init_lua", p.init_lua.as_deref()),
        ("before_lua", p.before_lua.as_deref()),
        ("after_lua", p.after_lua.as_deref()),
    ] {
        // Piecewise push_str instead of format! skips a temporary String per tag.
        out.push_str("<rvpm:");
        out.push_str(tag);
        out.push_str(">\n");
        out.push_str(body.unwrap_or("(none)"));
        out.push_str("\n</rvpm:");
        out.push_str(tag);
        out.push_str(">\n");
    }
    out.push_str("<rvpm:explanation>\n");
    out.push_str(&p.explanation);
    out.push_str("\n</rvpm:explanation>\n");
    out
}
/// Renders the full proposal to stderr: the `[[plugins]]` entry, each
/// proposed hook file, the AI's explanation, and — when any hook file
/// already exists on disk — a banner listing the files Apply would skip.
fn print_proposal_preview(p: &Proposal, plugin_config_dir: &Path, config_toml_path: &Path) {
    let rule = "\u{2500}".repeat(60);
    eprintln!();
    eprintln!(
        "\u{1f4dd} [[plugins]] entry to merge into {}:",
        config_toml_path.display()
    );
    eprintln!("{rule}");
    for line in p.plugin_entry_toml.lines() {
        eprintln!(" {line}");
    }
    // Hook sections are identical except for body/name, so drive them from
    // one table instead of three copy-pasted calls.
    let mut skipped: Vec<String> = Vec::new();
    let hooks = [
        (p.init_lua.as_deref(), "init.lua"),
        (p.before_lua.as_deref(), "before.lua"),
        (p.after_lua.as_deref(), "after.lua"),
    ];
    for (body, name) in hooks {
        print_hook_section(body, plugin_config_dir, name, &rule, &mut skipped);
    }
    eprintln!();
    eprintln!("\u{1f4ad} AI explanation:");
    for line in p.explanation.lines() {
        eprintln!(" {line}");
    }
    if !skipped.is_empty() {
        eprintln!();
        eprintln!("{rule}");
        eprintln!(
            "\u{26a0}\u{fe0f} HEADS UP — Apply will SKIP {} existing hook file(s):",
            skipped.len()
        );
        for path in &skipped {
            eprintln!(" [SKIPPED] {path}");
        }
        eprintln!(" Your existing edits are preserved. To apply the AI proposal,");
        eprintln!(" delete the file first or merge manually.");
        eprintln!("{rule}");
    }
    eprintln!();
}
/// Previews one proposed hook file (`init.lua` / `before.lua` / `after.lua`).
///
/// Prints nothing when `body` is `None`. If the target file already exists
/// it is announced as skipped and its path recorded in `skipped` (Apply
/// preserves user edits); otherwise it is announced as a new file. At most
/// `PREVIEW_LINES` lines of the body are shown, with a "... more" trailer.
fn print_hook_section(
    body: Option<&str>,
    plugin_dir: &Path,
    name: &str,
    rule: &str,
    skipped: &mut Vec<String>,
) {
    // Cap the inline preview so huge hook files don't flood the terminal;
    // previously the literal 20 was repeated in three places.
    const PREVIEW_LINES: usize = 20;
    let Some(body) = body else { return };
    let path = plugin_dir.join(name);
    let exists = path.exists();
    let line_count = body.lines().count();
    eprintln!();
    if exists {
        eprintln!(
            "\u{26a0}\u{fe0f} [SKIPPED] {} already exists ({} line proposal preserved for reference):",
            path.display(),
            line_count,
        );
        skipped.push(path.display().to_string());
    } else {
        eprintln!(
            "\u{1f195} Will create {} ({} lines):",
            path.display(),
            line_count
        );
    }
    eprintln!("{rule}");
    for line in body.lines().take(PREVIEW_LINES) {
        eprintln!(" {line}");
    }
    if line_count > PREVIEW_LINES {
        eprintln!(" ... ({} more lines)", line_count - PREVIEW_LINES);
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Every tag must appear; absent hooks serialize as "(none)".
    #[test]
    fn proposal_to_xml_emits_all_tags_with_present_lua() {
        let p = Proposal {
            plugin_entry_toml: "[[plugins]]\nurl = \"o/r\"".to_string(),
            init_lua: Some("vim.g.x = 1".to_string()),
            before_lua: None,
            after_lua: Some("require('o').setup({})".to_string()),
            explanation: "two sentence explanation here.".to_string(),
        };
        let xml = proposal_to_xml(&p);
        assert!(xml.contains("<rvpm:plugin_entry>"));
        assert!(xml.contains("</rvpm:plugin_entry>"));
        assert!(xml.contains("<rvpm:init_lua>"));
        assert!(xml.contains("vim.g.x = 1"));
        assert!(xml.contains("<rvpm:before_lua>\n(none)\n</rvpm:before_lua>"));
        assert!(xml.contains("require('o').setup"));
        assert!(xml.contains("two sentence explanation here."));
    }

    // The compact XML must survive a round trip through the real parser so
    // follow-up prompts describe exactly what the user previewed.
    #[test]
    fn proposal_to_xml_round_trips_through_parse_proposal() {
        let original = Proposal {
            plugin_entry_toml: "[[plugins]]\nurl = \"a/b\"\non_cmd = [\"X\"]".to_string(),
            init_lua: Some("a = 1".to_string()),
            before_lua: None,
            after_lua: None,
            explanation: "expl".to_string(),
        };
        let xml = proposal_to_xml(&original);
        let reparsed = crate::ai::parse_proposal(&xml).unwrap();
        assert_eq!(
            reparsed.plugin_entry_toml.trim(),
            original.plugin_entry_toml
        );
        assert_eq!(reparsed.init_lua, original.init_lua);
        // Was two statements on one line; split for rustfmt conformance.
        assert_eq!(reparsed.before_lua, None);
        assert_eq!(reparsed.after_lua, None);
        assert_eq!(reparsed.explanation, original.explanation);
    }

    #[test]
    fn proposal_to_xml_is_smaller_than_typical_raw_response() {
        let p = Proposal {
            plugin_entry_toml: "[[plugins]]\nurl = \"o/r\"".to_string(),
            init_lua: None,
            before_lua: None,
            after_lua: None,
            explanation: "Brief".to_string(),
        };
        let compact = proposal_to_xml(&p);
        assert!(
            compact.len() < 1024,
            "compact xml unexpectedly large: {} bytes",
            compact.len()
        );
    }
}