use colored::Colorize;
use dialoguer::{Input, Select, theme::ColorfulTheme};
use indicatif::{ProgressBar, ProgressStyle};
use serde::{Deserialize, Serialize};
use std::env;
use std::fs;
use std::io::{Read, Write};
use std::process::{Command, Stdio};
use std::thread;
use std::time::Duration;
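
/// Request body for Ollama's `/api/chat` endpoint; `stream: false` requests a
/// single complete response, and `think: false` is intended to suppress the
/// separate thinking trace on reasoning-capable models.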
#[derive(Serialize)]
#[allow(dead_code)]
struct OllamaChatRequest {
    model: String,
    messages: Vec<ChatMessage>,
    stream: bool,
    think: bool,
}

#[derive(Deserialize)]
struct OllamaChatResponse {
    message: Option<OllamaMessage>,
}

#[derive(Deserialize)]
#[allow(dead_code)]
struct OllamaMessage {
    content: Option<String>,
}

#[derive(Deserialize)]
struct OllamaModelsResponse {
    models: Vec<OllamaModel>,
}

#[derive(Deserialize)]
struct OllamaModel {
    name: String,
}
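
/// Request/response types for OpenAI-compatible `/chat/completions`
/// endpoints, shared by the OpenAI and OpenRouter backends.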
#[derive(Serialize)]
struct ApiChatRequest {
    model: String,
    messages: Vec<ChatMessage>,
}

#[derive(Deserialize)]
struct ApiChatResponse {
    choices: Option<Vec<ApiChatChoice>>,
}

#[derive(Deserialize)]
struct ApiChatChoice {
    message: Option<ApiChatMessage>,
}

#[derive(Deserialize)]
struct ApiChatMessage {
    content: Option<String>,
}
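
/// Request/response types for Anthropic's `/v1/messages` API, which requires
/// an explicit `max_tokens` and returns a list of content blocks.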
#[derive(Serialize)]
struct ClaudeRequest {
    model: String,
    max_tokens: u32,
    messages: Vec<ChatMessage>,
}

#[derive(Deserialize)]
struct ClaudeResponse {
    content: Option<Vec<ClaudeContentBlock>>,
}

#[derive(Deserialize)]
struct ClaudeContentBlock {
    text: Option<String>,
}
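
/// Request/response types for Google's Gemini `generateContent` API, which
/// nests the prompt as `contents[].parts[].text`.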
#[derive(Serialize)]
struct GeminiRequest {
    contents: Vec<GeminiContent>,
}

#[derive(Serialize)]
struct GeminiContent {
    parts: Vec<GeminiPart>,
}

#[derive(Serialize)]
struct GeminiPart {
    text: String,
}

#[derive(Deserialize)]
struct GeminiResponse {
    candidates: Option<Vec<GeminiCandidate>>,
}

#[derive(Deserialize)]
struct GeminiCandidate {
    content: Option<GeminiResponseContent>,
}

#[derive(Deserialize)]
struct GeminiResponseContent {
    parts: Option<Vec<GeminiResponsePart>>,
}

#[derive(Deserialize)]
struct GeminiResponsePart {
    text: Option<String>,
}
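
/// A single chat message, shared by every provider that takes a role/content
/// pair.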
#[derive(Serialize, Deserialize, Clone)]
struct ChatMessage {
    role: String,
    content: String,
}
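
/// Entry point: re-runs the given command verbatim, mirrors its output, and,
/// if it exits non-zero with something on stderr, asks the configured AI
/// provider to explain the failure.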
fn main() {
    let _ = dotenvy::dotenv();
    let args: Vec<String> = env::args().skip(1).collect();
    if args.is_empty() {
        println!(
            "\n {} – run any command; if it fails, AI tells you why.\n\n {} wtf <command>\n {} wtf npm run build\n {} wtf --setup\n",
            "wtf".bold().cyan(),
            "Usage:".dimmed(),
            "Example:".dimmed(),
            "Setup:".dimmed(),
        );
        std::process::exit(0);
    }
    if args.len() == 1 && args[0] == "--setup" {
        run_setup();
        std::process::exit(0);
    }
    let command = &args[0];
    let command_args = &args[1..];
    let full_command = args.join(" ");
    // Inherit stdin/stdout so interactive commands still work; only stderr is
    // piped so it can be echoed live *and* captured for the prompt.
    let mut child = match Command::new(command)
        .args(command_args)
        .stdin(Stdio::inherit())
        .stdout(Stdio::inherit())
        .stderr(Stdio::piped())
        .spawn()
    {
        Ok(child) => child,
        Err(err) => {
            eprintln!(
                "{}",
                format!("\n✖ Could not run \"{}\": {}", full_command, err).red()
            );
            std::process::exit(1);
        }
    };
    let mut stderr_handle = child.stderr.take().expect("stderr was piped");
    let stderr_thread = thread::spawn(move || {
        // Tee stderr: forward each chunk to the user's terminal as it
        // arrives while accumulating a copy for later analysis.
        let mut buffer = String::new();
        let mut chunk = [0u8; 1024];
        while let Ok(n) = stderr_handle.read(&mut chunk) {
            if n == 0 {
                break;
            }
            let s = String::from_utf8_lossy(&chunk[..n]);
            eprint!("{}", s);
            let _ = std::io::stderr().flush();
            buffer.push_str(&s);
        }
        buffer
    });
    let status = child.wait().expect("failed to wait on child process");
    let stderr_output = stderr_thread.join().unwrap_or_default();
    let exit_code = status.code().unwrap_or(1);
    // Nothing to explain if the command succeeded or produced no stderr.
    if exit_code == 0 || stderr_output.trim().is_empty() {
        std::process::exit(exit_code);
    }
    let spinner = ProgressBar::new_spinner();
    spinner.set_style(
        ProgressStyle::with_template("{spinner:.cyan} {msg}")
            .unwrap()
            .tick_chars("⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏ "),
    );
    spinner.set_message("wtf is analyzing the error…".cyan().to_string());
    spinner.enable_steady_tick(Duration::from_millis(80));
    let files = extract_existing_files(&stderr_output);
    let files_context = read_file_context(&files);
    let prompt = build_prompt(&full_command, &stderr_output, &files_context);
    let answer = get_ai_response(&prompt);
    spinner.finish_and_clear();
    if let Some((answer, provider)) = answer {
        let separator = "─".repeat(50).yellow();
        println!(
            "\n{}\n{} {}\n",
            separator,
            "🤖 wtf says".bold().cyan(),
            format!("({}):", provider).dimmed(),
        );
        termimad::print_text(&answer);
        println!("\n{}\n", separator);
    }
    // Preserve the child's exit code so `wtf` stays transparent to scripts.
    std::process::exit(exit_code);
}
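
/// Scans stderr for whitespace-separated tokens that name files existing on
/// disk, so their contents can be added to the prompt. Understands
/// `path:line:col` (e.g. `src/main.rs:12:5`), `path(line,col)`, and Windows
/// drive-letter paths (`C:\src\main.rs:12`); returns at most three matches,
/// in arbitrary `HashSet` order.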
fn extract_existing_files(stderr: &str) -> Vec<String> {
    let mut files = std::collections::HashSet::new();
    for token in stderr.split_whitespace() {
        let token = token.trim_matches(|c: char| "()[]{}'\"`,".contains(c));
        let mut candidates = vec![token.to_string()];
        if let Some(idx) = token.find(':') {
            if idx == 1
                && token.len() > 2
                && (token.as_bytes()[2] == b'\\' || token.as_bytes()[2] == b'/')
            {
                // Looks like a Windows drive path (`C:\...`), so the real
                // separator is the *second* colon, e.g. `C:\src\main.rs:12`.
                if let Some(second_idx) = token[3..].find(':') {
                    candidates.push(token[..3 + second_idx].to_string());
                }
            } else {
                // `path:line:col` style, as emitted by most compilers.
                candidates.push(token[..idx].to_string());
            }
        }
        if let Some(idx) = token.find('(') {
            // `path(line,col)` style, as emitted by e.g. MSVC.
            candidates.push(token[..idx].to_string());
        }
        for candidate in candidates {
            if !candidate.is_empty() {
                let path = std::path::Path::new(&candidate);
                if path.is_file() {
                    files.insert(candidate);
                }
            }
        }
    }
    files.into_iter().take(3).collect()
}
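
/// Reads each candidate file, skipping anything over ~1 MB and truncating
/// long files to their first and last 125 lines, and returns the contents as
/// fenced blocks ready to embed in the prompt.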
fn read_file_context(files: &[String]) -> String {
    let mut context = String::new();
    for file in files {
        // Skip large files outright to keep the prompt small.
        if let Ok(metadata) = fs::metadata(file) {
            if metadata.len() > 1_000_000 {
                continue;
            }
        }
        if let Ok(content) = fs::read_to_string(file) {
            let lines: Vec<&str> = content.lines().collect();
            let limited_content = if lines.len() > 250 {
                let mut c = lines[0..125].join("\n");
                c.push_str("\n... [content truncated] ...\n");
                c.push_str(&lines[lines.len() - 125..].join("\n"));
                c
            } else {
                content
            };
            context.push_str(&format!(
                "\nFile `{}`:\n```\n{}\n```\n",
                file, limited_content
            ));
        }
    }
    context
}
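
/// Assembles the prompt: OS/shell context, the failed command, its stderr,
/// any relevant file contents, and the exact Markdown headings the answer
/// must follow.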
fn build_prompt(cmd: &str, stderr: &str, files_context: &str) -> String {
    let os_type = env::consts::OS;
    let os_arch = env::consts::ARCH;
    // SHELL is the Unix convention; COMSPEC is its Windows counterpart.
    let shell = env::var("SHELL")
        .or_else(|_| env::var("COMSPEC"))
        .unwrap_or_else(|_| "unknown".to_string());
    let files_section = if files_context.is_empty() {
        String::new()
    } else {
        format!("\nRelevant file contents:\n{}\n", files_context)
    };
    format!(
        "You are a helpful terminal assistant. A command just failed on this machine.\n\
        \n\
        OS: {} ({})\n\
        Shell: {}\n\
        Command: {}\n\
        \n\
        stderr output:\n\
        ```\n\
        {}\n\
        ```\n{}\
        \n\
        Provide a structured output formatted in Markdown, using these exact headings:\n\
        ### What is wrong\n (A short explanation of the cause)\n\
        ### What can be done to help\n (The exact steps / code to fix it)\n\
        ### File to change\n (Specify which file needs to be changed, if any)",
        os_type,
        os_arch,
        shell,
        cmd,
        stderr.trim(),
        files_section
    )
}
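
/// Interactive `wtf --setup`: picks a provider (and, for Ollama, a model),
/// persists the choice to `.env` in the current directory, and optionally
/// stores an API key there.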
fn run_setup() {
    println!("\n{}", "🔧 wtf setup".bold().cyan());
    println!("{}\n", "Select your preferred AI provider:".dimmed());
    let providers = &[
        "Ollama – Local, free, and private (requires Ollama installed)",
        "OpenAI – Cloud (requires OPENAI_API_KEY)",
        "Claude – Cloud (requires CLAUDE_API_KEY)",
        "Gemini – Cloud (requires GEMINI_API_KEY)",
        "OpenRouter – Cloud (requires OPENROUTER_API_KEY)",
    ];
    let selection = Select::with_theme(&ColorfulTheme::default())
        .items(providers)
        .default(0)
        .interact();
    let selection = match selection {
        Ok(s) => s,
        Err(_) => {
            println!("\n{}", "Setup cancelled.".yellow());
            return;
        }
    };
    let provider_name = match selection {
        0 => "ollama",
        1 => "openai",
        2 => "claude",
        3 => "gemini",
        4 => "openrouter",
        _ => unreachable!(),
    };
    let env_path = ".env";
    let mut contents = fs::read_to_string(env_path).unwrap_or_default();
    contents = update_env_var(&contents, "WTF_PROVIDER", provider_name);
    let mut selected_ollama_model = None;
    if selection == 0 {
        // Ask the local Ollama daemon which models are already pulled.
        let base_url =
            env::var("OLLAMA_HOST").unwrap_or_else(|_| "http://localhost:11434".to_string());
        match ureq::get(&format!("{}/api/tags", base_url))
            .timeout(Duration::from_secs(5))
            .call()
        {
            Ok(res) => {
                if let Ok(data) = res.into_json::<OllamaModelsResponse>() {
                    let model_names: Vec<String> =
                        data.models.into_iter().map(|m| m.name).collect();
                    if !model_names.is_empty() {
                        println!("\n{}\n", "Select an Ollama model:".dimmed());
                        let model_selection = Select::with_theme(&ColorfulTheme::default())
                            .items(&model_names)
                            .default(0)
                            .interact();
                        if let Ok(ms) = model_selection {
                            let model_name = &model_names[ms];
                            selected_ollama_model = Some(model_name.clone());
                            contents = update_env_var(&contents, "OLLAMA_MODEL", model_name);
                        }
                    }
                }
            }
            Err(_) => {
                println!(
                    "\n{} {}",
                    "⚠".yellow(),
                    "Could not connect to Ollama to list models. Using default.".dimmed()
                );
            }
        }
    }
    if let Err(e) = fs::write(env_path, &contents) {
        eprintln!(
            "\n{} {}",
            "✖ Could not write .env:".red(),
            e.to_string().red()
        );
        return;
    }
    let display_name = match selection {
        0 => "Ollama",
        1 => "OpenAI",
        2 => "Claude",
        3 => "Gemini",
        4 => "OpenRouter",
        _ => unreachable!(),
    };
    println!(
        "\n{} {} {}",
        "✔".green().bold(),
        "Provider set to".bold(),
        display_name.cyan().bold()
    );
    if let Some(ref model) = selected_ollama_model {
        println!(
            "{} {} {}",
            "✔".green().bold(),
            "Model set to".bold(),
            model.cyan().bold()
        );
    }
    match selection {
        0 => {
            if selected_ollama_model.is_none() {
                println!(
                    "\n {}\n {}\n",
                    "Make sure Ollama is running and you have a model pulled:".dimmed(),
                    "ollama pull qwen3.5:9b".cyan()
                );
            }
        }
        1 | 2 | 3 | 4 => {
            let (env_key, placeholder) = match selection {
                1 => ("OPENAI_API_KEY", "sk-..."),
                2 => ("CLAUDE_API_KEY", "sk-ant-..."),
                3 => ("GEMINI_API_KEY", "AI..."),
                4 => ("OPENROUTER_API_KEY", "sk-or-..."),
                _ => unreachable!(),
            };
            let existing_key = env::var(env_key).unwrap_or_default();
            let has_key = !existing_key.is_empty();
            if has_key {
                println!(
                    "\n{} {} {}",
                    "✔".green().bold(),
                    format!("{} is already set.", env_key).bold(),
                    "Press Enter to keep it, or paste a new key:".dimmed()
                );
            } else {
                println!(
                    "\n{}",
                    format!("Paste your {} (or press Enter to skip):", env_key).dimmed()
                );
            }
            let input: Result<String, _> = Input::with_theme(&ColorfulTheme::default())
                .with_prompt(format!(" {}", env_key))
                .default(if has_key { existing_key.clone() } else { String::new() })
                .allow_empty(true)
                .show_default(false)
                .interact_text();
            if let Ok(key) = input {
                let key = key.trim().to_string();
                if !key.is_empty() {
                    contents = update_env_var(&contents, env_key, &key);
                    if let Err(e) = fs::write(env_path, &contents) {
                        eprintln!(
                            "\n{} {}",
                            "✖ Could not write .env:".red(),
                            e.to_string().red()
                        );
                        return;
                    }
                    // Echo only a short prefix of the key, never the whole
                    // secret; take(8) over chars avoids slicing mid code point.
                    let key_prefix: String = key.chars().take(8).collect();
                    println!(
                        "{} {} {}",
                        "✔".green().bold(),
                        format!("{} saved to .env", env_key).bold(),
                        format!("({}...)", key_prefix).dimmed()
                    );
                } else if !has_key {
                    println!(
                        "\n {}\n {}\n",
                        "Add your API key to .env later:".dimmed(),
                        format!("{}={}", env_key, placeholder).cyan()
                    );
                }
            }
        }
        _ => {}
    }
}
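
/// Sets `key=value` in dotenv-style `contents`, replacing the first existing
/// `KEY=` line or appending a new one, and ensures a trailing newline.
///
/// For example, `update_env_var("A=1\n", "WTF_PROVIDER", "ollama")` yields
/// `"A=1\nWTF_PROVIDER=ollama\n"`.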
fn update_env_var(contents: &str, key: &str, value: &str) -> String {
    let prefix = format!("{}=", key);
    let mut lines: Vec<String> = contents.lines().map(|l| l.to_string()).collect();
    let mut found = false;
    for line in lines.iter_mut() {
        if line.starts_with(&prefix) {
            *line = format!("{}={}", key, value);
            found = true;
            break;
        }
    }
    if !found {
        lines.push(format!("{}={}", key, value));
    }
    let mut result = lines.join("\n");
    if !result.ends_with('\n') && !result.is_empty() {
        result.push('\n');
    }
    result
}
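
/// Dispatches to the provider named by `WTF_PROVIDER`; with no (or an
/// unrecognized) provider set, tries each backend in order. Returns the
/// answer plus a human-readable provider label, or `None` after printing
/// setup instructions.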
fn get_ai_response(prompt: &str) -> Option<(String, String)> {
    let provider = env::var("WTF_PROVIDER").unwrap_or_default().to_lowercase();
    match provider.as_str() {
        "ollama" => {
            if let Some((answer, model)) = try_ollama(prompt) {
                return Some((answer, format!("Ollama – {}", model)));
            }
            eprintln!(
                "\n{}",
                "✖ Ollama failed. Is it running? Try: ollama serve".red()
            );
            return None;
        }
        "openai" => {
            if let Some((answer, model)) = try_openai(prompt) {
                return Some((answer, format!("OpenAI – {}", model)));
            }
            eprintln!(
                "\n{}",
                "✖ OpenAI failed. Check your OPENAI_API_KEY in .env".red()
            );
            return None;
        }
        "claude" => {
            if let Some((answer, model)) = try_claude(prompt) {
                return Some((answer, format!("Claude – {}", model)));
            }
            eprintln!(
                "\n{}",
                "✖ Claude failed. Check your CLAUDE_API_KEY in .env".red()
            );
            return None;
        }
        "gemini" => {
            if let Some((answer, model)) = try_gemini(prompt) {
                return Some((answer, format!("Gemini – {}", model)));
            }
            eprintln!(
                "\n{}",
                "✖ Gemini failed. Check your GEMINI_API_KEY in .env".red()
            );
            return None;
        }
        "openrouter" => {
            if let Some((answer, model)) = try_openrouter(prompt) {
                return Some((answer, format!("OpenRouter – {}", model)));
            }
            eprintln!(
                "\n{}",
                "✖ OpenRouter failed. Check your OPENROUTER_API_KEY in .env".red()
            );
            return None;
        }
        _ => {
            // No explicit provider configured: fall back to whichever backend
            // responds first, in this fixed order.
            if let Some((answer, model)) = try_ollama(prompt) {
                return Some((answer, format!("Ollama – {}", model)));
            }
            if let Some((answer, model)) = try_openai(prompt) {
                return Some((answer, format!("OpenAI – {}", model)));
            }
            if let Some((answer, model)) = try_claude(prompt) {
                return Some((answer, format!("Claude – {}", model)));
            }
            if let Some((answer, model)) = try_gemini(prompt) {
                return Some((answer, format!("Gemini – {}", model)));
            }
            if let Some((answer, model)) = try_openrouter(prompt) {
                return Some((answer, format!("OpenRouter – {}", model)));
            }
        }
    }
    // Every backend failed or none is configured: print setup instructions.
    println!(
        "\n{}\n\n {}\n\n {}\n Install from {} then run:\n {}\n\n {}\n Create a {} file with:\n {}\n\n {}\n Create a {} file with:\n {}\n\n {}\n Create a {} file with:\n {}\n\n {}\n Create a {} file with:\n {}\n",
        "✖ No AI provider available.".red(),
        "Run 'wtf --setup' to configure a provider.".bold().cyan(),
        "Option 1 – Ollama (local, free, private)".bold(),
        "https://ollama.com".underline(),
        "ollama pull qwen3.5:9b".cyan(),
        "Option 2 – OpenAI (cloud)".bold(),
        ".env".cyan(),
        "OPENAI_API_KEY=your_key_here".cyan(),
        "Option 3 – Claude (cloud)".bold(),
        ".env".cyan(),
        "CLAUDE_API_KEY=your_key_here".cyan(),
        "Option 4 – Gemini (cloud)".bold(),
        ".env".cyan(),
        "GEMINI_API_KEY=your_key_here".cyan(),
        "Option 5 – OpenRouter (cloud)".bold(),
        ".env".cyan(),
        "OPENROUTER_API_KEY=your_key_here".cyan(),
    );
    None
}
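
/// Queries a local Ollama daemon (`OLLAMA_HOST`, default `localhost:11434`)
/// with the model from `OLLAMA_MODEL`. Errors are swallowed via `.ok()?` so
/// the fallback chain in `get_ai_response` can move on quietly.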
fn try_ollama(prompt: &str) -> Option<(String, String)> {
    let model = env::var("OLLAMA_MODEL").unwrap_or_else(|_| "qwen3.5:9b".to_string());
    let base_url =
        env::var("OLLAMA_HOST").unwrap_or_else(|_| "http://localhost:11434".to_string());
    let body = OllamaChatRequest {
        model: model.clone(),
        messages: vec![ChatMessage {
            role: "user".to_string(),
            content: prompt.to_string(),
        }],
        stream: false,
        think: false,
    };
    // Local models can be slow to load and generate, hence the long timeout.
    let res = ureq::post(&format!("{}/api/chat", base_url))
        .timeout(Duration::from_secs(600))
        .send_json(&body)
        .ok()?;
    let data: OllamaChatResponse = res.into_json().ok()?;
    let content = data.message?.content?;
    let trimmed = content.trim().to_string();
    if trimmed.is_empty() {
        None
    } else {
        Some((trimmed, model))
    }
}
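
/// Queries OpenAI's chat completions API; `OPENAI_API_BASE` allows pointing
/// at any OpenAI-compatible server instead.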
fn try_openai(prompt: &str) -> Option<(String, String)> {
    let api_key = env::var("OPENAI_API_KEY").ok()?;
    let base_url =
        env::var("OPENAI_API_BASE").unwrap_or_else(|_| "https://api.openai.com/v1".to_string());
    let model = env::var("OPENAI_MODEL").unwrap_or_else(|_| "gpt-4.1-mini".to_string());
    let body = ApiChatRequest {
        model: model.clone(),
        messages: vec![ChatMessage {
            role: "user".to_string(),
            content: prompt.to_string(),
        }],
    };
    let res = ureq::post(&format!("{}/chat/completions", base_url))
        .timeout(Duration::from_secs(30))
        .set("Content-Type", "application/json")
        .set("Authorization", &format!("Bearer {}", api_key))
        .send_json(&body);
    match res {
        Ok(response) => {
            let data: ApiChatResponse = response.into_json().ok()?;
            let content = data.choices?.into_iter().next()?.message?.content?;
            let trimmed = content.trim().to_string();
            if trimmed.is_empty() {
                None
            } else {
                Some((trimmed, model))
            }
        }
        Err(e) => {
            // Surface HTTP error bodies (auth, quota, etc.) instead of failing silently.
            if let ureq::Error::Status(code, response) = e {
                let error_body =
                    response.into_string().unwrap_or_else(|_| "unknown error".to_string());
                eprintln!(
                    "\n{} (Status {}): {}",
                    "⚠ OpenAI API Error".yellow().bold(),
                    code,
                    error_body.red()
                );
            } else {
                eprintln!(
                    "\n{} {}",
                    "⚠ Connection Error:".yellow().bold(),
                    e.to_string().red()
                );
            }
            None
        }
    }
}
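
/// Queries Anthropic's Messages API and joins all returned text blocks into
/// one answer.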
fn try_claude(prompt: &str) -> Option<(String, String)> {
    let api_key = env::var("CLAUDE_API_KEY").ok()?;
    let model = env::var("CLAUDE_MODEL").unwrap_or_else(|_| "claude-sonnet-4-20250514".to_string());
    let body = ClaudeRequest {
        model: model.clone(),
        max_tokens: 4096,
        messages: vec![ChatMessage {
            role: "user".to_string(),
            content: prompt.to_string(),
        }],
    };
    let res = ureq::post("https://api.anthropic.com/v1/messages")
        .timeout(Duration::from_secs(60))
        .set("Content-Type", "application/json")
        .set("x-api-key", &api_key)
        .set("anthropic-version", "2023-06-01")
        .send_json(&body);
    match res {
        Ok(response) => {
            let data: ClaudeResponse = response.into_json().ok()?;
            // Claude returns a list of content blocks; keep only the text ones.
            let content = data
                .content?
                .into_iter()
                .filter_map(|block| block.text)
                .collect::<Vec<_>>()
                .join("");
            let trimmed = content.trim().to_string();
            if trimmed.is_empty() {
                None
            } else {
                Some((trimmed, model))
            }
        }
        Err(e) => {
            if let ureq::Error::Status(code, response) = e {
                let error_body =
                    response.into_string().unwrap_or_else(|_| "unknown error".to_string());
                eprintln!(
                    "\n{} (Status {}): {}",
                    "⚠ Claude API Error".yellow().bold(),
                    code,
                    error_body.red()
                );
            } else {
                eprintln!(
                    "\n{} {}",
                    "⚠ Connection Error:".yellow().bold(),
                    e.to_string().red()
                );
            }
            None
        }
    }
}
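
/// Queries the Gemini `generateContent` endpoint; note the API key travels as
/// a URL query parameter rather than a header.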
fn try_gemini(prompt: &str) -> Option<(String, String)> {
    let api_key = env::var("GEMINI_API_KEY").ok()?;
    let model = env::var("GEMINI_MODEL").unwrap_or_else(|_| "gemini-2.0-flash".to_string());
    let body = GeminiRequest {
        contents: vec![GeminiContent {
            parts: vec![GeminiPart {
                text: prompt.to_string(),
            }],
        }],
    };
    let url = format!(
        "https://generativelanguage.googleapis.com/v1beta/models/{}:generateContent?key={}",
        model, api_key
    );
    let res = ureq::post(&url)
        .timeout(Duration::from_secs(30))
        .set("Content-Type", "application/json")
        .send_json(&body);
    match res {
        Ok(response) => {
            let data: GeminiResponse = response.into_json().ok()?;
            // Drill into candidates[0].content.parts[0].text.
            let content = data
                .candidates?
                .into_iter()
                .next()?
                .content?
                .parts?
                .into_iter()
                .next()?
                .text?;
            let trimmed = content.trim().to_string();
            if trimmed.is_empty() {
                None
            } else {
                Some((trimmed, model))
            }
        }
        Err(e) => {
            if let ureq::Error::Status(code, response) = e {
                let error_body =
                    response.into_string().unwrap_or_else(|_| "unknown error".to_string());
                eprintln!(
                    "\n{} (Status {}): {}",
                    "⚠ Gemini API Error".yellow().bold(),
                    code,
                    error_body.red()
                );
            } else {
                eprintln!(
                    "\n{} {}",
                    "⚠ Connection Error:".yellow().bold(),
                    e.to_string().red()
                );
            }
            None
        }
    }
}
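
/// Queries OpenRouter, which speaks the same OpenAI-compatible chat schema,
/// so the `ApiChatRequest`/`ApiChatResponse` types are reused here.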
fn try_openrouter(prompt: &str) -> Option<(String, String)> {
    let api_key = env::var("OPENROUTER_API_KEY").ok()?;
    let model = env::var("OPENROUTER_MODEL")
        .unwrap_or_else(|_| "arcee-ai/trinity-mini:free".to_string());
    let body = ApiChatRequest {
        model: model.clone(),
        messages: vec![ChatMessage {
            role: "user".to_string(),
            content: prompt.to_string(),
        }],
    };
    let res = ureq::post("https://openrouter.ai/api/v1/chat/completions")
        .timeout(Duration::from_secs(30))
        .set("Content-Type", "application/json")
        .set("Authorization", &format!("Bearer {}", api_key))
        .send_json(&body)
        .ok()?;
    let data: ApiChatResponse = res.into_json().ok()?;
    let content = data.choices?.into_iter().next()?.message?.content?;
    let trimmed = content.trim().to_string();
    if trimmed.is_empty() {
        None
    } else {
        Some((trimmed, model))
    }
}