use anyhow::Result;
use clap::Args;
use octomind::config::Config;
use octomind::session::{chat_completion_with_provider, ChatCompletionProviderParams, Message};
use serde::{Deserialize, Serialize};
use std::fs::OpenOptions;
use std::io::{self, Read, Write};
/// Best-effort append of an executed command to the user's shell history file.
///
/// The target file is resolved from `$HISTFILE` when set, otherwise guessed
/// from `$SHELL` (zsh, bash, fish), defaulting to the bash location/format
/// for unknown shells. Open/write failures are deliberately ignored —
/// failing to record history must never abort command execution.
fn add_to_shell_history(command: &str) -> Result<()> {
    let shell = std::env::var("SHELL").unwrap_or_else(|_| "/bin/bash".to_string());
    let home = std::env::var("HOME")?;

    // An explicit $HISTFILE wins; otherwise use the conventional per-shell path.
    let history_file = if let Ok(histfile) = std::env::var("HISTFILE") {
        histfile
    } else if shell.contains("zsh") {
        format!("{}/.zsh_history", home)
    } else if shell.contains("bash") {
        format!("{}/.bash_history", home)
    } else if shell.contains("fish") {
        format!("{}/.local/share/fish/fish_history", home)
    } else {
        format!("{}/.bash_history", home)
    };

    // Seconds since the Unix epoch; both zsh and fish store a timestamp.
    let timestamp = || {
        std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs()
    };

    let history_entry = if shell.contains("zsh") {
        // zsh extended history format: ": <start>:<elapsed>;<command>".
        format!(": {}:0;{}\n", timestamp(), command)
    } else if shell.contains("fish") {
        // fish_history is YAML-like; the "when:" key must be indented by
        // exactly two spaces or fish will not accept the entry.
        format!("- cmd: {}\n  when: {}\n", command, timestamp())
    } else {
        // bash (and fallback): one command per line, no metadata.
        format!("{}\n", command)
    };

    // Best effort: silently skip on open failure (read-only FS, no file, …).
    if let Ok(mut file) = OpenOptions::new()
        .create(true)
        .append(true)
        .open(&history_file)
    {
        let _ = file.write_all(history_entry.as_bytes());
        let _ = file.flush();
    }

    Ok(())
}
/// Command-line arguments for the `shell` subcommand.
#[derive(Args, Debug)]
pub struct ShellArgs {
    /// Natural-language description of the desired command; when omitted,
    /// the description is read from stdin instead.
    #[arg(value_name = "DESCRIPTION")]
    pub description: Option<String>,
    /// Model identifier override; falls back to the config's effective model.
    #[arg(long)]
    pub model: Option<String>,
    /// Maximum completion tokens; falls back to the config's effective value.
    #[arg(long)]
    pub max_tokens: Option<u32>,
    /// Skip the interactive "Execute this command?" confirmation prompt.
    #[arg(long, short)]
    pub yes: bool,
    /// Sampling temperature override; falls back to `config.shell.temperature`.
    #[arg(long)]
    pub temperature: Option<f32>,
}
/// Structured command suggestion the model is asked to return as JSON.
#[derive(Serialize, Deserialize, Debug)]
struct ShellResponse {
    /// The exact shell command to execute (run via `sh -c`).
    command: String,
    /// Brief human-readable explanation of what the command does.
    explanation: String,
    /// Optional warning shown when the command is potentially dangerous.
    safety_notes: Option<String>,
}
/// Parse the model output as a `ShellResponse`.
///
/// First tries the raw content as JSON; on failure, falls back to the
/// outermost `{…}` substring of the trimmed content (models often wrap
/// the JSON in prose or code fences). Returns `None` when no parseable
/// object can be found, including when `}` precedes `{` (which would
/// otherwise make the slice bounds panic).
fn parse_shell_response(content: &str) -> Option<ShellResponse> {
    if let Ok(resp) = serde_json::from_str(content) {
        return Some(resp);
    }
    let trimmed = content.trim();
    let start = trimmed.find('{')?;
    let end = trimmed.rfind('}')?;
    if end < start {
        return None;
    }
    serde_json::from_str(&trimmed[start..=end]).ok()
}

/// Entry point for the `shell` subcommand: turn a natural-language
/// description into a shell command via the configured AI provider,
/// confirm with the user (unless `--yes`), then run it with `sh -c`.
///
/// Exits the process with a non-zero code on missing input, an
/// unparseable AI response, or a failing command.
pub async fn execute(args: &ShellArgs, config: &Config) -> Result<()> {
    // Take the description from the CLI argument or, failing that, stdin.
    let description = match &args.description {
        Some(desc) => desc.clone(),
        None => {
            let mut buffer = String::new();
            io::stdin().read_to_string(&mut buffer)?;
            buffer.trim().to_string()
        }
    };
    if description.is_empty() {
        octomind::log_error!(
            "Error: No description provided. Use argument or pipe description to stdin."
        );
        std::process::exit(1);
    }

    // CLI flags override the configured model/temperature.
    let model = args
        .model
        .clone()
        .unwrap_or_else(|| config.get_effective_model());
    let temperature = args.temperature.unwrap_or(config.shell.temperature);

    // Strip MCP servers so command generation cannot invoke tools.
    let mut clean_config = config.clone();
    clean_config.mcp.servers.clear();

    let base_system_prompt = &config.shell.system;
    let current_dir = std::env::current_dir().unwrap_or_else(|_| std::path::PathBuf::from("."));
    let system_prompt = crate::session::helper_functions::process_placeholders_async(
        base_system_prompt,
        &current_dir,
    )
    .await;

    let user_prompt = format!(
        "Generate a shell command for: {}\n\n\
        Please respond with a JSON object containing:\n\
        - \"command\": the exact shell command to execute\n\
        - \"explanation\": brief explanation of what the command does\n\
        - \"safety_notes\": optional warnings if the command is potentially dangerous\n\n\
        Only respond with the JSON object, no other text.",
        description
    );

    // Both messages share one creation timestamp (seconds since epoch).
    let now = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap_or_default()
        .as_secs();
    let messages = vec![
        Message {
            role: "system".to_string(),
            content: system_prompt,
            timestamp: now,
            cached: false,
            ..Default::default()
        },
        Message {
            role: "user".to_string(),
            content: user_prompt,
            timestamp: now,
            cached: false,
            ..Default::default()
        },
    ];

    let response = chat_completion_with_provider(ChatCompletionProviderParams {
        messages: &messages,
        model: &model,
        temperature,
        top_p: clean_config.shell.top_p,
        top_k: clean_config.shell.top_k,
        max_tokens: args
            .max_tokens
            .unwrap_or_else(|| clean_config.get_effective_max_tokens()),
        config: &clean_config,
        max_retries: 0,
        cancellation_token: None,
    })
    .await?;

    // A single failure path replaces the three duplicated error blocks.
    let shell_response = match parse_shell_response(&response.content) {
        Some(resp) => resp,
        None => {
            octomind::log_error!("Error: Could not parse AI response as structured command.");
            octomind::log_error!("Raw response: {}", response.content);
            std::process::exit(1);
        }
    };

    println!("📝 Command: {}", shell_response.command);
    println!("💡 Explanation: {}", shell_response.explanation);
    if let Some(safety_notes) = &shell_response.safety_notes {
        use colored::*;
        println!("⚠️  Safety notes: {}", safety_notes.yellow());
    }

    // Interactive confirmation unless --yes was passed.
    if !args.yes {
        print!("\n❓ Execute this command? [y/N]: ");
        io::Write::flush(&mut io::stdout())?;
        let mut input = String::new();
        io::stdin().read_line(&mut input)?;
        let input = input.trim().to_lowercase();
        if input != "y" && input != "yes" {
            println!("❌ Command execution cancelled.");
            return Ok(());
        }
    }

    println!("\n🚀 Executing: {}", shell_response.command);
    // History recording is best-effort; ignore any failure.
    let _ = add_to_shell_history(&shell_response.command);

    let status = std::process::Command::new("sh")
        .arg("-c")
        .arg(&shell_response.command)
        .status()?;
    if !status.success() {
        use colored::Colorize;
        println!(
            "❌ Command failed with exit code: {}",
            status.code().unwrap_or(-1).to_string().red()
        );
        // Propagate the child's exit code (1 when killed by a signal).
        std::process::exit(status.code().unwrap_or(1));
    }
    Ok(())
}