1use clap::Parser;
2use colored::*;
3use directories::ProjectDirs;
4use gemini_client_rs::{
5 types::{GenerateContentRequest, PartResponse},
6 GeminiClient,
7};
8use prompt::SYSTEM_PROMPT;
9use rustyline::{error::ReadlineError, Config, DefaultEditor};
10use serde_json::json;
11use std::{
12 borrow::Cow,
13 error::Error,
14 fs,
15 io::{self, Write},
16 path::PathBuf,
17 process::Command,
18};
19use types::{GeminiResponse, Message, NexShConfig};
20
21use crate::{available_models::list_available_models, prompt::EXPLANATION_PROMPT};
22use indicatif::{ProgressBar, ProgressStyle};
23pub mod available_models;
24pub mod prompt;
25pub mod types;
26
// Command-line arguments, parsed with clap's derive API.
// NOTE: field comments below use `//` on purpose — `///` doc comments would be
// picked up by the clap derive macro and change the generated --help output.
#[derive(Parser, Debug)]
#[command(
    name = "nexsh",
    version = "0.8.1",
    about = "Next-generation AI-powered shell using Google Gemini"
)]
struct Args {
    // `-i` / `--init`: run the interactive setup (API key, sizes, model).
    #[arg(short, long)]
    init: bool,

    // `-e` / `--execute`: process a single natural-language command and exit.
    #[arg(short, long)]
    execute: Option<String>,
}
42
43impl Default for NexShConfig {
44 fn default() -> Self {
45 Self {
46 api_key: String::new(),
47 history_size: 1000,
48 max_context_messages: 100,
49 model: Some("gemini-2.0-flash".to_string()),
50 }
51 }
52}
53
/// The interactive AI shell: configuration, persistence paths, the Gemini
/// client, the line editor, and the rolling conversation context.
pub struct NexSh {
    config: NexShConfig,      // user settings (API key, history/context sizes, model)
    config_dir: PathBuf,      // platform config directory (from ProjectDirs)
    history_file: PathBuf,    // rustyline history persistence path
    context_file: PathBuf,    // serialized conversation context path
    client: GeminiClient,     // Gemini API client built from the configured key
    editor: DefaultEditor,    // rustyline editor used for every prompt
    messages: Vec<Message>,   // rolling conversation context sent with requests
}
63
64impl NexSh {
65 fn set_progress_message(&self, message: impl Into<Cow<'static, str>>) -> ProgressBar {
67 let pb = ProgressBar::new_spinner();
68 let spinner_style = ProgressStyle::with_template("{spinner} {wide_msg}")
69 .unwrap()
70 .tick_chars("⠁⠂⠄⡀⢀⠠⠐⠈ ");
71 pb.set_style(spinner_style);
72 pb.enable_steady_tick(std::time::Duration::from_millis(30));
73 pb.set_message(message);
74 pb
75 }
76 pub fn set_model(&mut self, model: &str) -> Result<(), Box<dyn Error>> {
78 self.config.model = Some(model.to_string());
79 self.save_config()?;
80 println!("✅ Gemini model set to: {}", model.green());
81 Ok(())
82 }
    /// Constructs a `NexSh`, loading (or defaulting) the configuration,
    /// conversation context, and readline history from the platform config
    /// directory.
    ///
    /// Errors if the config directory cannot be created, persisted files
    /// fail to read/parse, or the line editor cannot be initialized.
    pub fn new() -> Result<Self, Box<dyn Error>> {
        let proj_dirs = ProjectDirs::from("com", "gemini-shell", "nexsh")
            .ok_or("Failed to get project directories")?;

        let config_dir = proj_dirs.config_dir().to_path_buf();
        fs::create_dir_all(&config_dir)?;

        let config_file = config_dir.join("nexsh_config.json");
        let history_file = config_dir.join("nexsh_history.txt");
        let context_file = config_dir.join("nexsh_context.json");

        // Parse the config leniently, field by field, so a partial or older
        // config file still loads; per-field fallbacks mirror
        // `NexShConfig::default()`.
        let config = if config_file.exists() {
            let content = fs::read_to_string(&config_file)?;
            let parsed: serde_json::Value = serde_json::from_str(&content)?;
            NexShConfig {
                api_key: parsed
                    .get("api_key")
                    .and_then(|v| v.as_str())
                    .unwrap_or("")
                    .to_string(),
                history_size: parsed
                    .get("history_size")
                    .and_then(|v| v.as_u64())
                    .unwrap_or(1000) as usize,
                max_context_messages: parsed
                    .get("max_context_messages")
                    .and_then(|v| v.as_u64())
                    .unwrap_or(100) as usize,
                model: parsed
                    .get("model")
                    .and_then(|v| v.as_str())
                    .map(|s| s.to_string())
                    .or(Some("gemini-2.0-flash".to_string())),
            }
        } else {
            NexShConfig::default()
        };

        // Restore the previous conversation context, if any was persisted.
        let messages = if context_file.exists() {
            let content = fs::read_to_string(&context_file)?;
            serde_json::from_str(&content)?
        } else {
            Vec::new()
        };
        let editor_config = Config::builder()
            .max_history_size(config.history_size)?
            .build();
        let mut editor = DefaultEditor::with_config(editor_config)?;
        if history_file.exists() {
            // Best effort: a corrupt history file must not abort startup.
            let _ = editor.load_history(&history_file);
        }

        let client = GeminiClient::new(config.api_key.clone());

        Ok(Self {
            config,
            config_dir,
            history_file,
            context_file,
            client,
            editor,
            messages,
        })
    }
147
148 fn save_config(&self) -> Result<(), Box<dyn Error>> {
149 let config_file = self.config_dir.join("nexsh_config.json");
150 let content = serde_json::to_string_pretty(&self.config)?;
151 fs::write(config_file, content)?;
152 Ok(())
153 }
154
155 fn save_context(&self) -> Result<(), Box<dyn Error>> {
156 let content = serde_json::to_string_pretty(&self.messages)?;
157 fs::write(&self.context_file, content)?;
158 Ok(())
159 }
160
161 fn add_message(&mut self, role: &str, content: &str) {
162 let message = Message {
163 role: role.to_string(),
164 content: content.to_string(),
165 timestamp: std::time::SystemTime::now()
166 .duration_since(std::time::UNIX_EPOCH)
167 .unwrap()
168 .as_secs(),
169 };
170
171 self.messages.push(message);
172
173 if self.messages.len() > self.config.max_context_messages {
175 self.messages = self
176 .messages
177 .split_off(self.messages.len() - self.config.max_context_messages);
178 }
179
180 let _ = self.save_context();
181 }
182
    /// Interactive first-run / re-run setup: prompts for the API key, history
    /// size, context-window size, and Gemini model, then persists the config.
    ///
    /// Blank or unparsable answers leave the corresponding setting unchanged;
    /// only the model prompt and API-key prompt propagate readline errors.
    pub fn initialize(&mut self) -> Result<(), Box<dyn Error>> {
        println!("🤖 Welcome to NexSh Setup!");

        let input = self
            .editor
            .readline("Enter your Gemini API key (leave blank to keep current if exist): ")?;
        let api_key = input.trim();
        if !api_key.is_empty() {
            self.config.api_key = api_key.to_string();
        }

        // NOTE(review): the new key is saved to config but `self.client` is
        // not rebuilt here — presumably the next construction picks it up;
        // confirm a changed key takes effect within the same session.
        if let Ok(input) = self.editor.readline("Enter history size (default 1000): ") {
            if let Ok(size) = input.trim().parse() {
                self.config.history_size = size;
            }
        }

        if let Ok(input) = self
            .editor
            .readline("Enter max context messages (default 100): ")
        {
            if let Ok(size) = input.trim().parse() {
                self.config.max_context_messages = size;
            }
        }

        let models = list_available_models();
        println!("Available Gemini models:");
        for (i, m) in models.iter().enumerate() {
            println!(" {}. {}", i + 1, m);
        }
        let input = self
            .editor
            .readline("Select Gemini model by number or name (default 1): ")?;
        let model = input.trim();
        // Selection accepts a 1-based index or a name prefix; anything
        // unmatched falls back to the first listed model.
        let selected = if model.is_empty() {
            models[0]
        } else if let Ok(idx) = model.parse::<usize>() {
            models
                .get(idx.saturating_sub(1))
                .copied()
                .unwrap_or(models[0])
        } else {
            models
                .iter()
                .find(|m| m.starts_with(model))
                .copied()
                .unwrap_or(models[0])
        };
        self.config.model = Some(selected.to_string());
        self.save_config()?;
        println!("✅ Configuration saved successfully!");
        Ok(())
    }
238
    /// Sends the user's natural-language `input` (plus the rolling context)
    /// to Gemini, prints the structured reply, and — after confirmation for
    /// dangerous commands — executes the suggested shell command.
    ///
    /// Runs setup first if no API key is configured yet.
    pub async fn process_command(&mut self, input: &str) -> Result<(), Box<dyn Error>> {
        if self.config.api_key.is_empty() {
            self.initialize()?;
        }

        // The system prompt is parameterized on the host OS so suggested
        // commands match the platform.
        let os = std::env::consts::OS.to_string();
        let prompt = SYSTEM_PROMPT.replace("{OS}", &os);

        self.add_message("user", input);

        // Replay the whole rolling context as the request `contents`.
        let mut contents = Vec::new();

        for msg in &self.messages {
            contents.push(json!({
                "parts": [{
                    "text": msg.content
                }],
                "role": msg.role
            }));
        }

        // Constrain the reply with a response schema so it parses into
        // `GeminiResponse` (message/command/dangerous/category).
        let req_json = json!({
            "generationConfig": {
                "responseMimeType": "application/json",
                "responseSchema": {
                    "type": "object",
                    "required": ["message", "command", "dangerous", "category"],
                    "properties": {
                        "message": {
                            "type": "string",
                            "description": "Clear, concise message with relevant emoji",
                            "minLength": 1
                        },
                        "command": {
                            "type": "string",
                            "description": "Shell command to execute, empty if no action needed"
                        },
                        "dangerous": {
                            "type": "boolean",
                            "description": "True if command could be potentially harmful"
                        },
                        "category": {
                            "type": "string",
                            "description": "Classification of the command type",
                            "enum": ["system", "file", "network", "package", "text", "process", "other"]
                        }
                    }
                },
            },
            "system_instruction": {
                "parts": [
                    {
                        "text": prompt
                    }
                ],
                "role": "system"
            },
            "contents": contents,
            "tools": []
        });

        let pb = self.set_progress_message("Thinking...".yellow().to_string());
        let request: GenerateContentRequest = serde_json::from_value(req_json)?;
        let model = self.config.model.as_deref().unwrap_or("gemini-2.0-flash");
        let response = self.client.generate_content(model, &request).await?;
        pb.finish_and_clear();
        if let Some(candidates) = response.candidates {
            for candidate in &candidates {
                for part in &candidate.content.parts {
                    if let PartResponse::Text(json_str) = part {
                        // Despite the JSON mime type, the model may wrap the
                        // payload in a ```json fence — strip it before parsing.
                        let clean_json = json_str
                            .trim()
                            .trim_start_matches("```json")
                            .trim_end_matches("```")
                            .trim();

                        match serde_json::from_str::<GeminiResponse>(clean_json) {
                            Ok(response) => {
                                println!("{} {}", "🤖 →".green(), response.message.yellow());
                                // No command suggested: record the reply and stop.
                                if response.command.is_empty() {
                                    self.add_message("model", &format!("{}", response.message));
                                    return Ok(());
                                } else {
                                    self.editor.add_history_entry(&response.command)?;
                                }
                                println!(
                                    "{} {}",
                                    "Category : ".green(),
                                    response.category.yellow()
                                );
                                println!("{} {}", "→".blue(), response.command);
                                self.add_message(
                                    "model",
                                    &format!(
                                        "Command:{}, message:{}",
                                        response.command, response.message
                                    ),
                                );

                                // Dangerous commands require explicit confirmation.
                                if !response.dangerous || self.confirm_execution()? {
                                    let pb = self.set_progress_message(
                                        "Running command...".green().to_string(),
                                    );
                                    let output = self.execute_command(&response.command)?;
                                    pb.finish_and_clear();
                                    if !output.is_empty() {
                                        // Feed command output back into the
                                        // context so follow-ups can reference it.
                                        self.add_message(
                                            "model",
                                            &format!("Command output:\n{}", output),
                                        );
                                    }
                                } else {
                                    println!("Command execution cancelled.");
                                }
                            }
                            Err(e) => {
                                // Parse failure: surface the raw payload so the
                                // user can see what the model actually returned.
                                eprintln!("Failed to parse response: {}", e);
                                println!("Raw response: {}", clean_json);

                                if cfg!(debug_assertions) {
                                    println!(
                                        "Debug: Response contains markdown block: {}",
                                        json_str.contains("```")
                                    );
                                    println!("Debug: Cleaned JSON: {}", clean_json);
                                }
                            }
                        }
                    }
                }
            }
        }
        Ok(())
    }
378
379 fn confirm_execution(&mut self) -> io::Result<bool> {
380 let _input = self
381 .editor
382 .readline(&("? Execute? [y/N]: ".red().to_string()))
383 .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
384 print!("{}️", "⚠️".red());
385 if _input.trim() == "N" || _input.trim() == "n" {
386 return Ok(false);
387 }
388 let _input = self
389 .editor
390 .readline(
391 &(" Execute potentially dangerous command? [y/N]: "
392 .red()
393 .to_string()),
394 )
395 .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
396 Ok(_input.trim().to_lowercase() == "y")
397 }
398
    /// Runs `command` through the platform shell (`cmd /C` on Windows,
    /// `sh -c` elsewhere), echoing its stdout.
    ///
    /// On success returns the captured stdout as a `String`. On a non-zero
    /// exit it reports the failure, spawns a background task that asks Gemini
    /// to explain the error, and returns `Err` immediately — the explanation
    /// prints asynchronously whenever the task completes.
    ///
    /// NOTE(review): stderr is captured by `output()` but never surfaced to
    /// the user or to the AI explanation — confirm whether that is intended.
    fn execute_command(&self, command: &str) -> Result<String, Box<dyn Error>> {
        #[cfg(target_os = "windows")]
        let (program, args) = ("cmd", vec!["/C", command]);

        #[cfg(not(target_os = "windows"))]
        let (program, args) = ("sh", vec!["-c", command]);

        let output = Command::new(program).args(args).output()?;

        io::stdout().write_all(&output.stdout)?;

        if !output.status.success() {
            println!("{} {}", "⚠️ Command failed:".red(), command.yellow());
            println!(
                "{} {}",
                "Exit code:".red(),
                output.status.code().unwrap_or(-1).to_string().yellow()
            );
            let error_message = format!(
                "Command failed with exit code: {}",
                output.status.code().unwrap_or(-1)
            );

            // The spinner is moved into the spawned task below, which clears
            // it once the explanation (or a failure notice) is printed.
            let pb = self.set_progress_message("Requesting AI analysis...".blue().to_string());

            // Clone everything the detached task needs: it must be 'static,
            // so it cannot borrow `self` or `command`.
            let command_clone = command.to_string();
            let error_message_clone = error_message.clone();
            let client_clone = GeminiClient::new(self.config.api_key.clone());
            let model = self
                .config
                .model
                .clone()
                .unwrap_or_else(|| "gemini-2.0-flash".to_string());
            // Fire-and-forget: the handle is dropped, so the explanation may
            // interleave with the next prompt. NOTE(review): presumably
            // acceptable UX here — confirm.
            tokio::spawn(async move {
                let prompt = EXPLANATION_PROMPT
                    .replace("{COMMAND}", &command_clone)
                    .replace("{ERROR}", &error_message_clone);

                let req_json = json!({"contents": [{
                    "parts": [{
                        "text": prompt
                    }],
                    "role": "user"
                }],
                "tools": []
                });

                let request: GenerateContentRequest = serde_json::from_value(req_json).unwrap();
                if let Ok(response) = client_clone.generate_content(&model, &request).await {
                    if let Some(candidates) = response.candidates {
                        for candidate in &candidates {
                            for part in &candidate.content.parts {
                                if let PartResponse::Text(explanation) = part {
                                    pb.finish_and_clear();
                                    println!(
                                        "{} {}",
                                        "🤖 AI Explanation:".green(),
                                        explanation.yellow()
                                    );
                                }
                            }
                        }
                    } else {
                        pb.finish_and_clear();
                        println!("{}", "No AI explanation available.".red());
                    }
                } else {
                    pb.finish_and_clear();
                    println!("{}", "Failed to get AI explanation.".red());
                }
            });

            // Propagate the failure to the caller right away; the explanation
            // task above continues in the background.
            return Err(error_message.into());
        }
        Ok(String::from_utf8(output.stdout)?)
    }
476
477 fn clear_context(&mut self) -> Result<(), Box<dyn Error>> {
478 self.messages.clear();
479 self.save_context()?;
480 println!("{}", "🧹 Conversation context cleared".green());
481 Ok(())
482 }
483
484 pub fn print_help(&self) -> Result<(), Box<dyn Error>> {
485 println!("🤖 NexSh Help:");
486 println!(" - Type 'exit' or 'quit' to exit the shell.");
487 println!(" - Type any command to execute it.");
488 println!(" - Use 'init' to set up your API key.");
489 println!(" - Use 'clear' to clear conversation context.");
490 println!(" - Type 'models' to list and select available Gemini models interactively.");
491 Ok(())
492 }
493
    /// The interactive REPL: prompts with the colorized working directory,
    /// dispatches built-ins (`exit`/`quit`, `clear`, `init`, `help`,
    /// `models`), and forwards everything else to `process_command`.
    ///
    /// Saves readline history on exit. Ctrl-C re-prompts; Ctrl-D exits.
    pub async fn run(&mut self) -> Result<(), Box<dyn Error>> {
        println!("🤖 Welcome to NexSh!");

        loop {
            // Rebuild the prompt each iteration so `cd`-like changes to the
            // process CWD are reflected; path segments are cyan, separators dim.
            let current_dir = std::env::current_dir()?.display().to_string();
            let prompt = format!(
                "→ {} {} ",
                current_dir
                    .split(std::path::MAIN_SEPARATOR)
                    .map(|s| s.bright_cyan().to_string())
                    .collect::<Vec<_>>()
                    .join(&format!(
                        "{}",
                        std::path::MAIN_SEPARATOR.to_string().bright_black()
                    )),
                "NexSh →".green()
            );
            match self.editor.readline(&prompt) {
                Ok(line) => {
                    let input = line.trim();
                    if input.is_empty() {
                        continue;
                    }

                    // Interactive model picker: accepts a 1-based index or a
                    // name prefix; Enter cancels, unmatched input falls back
                    // to the first listed model.
                    if input == "models" {
                        let models = list_available_models();
                        println!("Available Gemini models:");
                        for (i, m) in models.iter().enumerate() {
                            println!(" {}. {}", i + 1, m);
                        }
                        let input = self
                            .editor
                            .readline("Select model by number or name (Enter to cancel): ")
                            .unwrap_or_default();
                        let model = input.trim();
                        if !model.is_empty() {
                            let selected = if let Ok(idx) = model.parse::<usize>() {
                                models
                                    .get(idx.saturating_sub(1))
                                    .copied()
                                    .unwrap_or(models[0])
                            } else {
                                models
                                    .iter()
                                    .find(|m| m.starts_with(model))
                                    .copied()
                                    .unwrap_or(models[0])
                            };
                            if let Err(e) = self.set_model(selected) {
                                eprintln!("{} {}", "error:".red(), e);
                            }
                        }
                        continue;
                    }

                    match input {
                        "exit" | "quit" => break,
                        "clear" => self.clear_context()?,
                        "init" => self.initialize()?,
                        "help" => self.print_help()?,
                        _ => {
                            // AI errors are reported but never end the session.
                            if let Err(e) = self.process_command(input).await {
                                eprintln!("{} {}", "error:".red(), e);
                            }
                        }
                    }
                }
                Err(ReadlineError::Interrupted) => {
                    // Ctrl-C: stay in the shell.
                    println!("Use 'exit' to quit");
                    continue;
                }
                Err(ReadlineError::Eof) => {
                    // Ctrl-D: exit cleanly.
                    break;
                }
                Err(err) => {
                    eprintln!("Error: {}", err);
                    break;
                }
            }
        }

        self.editor.save_history(&self.history_file)?;
        Ok(())
    }
578}