// lc/cli/chat.rs

1//! Chat functionality commands
2
3use anyhow::Result;
4use colored::Colorize;
5use std::io::{self, Write};
6use uuid::Uuid;
7
8use crate::config::Config;
9use crate::core::chat;
10use crate::database::Database;
11use crate::provider::{Message, MessageContent, ContentPart, ImageUrl};
12use crate::utils::{
13    cli_utils::resolve_model_and_provider,
14    input::MultiLineInput,
15};
16
17/// Handle chat command - interactive chat mode
18pub async fn handle(
19    model: Option<String>,
20    provider: Option<String>,
21    cid: Option<String>,
22    tools: Option<String>,
23    database: Option<String>,
24    debug: bool,
25    has_images: bool,
26    stream: bool,
27) -> Result<()> {
28    // Set debug mode if requested
29    if debug {
30        crate::cli::set_debug_mode(true);
31    }
32    
33    let config = Config::load()?;
34    let db = Database::new()?;
35
36    // Determine session ID
37    let session_id = cid.unwrap_or_else(|| {
38        let new_id = Uuid::new_v4().to_string();
39        db.set_current_session_id(&new_id).unwrap();
40        new_id
41    });
42
43    // Resolve provider and model
44    let (provider_name, resolved_model) = resolve_model_and_provider(&config, provider, model)?;
45    let _provider_config = config.get_provider(&provider_name)?;
46
47    let mut config_mut = config.clone();
48    let client = chat::create_authenticated_client(&mut config_mut, &provider_name).await?;
49
50    // Save config if tokens were updated
51    if config_mut.get_cached_token(&provider_name) != config.get_cached_token(&provider_name) {
52        config_mut.save()?;
53    }
54
55    // MCP tools support (placeholder for now)
56    let mcp_tools: Option<Vec<crate::provider::Tool>> = None;
57    let mcp_server_names: Vec<String> = Vec::new();
58    if tools.is_some() {
59        println!(
60            "{} MCP tools support is not yet fully implemented",
61            "⚠️".yellow()
62        );
63    }
64
65    let mut current_model = resolved_model.clone();
66
67    // Process initial images if provided (placeholder for now)
68    let mut processed_images: Vec<String> = Vec::new();
69    if has_images {
70        println!(
71            "{} Image support is not yet fully implemented",
72            "⚠️".yellow()
73        );
74    }
75
76    println!("\n{} Interactive Chat Mode", "🚀".blue());
77    println!("{} Session ID: {}", "📝".blue(), session_id);
78    println!("{} Model: {}", "🤖".blue(), current_model);
79    if !processed_images.is_empty() {
80        println!("{} Initial images: {}", "🖼️".blue(), processed_images.len());
81    }
82    if mcp_tools.is_some() && !mcp_server_names.is_empty() {
83        println!(
84            "{} Tools: {} (from MCP servers: {})",
85            "🔧".blue(),
86            mcp_tools.as_ref().unwrap().len(),
87            mcp_server_names.join(", ")
88        );
89    }
90    println!("{} Type /help for commands, /exit to quit", "💡".yellow());
91    println!(
92        "{} Use Shift+Enter or Ctrl+J for multi-line input, Enter to send\n",
93        "💡".yellow()
94    );
95
96    // Create multi-line input handler
97    let mut input_handler = MultiLineInput::new();
98
99    loop {
100        // Use multi-line input handler
101        let input_string = match input_handler.read_input(&format!("{}", "You:".bold().green())) {
102            Ok(input_text) => input_text.trim().to_string(),
103            Err(_) => {
104                // If there's an error with multi-line input, fall back to simple input
105                print!("{} ", "You:".bold().green());
106                io::stdout().flush()?;
107
108                let mut fallback_input = String::new();
109                let bytes_read = io::stdin().read_line(&mut fallback_input)?;
110
111                // If we read 0 bytes, it means EOF (e.g., when input is piped)
112                if bytes_read == 0 {
113                    println!("Goodbye! 👋");
114                    break;
115                }
116
117                fallback_input.trim().to_string()
118            }
119        };
120
121        if input_string.is_empty() {
122            continue;
123        }
124
125        let input = input_string.as_str();
126
127        // Handle chat commands
128        if input.starts_with('/') {
129            match input {
130                "/exit" | "/quit" => {
131                    println!("Goodbye! 👋");
132                    break;
133                }
134                "/clear" => {
135                    db.clear_session(&session_id)?;
136                    println!("{} Session cleared", "✓".green());
137                    continue;
138                }
139                "/help" => {
140                    println!("\n{}", "Available Commands:".bold().blue());
141                    println!("  /exit, /quit     - Exit chat session");
142                    println!("  /clear           - Clear current session");
143                    println!("  /model <name>    - Change model");
144                    println!("  /system <prompt> - Set system prompt");
145                    println!("  /help            - Show this help");
146                    println!("\n{}", "Input Controls:".bold().blue());
147                    println!("  Enter            - Send message");
148                    println!("  Shift+Enter      - New line (multi-line input)");
149                    println!("  Ctrl+J           - New line (alternative)");
150                    println!("  Ctrl+C           - Cancel current input\n");
151                    continue;
152                }
153                _ if input.starts_with("/model ") => {
154                    let new_model = input.strip_prefix("/model ").unwrap().trim();
155                    if !new_model.is_empty() {
156                        current_model = new_model.to_string();
157                        println!("{} Model changed to: {}", "✓".green(), current_model);
158                    } else {
159                        println!("{} Please specify a model name", "✗".red());
160                    }
161                    continue;
162                }
163                _ if input.starts_with("/system ") => {
164                    let new_system = input.strip_prefix("/system ").unwrap().trim();
165                    if !new_system.is_empty() {
166                        // TODO: Store and use the system prompt in the config_mut
167                        println!("{} System prompt updated", "✓".green());
168                    } else {
169                        println!("{} Please specify a system prompt", "✗".red());
170                    }
171                    continue;
172                }
173                _ => {
174                    println!(
175                        "{} Unknown command. Type /help for available commands",
176                        "✗".red()
177                    );
178                    continue;
179                }
180            }
181        }
182
183        // Send chat message
184        let history = db.get_chat_history(&session_id)?;
185
186        // RAG support (placeholder for now)
187        let enhanced_input = input.to_string();
188        if database.is_some() {
189            println!(
190                "{} Vector database RAG support is not yet fully implemented",
191                "⚠️".yellow()
192            );
193        }
194
195        // Create messages with images if we have initial images
196        let messages = if !processed_images.is_empty() {
197            // Build history messages first
198            let mut msgs: Vec<Message> = history
199                .iter()
200                .flat_map(|entry| {
201                    vec![
202                        Message::user(entry.question.clone()),
203                        Message::assistant(entry.response.clone()),
204                    ]
205                })
206                .collect();
207
208            // Add current message with images
209            let mut content_parts = vec![ContentPart::Text {
210                text: enhanced_input.clone(),
211            }];
212
213            // Add each image as a content part
214            for image_url in &processed_images {
215                content_parts.push(ContentPart::ImageUrl {
216                    image_url: ImageUrl {
217                        url: image_url.clone(),
218                        detail: Some("auto".to_string()),
219                    },
220                });
221            }
222
223            msgs.push(Message {
224                role: "user".to_string(),
225                content_type: MessageContent::Multimodal {
226                    content: content_parts,
227                },
228                tool_calls: None,
229                tool_call_id: None,
230            });
231
232            msgs
233        } else {
234            Vec::new()
235        };
236
237        // Add newline before "Thinking..." to ensure proper positioning after multi-line input
238        println!();
239        print!("{}", "Thinking...".dimmed());
240        io::stdout().flush()?;
241
242        let resolved_system_prompt = if let Some(system_prompt) = &config.system_prompt {
243            Some(config.resolve_template_or_prompt(system_prompt))
244        } else {
245            None
246        };
247
248        // Determine if streaming should be used (default to true for interactive chat)
249        let mut use_streaming = stream || config.stream.unwrap_or(true);
250
251        // Disable streaming for certain providers
252        if use_streaming {
253            if let Ok(pcfg) = config.get_provider(&provider_name) {
254                let is_gemini_like = pcfg
255                    .endpoint
256                    .to_lowercase()
257                    .contains("generativelanguage.googleapis.com");
258                if is_gemini_like {
259                    use_streaming = false;
260                }
261            }
262        }
263
264        // Handle tool execution, streaming, or regular chat
265        if mcp_tools.is_some() && !mcp_server_names.is_empty() {
266            // Tool execution (not yet fully implemented)
267            print!("\r{}\r", " ".repeat(12)); // Clear "Thinking..."
268            println!(
269                "{} Tool execution is not yet fully implemented",
270                "⚠️".yellow()
271            );
272            continue;
273        } else if use_streaming {
274            // Use streaming chat
275            print!("\r{}\r{} ", " ".repeat(12), "Assistant:".bold().blue());
276            io::stdout().flush()?;
277            
278            let result = if !messages.is_empty() {
279                chat::send_chat_request_with_streaming_messages(
280                    &client,
281                    &current_model,
282                    &messages,
283                    resolved_system_prompt.as_deref(),
284                    config.max_tokens,
285                    config.temperature,
286                    &provider_name,
287                    None,
288                )
289                .await
290            } else {
291                chat::send_chat_request_with_streaming(
292                    &client,
293                    &current_model,
294                    &enhanced_input,
295                    &history,
296                    resolved_system_prompt.as_deref(),
297                    config.max_tokens,
298                    config.temperature,
299                    &provider_name,
300                    None,
301                )
302                .await
303            };
304
305            match result {
306                Ok(_) => {
307                    // Streaming completed successfully
308                    println!();
309
310                    // Save to database with placeholder since the actual response was streamed
311                    if let Err(e) = db.save_chat_entry_with_tokens(
312                        &session_id,
313                        &current_model,
314                        &input,
315                        "[Streamed Response]",
316                        None,
317                        None,
318                    ) {
319                        eprintln!("Warning: Failed to save chat entry: {}", e);
320                    }
321
322                    // Clear processed images after first use
323                    if !processed_images.is_empty() {
324                        processed_images.clear();
325                    }
326                }
327                Err(e) => {
328                    println!("\n{} Error: {}", "✗".red(), e);
329                }
330            }
331        } else {
332            // Use regular chat
333            let result = if !messages.is_empty() {
334                chat::send_chat_request_with_validation_messages(
335                    &client,
336                    &current_model,
337                    &messages,
338                    resolved_system_prompt.as_deref(),
339                    config.max_tokens,
340                    config.temperature,
341                    &provider_name,
342                    None,
343                )
344                .await
345            } else {
346                chat::send_chat_request_with_validation(
347                    &client,
348                    &current_model,
349                    &enhanced_input,
350                    &history,
351                    resolved_system_prompt.as_deref(),
352                    config.max_tokens,
353                    config.temperature,
354                    &provider_name,
355                    None,
356                )
357                .await
358            };
359
360            match result {
361                Ok((response, input_tokens, output_tokens)) => {
362                    print!("\r{}\r", " ".repeat(12)); // Clear "Thinking..."
363                    println!("{} {}", "Assistant:".bold().blue(), response);
364
365                    // Save to database with token counts
366                    if let Err(e) = db.save_chat_entry_with_tokens(
367                        &session_id,
368                        &current_model,
369                        &input,
370                        &response,
371                        input_tokens,
372                        output_tokens,
373                    ) {
374                        eprintln!("Warning: Failed to save chat entry: {}", e);
375                    }
376
377                    // Clear processed images after first use
378                    if !processed_images.is_empty() {
379                        processed_images.clear();
380                    }
381                }
382                Err(e) => {
383                    print!("\r{}\r", " ".repeat(12)); // Clear "Thinking..."
384                    println!("{} Error: {}", "✗".red(), e);
385                }
386            }
387        }
388
389        println!(); // Add spacing
390    }
391
392    Ok(())
393}