use crate::{
    config::Config,
    core::chat::{
        create_authenticated_client, send_chat_request_with_streaming,
        send_chat_request_with_validation,
    },
    database::Database,
    debug_log,
};
use anyhow::Result;

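/// Handles a direct (non-interactive) prompt: resolves the provider and
/// model, optionally augments the prompt with web-search context and MCP
/// tools, then sends a streaming or non-streaming chat request.
#[allow(clippy::too_many_arguments)]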
pub async fn handle_direct(
    prompt: String,
    provider: Option<String>,
    model: Option<String>,
    system_prompt: Option<String>,
    max_tokens: Option<String>,
    temperature: Option<String>,
    _attachments: Vec<String>,
    _images: Vec<String>,
    _audio_files: Vec<String>,
    tools: Option<String>,
    _vectordb: Option<String>,
    use_search: Option<String>,
    stream: bool,
) -> Result<()> {
    debug_log!(
        "Handling direct prompt - provider: {:?}, model: {:?}, prompt length: {}",
        provider,
        model,
        prompt.len()
    );
    debug_log!(
        "Request options - max_tokens: {:?}, temperature: {:?}, stream: {}",
        max_tokens,
        temperature,
        stream
    );

    let mut config = Config::load()?;

    let (provider_name, model_name) = determine_provider_and_model(&config, provider, model)?;

    debug_log!(
        "Using provider: '{}', model: '{}'",
        provider_name,
        model_name
    );

    debug_log!(
        "Creating authenticated client for provider '{}'",
        provider_name
    );
    let client = create_authenticated_client(&mut config, &provider_name).await?;

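    // Parse numeric options; values that fail to parse are treated as unset.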
    let max_tokens_parsed = max_tokens.as_ref().and_then(|s| s.parse().ok());
    let temperature_parsed = temperature.as_ref().and_then(|s| s.parse().ok());

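    // Strip a `provider:` prefix from the model name before sending it to the API.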
    let api_model_name = match model_name.split_once(':') {
        Some((_, name)) => name.to_string(),
        None => model_name.clone(),
    };

    debug_log!("Using API model name: '{}'", api_model_name);

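    // When search is requested, run it first and prepend the results as
    // context. The spec is either `provider:query` or a bare provider name,
    // in which case the prompt itself becomes the query.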
    let final_prompt = if let Some(search_spec) = use_search {
        debug_log!("Processing search with spec: {}", search_spec);

        let (search_provider, search_query) = match search_spec.split_once(':') {
            Some((p, q)) => (p.to_string(), q.to_string()),
            None => (search_spec, prompt.clone()),
        };

        debug_log!("Search provider: '{}', query: '{}'", search_provider, search_query);

        let search_engine = crate::search::SearchEngine::new()?;
        let search_results = search_engine.search(&search_provider, &search_query, Some(5)).await?;

        let search_context = search_engine.extract_context_for_llm(&search_results, 5);

        let combined_prompt = format!("{}\n\nUser's question: {}", search_context, prompt);

        debug_log!("Added search context, combined prompt length: {}", combined_prompt.len());
        println!(
            "🔍 Search completed: {} results from {}\n",
            search_results.results.len(),
            search_provider
        );

        combined_prompt
    } else {
        prompt.clone()
    };

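    // Fetch MCP tool definitions when tool servers were requested.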
    let (mcp_tools, mcp_server_names) = if let Some(tools_str) = &tools {
        crate::core::tools::fetch_mcp_tools(tools_str).await?
    } else {
        (None, Vec::new())
    };

    if stream {
        debug_log!("Sending streaming chat request");
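        // A session is still ensured for streaming requests, even though the
        // streamed response itself is not saved to history (see note below).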
        let db = Database::new()?;
        let _session_id = match db.get_current_session_id()? {
            Some(id) => {
                debug_log!("Using existing session for streaming: {}", id);
                id
            }
            None => {
                let new_session_id = uuid::Uuid::new_v4().to_string();
                db.set_current_session_id(&new_session_id)?;
                debug_log!("Created new session for streaming: {}", new_session_id);
                new_session_id
            }
        };

        send_chat_request_with_streaming(
            &client,
            &api_model_name,
            &final_prompt,
            &[],
            system_prompt.as_deref(),
            max_tokens_parsed,
            temperature_parsed,
            &provider_name,
            mcp_tools.clone(),
        )
        .await?;

        eprintln!("\nNote: Streaming responses are not saved to conversation history.");
    } else {
        debug_log!("Sending non-streaming chat request");

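        // Route through the tool-execution path when MCP tools were
        // requested; otherwise send a plain validated request.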
        let (response, input_tokens, output_tokens) =
            if mcp_tools.is_some() && !mcp_server_names.is_empty() {
                let server_refs: Vec<&str> =
                    mcp_server_names.iter().map(|s| s.as_str()).collect();

                crate::core::chat::send_chat_request_with_tool_execution(
                    &client,
                    &api_model_name,
                    &final_prompt,
                    &[],
                    system_prompt.as_deref(),
                    max_tokens_parsed,
                    temperature_parsed,
                    &provider_name,
                    mcp_tools.clone(),
                    &server_refs,
                )
                .await?
            } else {
                send_chat_request_with_validation(
                    &client,
                    &api_model_name,
                    &final_prompt,
                    &[],
                    system_prompt.as_deref(),
                    max_tokens_parsed,
                    temperature_parsed,
                    &provider_name,
                    mcp_tools.clone(),
                )
                .await?
            };

        println!("{}", response);

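        // Persist the exchange; a failed save is logged but does not fail the command.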
        if let Err(e) = save_to_database(
            &prompt,
            &response,
            &provider_name,
            &api_model_name,
            input_tokens,
            output_tokens,
        )
        .await
        {
            debug_log!("Failed to save to database: {}", e);
        }
    }

    Ok(())
}

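/// Treats piped stdin content as a direct prompt. Attachments, tools, vector
/// databases, and search are not supported for piped input, so those options
/// are dropped before delegating to `handle_direct`.
#[allow(clippy::too_many_arguments)]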
pub async fn handle_with_piped_input(
    prompt: String,
    provider: Option<String>,
    model: Option<String>,
    system_prompt: Option<String>,
    max_tokens: Option<String>,
    temperature: Option<String>,
    _attachments: Vec<String>,
    _images: Vec<String>,
    _audio_files: Vec<String>,
    _tools: Option<String>,
    _vectordb: Option<String>,
    _use_search: Option<String>,
    stream: bool,
) -> Result<()> {
    debug_log!("Handling piped input as direct prompt");
    handle_direct(
        prompt,
        provider,
        model,
        system_prompt,
        max_tokens,
        temperature,
        vec![],
        vec![],
        vec![],
        None,
        None,
        None,
        stream,
    )
    .await
}

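/// Resolves the provider and model to use, in order of precedence:
/// 1. a configured alias (which may carry its own `provider:` prefix),
/// 2. an explicit `provider:model` argument,
/// 3. the given (or default) provider paired with its default model.
///
/// The returned model name is always in `provider:model` form. An
/// illustrative sketch, assuming a `Config` with no aliases defined:
///
/// ```ignore
/// let (p, m) = determine_provider_and_model(&config, None, Some("openai:gpt-4o".into()))?;
/// assert_eq!((p.as_str(), m.as_str()), ("openai", "openai:gpt-4o"));
/// ```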
fn determine_provider_and_model(
    config: &Config,
    provider: Option<String>,
    model: Option<String>,
) -> Result<(String, String)> {
    debug_log!(
        "Determining provider and model - provider: {:?}, model: {:?}",
        provider,
        model
    );

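    // 1. A configured alias takes precedence; it may resolve to a
    //    `provider:model` target that also fixes the provider.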
    if let Some(ref m) = model {
        if let Some(alias_target) = config.get_alias(m) {
            debug_log!("Resolved alias '{}' to '{}'", m, alias_target);
            if let Some((provider_from_alias, _)) = alias_target.split_once(':') {
                let provider_from_alias = provider_from_alias.to_string();
                let model_from_alias = alias_target.clone();

                if let Some(ref p) = provider {
                    if p != &provider_from_alias {
                        anyhow::bail!(
                            "Provider mismatch: -p {} conflicts with alias '{}' which maps to {}",
                            p,
                            m,
                            alias_target
                        );
                    }
                }

                debug_log!(
                    "Using provider '{}' and model '{}' from alias",
                    provider_from_alias,
                    model_from_alias
                );
                return Ok((provider_from_alias, model_from_alias));
            }
        }
    }

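    // 2. A `provider:model` argument fixes the provider directly.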
    if let Some(ref m) = model {
        if let Some((provider_from_model, _)) = m.split_once(':') {
            let provider_from_model = provider_from_model.to_string();
            let model_name = m.clone();

            if let Some(ref p) = provider {
                if p != &provider_from_model {
                    anyhow::bail!(
                        "Provider mismatch: -p {} conflicts with model prefix {}",
                        p,
                        provider_from_model
                    );
                }
            }

            debug_log!(
                "Extracted provider '{}' from model '{}'",
                provider_from_model,
                model_name
            );
            return Ok((provider_from_model, model_name));
        }
    }

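    // 3. Otherwise fall back to the given provider (default "openai") and
    //    that provider's default model.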
    let provider_name = provider.unwrap_or_else(|| "openai".to_string());

    let model_name = model.unwrap_or_else(|| match provider_name.as_str() {
        "openai" => "gpt-4o-mini".to_string(),
        "anthropic" | "claude" => "claude-3-5-sonnet-20241022".to_string(),
        "gemini" => "gemini-1.5-flash".to_string(),
        _ => "gpt-3.5-turbo".to_string(),
    });

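    // Normalize the result to `provider:model` form.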
    let final_model = if model_name.contains(':') {
        model_name
    } else {
        format!("{}:{}", provider_name, model_name)
    };

    debug_log!(
        "Final provider: '{}', model: '{}'",
        provider_name,
        final_model
    );
    Ok((provider_name, final_model))
}

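/// Saves a prompt/response pair (with token counts) to the current session,
/// creating a new session if none is active.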
async fn save_to_database(
    prompt: &str,
    response: &str,
    _provider: &str,
    model: &str,
    input_tokens: Option<i32>,
    output_tokens: Option<i32>,
) -> Result<()> {
    let db = Database::new()?;

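    // Reuse the current session if one is set; otherwise start a new one.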
    let session_id = match db.get_current_session_id()? {
        Some(id) => {
            debug_log!("Using existing session: {}", id);
            id
        }
        None => {
            let new_session_id = uuid::Uuid::new_v4().to_string();
            db.set_current_session_id(&new_session_id)?;
            debug_log!("Created new session: {}", new_session_id);
            new_session_id
        }
    };

    db.save_chat_entry_with_tokens(
        &session_id,
        model,
        prompt,
        response,
        input_tokens,
        output_tokens,
    )?;

    debug_log!("Saved chat entry to database with session: {}", session_id);

    Ok(())
}