use clap_noun_verb::Result;
use clap_noun_verb_macros::verb;
use futures::StreamExt;
use ggen_ai::config::get_global_config;
use serde::Serialize;
use std::path::PathBuf;

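/// Output of the `generate` verb: the generated code plus model and usage metadata.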
#[derive(Serialize)]
pub struct GenerateOutput {
    generated_code: String,
    language: Option<String>,
    tokens_used: Option<usize>,
    model: String,
    finish_reason: Option<String>,
}

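/// A single message in a chat transcript, with its role ("user" or "assistant") and text content.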
#[derive(Serialize)]
pub struct ChatMessage {
    role: String,
    content: String,
}

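/// Output of the `chat` verb: the accumulated message transcript plus session metadata.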
#[derive(Serialize)]
pub struct ChatOutput {
    messages: Vec<ChatMessage>,
    session_id: String,
    model: String,
    tokens_used: Option<usize>,
}

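/// Output of the `analyze` verb: parsed insights and suggestions, an optional
/// heuristic complexity score, and model metadata.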
#[derive(Serialize)]
pub struct AnalyzeOutput {
    file_path: Option<String>,
    insights: Vec<String>,
    suggestions: Vec<String>,
    complexity_score: Option<f64>,
    model: String,
    tokens_used: Option<usize>,
}

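/// Generate code from a natural-language prompt using the configured AI provider.
///
/// Optional existing `code`, a target `language`, and the `suggestions` flag are folded
/// into the prompt; `model`, `max_tokens`, and `temperature` override the global
/// configuration for this invocation.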
#[allow(clippy::too_many_arguments)]
#[verb]
fn generate(
    prompt: String, code: Option<String>, model: Option<String>, _api_key: Option<String>,
    suggestions: bool, language: Option<String>, max_tokens: i64, temperature: f64,
) -> Result<GenerateOutput> {
    crate::runtime::block_on(async move {
        let mut global_config = get_global_config().clone();

        // Apply per-invocation overrides on top of the global configuration.
        if let Some(model_name) = &model {
            global_config.settings.default_model = Some(model_name.clone());
            if let Some(provider_config) = global_config.providers.get_mut(&global_config.provider)
            {
                provider_config.model = model_name.clone();
            }
        }

        global_config.settings.default_max_tokens = Some(max_tokens as u32);
        global_config.settings.default_temperature = Some(temperature as f32);
        if let Some(provider_config) = global_config.providers.get_mut(&global_config.provider) {
            provider_config.max_tokens = Some(max_tokens as u32);
            provider_config.temperature = Some(temperature as f32);
        }

        let client = global_config.create_contextual_client().map_err(|e| {
            clap_noun_verb::NounVerbError::execution_error(format!(
                "Failed to create AI client: {}",
                e
            ))
        })?;

        // Build the full prompt from the base prompt plus optional context.
        let mut full_prompt = prompt.clone();

        if let Some(lang) = &language {
            full_prompt.push_str(&format!("\nTarget language: {}", lang));
        }

        if let Some(code) = &code {
            full_prompt.push_str(&format!("\n\nExisting code:\n```\n{}\n```", code));
        }

        if suggestions {
            full_prompt.push_str("\n\nInclude suggestions for improvements and best practices.");
        }

        let response = client.complete(&full_prompt).await.map_err(|e| {
            clap_noun_verb::NounVerbError::execution_error(format!("AI generation failed: {}", e))
        })?;

        Ok(GenerateOutput {
            generated_code: response.content,
            language: language.clone(),
            tokens_used: response.usage.map(|u| u.total_tokens as usize),
            model: client.get_config().model.clone(),
            finish_reason: response.finish_reason,
        })
    })
}

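/// Chat with the configured AI model, either as a one-shot exchange via `message` or
/// as an interactive session (`interactive`), optionally streaming responses (`stream`).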
#[verb]
fn chat(
    message: Option<String>, model: Option<String>, _api_key: Option<String>, interactive: bool,
    stream: bool, max_tokens: i64, temperature: f64,
) -> Result<ChatOutput> {
    use std::io::Write;

    crate::runtime::block_on(async move {
        let mut global_config = get_global_config().clone();

        // Apply per-invocation overrides on top of the global configuration.
        if let Some(model_name) = &model {
            global_config.settings.default_model = Some(model_name.clone());
            if let Some(provider_config) = global_config.providers.get_mut(&global_config.provider)
            {
                provider_config.model = model_name.clone();
            }
        }

        global_config.settings.default_max_tokens = Some(max_tokens as u32);
        global_config.settings.default_temperature = Some(temperature as f32);
        if let Some(provider_config) = global_config.providers.get_mut(&global_config.provider) {
            provider_config.max_tokens = Some(max_tokens as u32);
            provider_config.temperature = Some(temperature as f32);
        }

        let client = global_config.create_contextual_client().map_err(|e| {
            clap_noun_verb::NounVerbError::execution_error(format!(
                "Failed to create AI client: {}",
                e
            ))
        })?;

        let session_id = uuid::Uuid::new_v4().to_string();
        let mut messages: Vec<ChatMessage> = Vec::new();
        let mut total_tokens: Option<usize> = None;
        let model_name = client.get_config().model.clone();

        if interactive {
            ggen_utils::alert_info!("🤖 AI Chat - Interactive Mode");
            ggen_utils::alert_info!("Model: {}", model_name);
            ggen_utils::alert_info!("Type 'exit' or 'quit' to end the session\n");

            // Interactive loop: prompt on stderr, read a line from stdin, send it to the model.
            loop {
                eprint!("> ");
                std::io::stderr().flush().map_err(|e| {
                    clap_noun_verb::NounVerbError::execution_error(format!(
                        "Failed to flush stderr: {}",
                        e
                    ))
                })?;

                let mut input = String::new();
                std::io::stdin().read_line(&mut input).map_err(|e| {
                    clap_noun_verb::NounVerbError::execution_error(format!(
                        "Failed to read input: {}",
                        e
                    ))
                })?;

                let input = input.trim();
                if input.is_empty() {
                    continue;
                }

                if input == "exit" || input == "quit" {
                    break;
                }

                messages.push(ChatMessage {
                    role: "user".to_string(),
                    content: input.to_string(),
                });

                if stream {
                    let mut stream = client.complete_stream(input).await.map_err(|e| {
                        clap_noun_verb::NounVerbError::execution_error(format!(
                            "Streaming failed: {}",
                            e
                        ))
                    })?;

                    let mut full_response = String::new();
                    eprint!("🤖: ");
                    while let Some(chunk) = stream.next().await {
                        eprint!("{}", chunk.content);
                        std::io::stderr().flush().map_err(|e| {
                            clap_noun_verb::NounVerbError::execution_error(format!(
                                "Failed to flush stderr: {}",
                                e
                            ))
                        })?;
                        full_response.push_str(&chunk.content);

                        if let Some(usage) = chunk.usage {
                            total_tokens = Some(usage.total_tokens as usize);
                        }
                    }
                    ggen_utils::alert_info!("\n");

                    messages.push(ChatMessage {
                        role: "assistant".to_string(),
                        content: full_response,
                    });
                } else {
                    let response = client.complete(input).await.map_err(|e| {
                        clap_noun_verb::NounVerbError::execution_error(format!(
                            "Chat failed: {}",
                            e
                        ))
                    })?;

                    ggen_utils::alert_info!("🤖: {}\n", response.content);

                    if let Some(usage) = response.usage {
                        total_tokens = Some(usage.total_tokens as usize);
                    }

                    messages.push(ChatMessage {
                        role: "assistant".to_string(),
                        content: response.content,
                    });
                }
            }
        } else if let Some(msg) = message {
            messages.push(ChatMessage {
                role: "user".to_string(),
                content: msg.clone(),
            });

            if stream {
                let mut stream = client.complete_stream(&msg).await.map_err(|e| {
                    clap_noun_verb::NounVerbError::execution_error(format!(
                        "Streaming failed: {}",
                        e
                    ))
                })?;

                let mut full_response = String::new();
                while let Some(chunk) = stream.next().await {
                    eprint!("{}", chunk.content);
                    std::io::stderr().flush().map_err(|e| {
                        clap_noun_verb::NounVerbError::execution_error(format!(
                            "Failed to flush stderr: {}",
                            e
                        ))
                    })?;
                    full_response.push_str(&chunk.content);

                    if let Some(usage) = chunk.usage {
                        total_tokens = Some(usage.total_tokens as usize);
                    }
                }

                messages.push(ChatMessage {
                    role: "assistant".to_string(),
                    content: full_response,
                });
            } else {
                let response = client.complete(&msg).await.map_err(|e| {
                    clap_noun_verb::NounVerbError::execution_error(format!("Chat failed: {}", e))
                })?;

                if let Some(usage) = response.usage {
                    total_tokens = Some(usage.total_tokens as usize);
                }

                messages.push(ChatMessage {
                    role: "assistant".to_string(),
                    content: response.content,
                });
            }
        } else {
            return Err(clap_noun_verb::NounVerbError::execution_error(
                "Provide a message or use --interactive to start a chat session",
            ));
        }

        Ok(ChatOutput {
            messages,
            session_id,
            model: model_name,
            tokens_used: total_tokens,
        })
    })
}

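/// Analyze code with the configured AI model. Accepts inline `code`, a single `file`,
/// or a whole `project` directory; the `complexity`, `security`, and `performance`
/// flags add the corresponding sections to the analysis prompt.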
#[allow(clippy::too_many_arguments)]
#[verb]
fn analyze(
    code: Option<String>, file: Option<PathBuf>, project: Option<PathBuf>, model: Option<String>,
    api_key: Option<String>, complexity: bool, security: bool, performance: bool, max_tokens: i64,
) -> Result<AnalyzeOutput> {
    crate::runtime::block_on(async move {
        // Resolve the analysis target: inline code, a single file, or a whole project.
        let (code_content, file_path) = if let Some(code_str) = code {
            (code_str, None)
        } else if let Some(file_path) = &file {
            let content = std::fs::read_to_string(file_path).map_err(|e| {
                clap_noun_verb::NounVerbError::execution_error(format!(
                    "Failed to read file: {}",
                    e
                ))
            })?;
            (content, Some(file_path.display().to_string()))
        } else if let Some(project_path) = &project {
            return analyze_project(project_path, model, api_key, max_tokens as u32).await;
        } else {
            return Err(clap_noun_verb::NounVerbError::execution_error(
                "Provide code, --file, or --project to analyze",
            ));
        };

        let mut global_config = get_global_config().clone();

        if let Some(model_name) = &model {
            global_config.settings.default_model = Some(model_name.clone());
            if let Some(provider_config) = global_config.providers.get_mut(&global_config.provider)
            {
                provider_config.model = model_name.clone();
            }
        }

        // Analysis runs with a fixed low temperature (0.3).
        global_config.settings.default_max_tokens = Some(max_tokens as u32);
        global_config.settings.default_temperature = Some(0.3);
        if let Some(provider_config) = global_config.providers.get_mut(&global_config.provider) {
            provider_config.max_tokens = Some(max_tokens as u32);
            provider_config.temperature = Some(0.3);
        }

        let client = global_config.create_contextual_client().map_err(|e| {
            clap_noun_verb::NounVerbError::execution_error(format!(
                "Failed to create AI client: {}",
                e
            ))
        })?;

        let mut prompt = format!(
            "Analyze the following code and provide insights:\n\n```\n{}\n```\n\n",
            code_content
        );

        prompt.push_str("Provide:\n");
        prompt.push_str("1. Key insights about the code structure and design\n");
        prompt.push_str("2. Suggestions for improvements\n");

        if complexity {
            prompt.push_str("3. Complexity analysis (cyclomatic, cognitive)\n");
        }
        if security {
            prompt.push_str("4. Security considerations and potential vulnerabilities\n");
        }
        if performance {
            prompt.push_str("5. Performance optimization opportunities\n");
        }

        prompt.push_str("\nFormat your response with clear sections.");

        let response = client.complete(&prompt).await.map_err(|e| {
            clap_noun_verb::NounVerbError::execution_error(format!("Analysis failed: {}", e))
        })?;

        let (insights, suggestions) = parse_analysis_response(&response.content);

        let complexity_score = if complexity {
            Some(estimate_complexity(&code_content))
        } else {
            None
        };

        Ok(AnalyzeOutput {
            file_path,
            insights,
            suggestions,
            complexity_score,
            model: client.get_config().model.clone(),
            tokens_used: response.usage.map(|u| u.total_tokens as usize),
        })
    })
}

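/// Walk a project directory (up to five levels deep), collect Rust, TOML, and Markdown
/// files, and ask the model for architecture-level insights about the project.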
async fn analyze_project(
    project_path: &PathBuf, model: Option<String>, _api_key: Option<String>, max_tokens: u32,
) -> Result<AnalyzeOutput> {
    use walkdir::WalkDir;

    // Collect Rust, TOML, and Markdown files up to five directory levels deep.
    let mut source_files = Vec::new();
    for entry in WalkDir::new(project_path)
        .max_depth(5)
        .into_iter()
        .filter_map(|e| e.ok())
    {
        let path = entry.path();
        if path.is_file() {
            if let Some(ext) = path.extension() {
                if matches!(ext.to_str(), Some("rs") | Some("toml") | Some("md")) {
                    source_files.push(path.to_path_buf());
                }
            }
        }
    }

    let mut global_config = get_global_config().clone();

    if let Some(model_name) = &model {
        global_config.settings.default_model = Some(model_name.clone());
        if let Some(provider_config) = global_config.providers.get_mut(&global_config.provider) {
            provider_config.model = model_name.clone();
        }
    }

    global_config.settings.default_max_tokens = Some(max_tokens);
    global_config.settings.default_temperature = Some(0.3);
    if let Some(provider_config) = global_config.providers.get_mut(&global_config.provider) {
        provider_config.max_tokens = Some(max_tokens);
        provider_config.temperature = Some(0.3);
    }

    let client = global_config.create_contextual_client().map_err(|e| {
        clap_noun_verb::NounVerbError::execution_error(format!("Failed to create AI client: {}", e))
    })?;

    let file_list: Vec<String> = source_files
        .iter()
        .map(|p| p.display().to_string())
        .collect();

    let prompt = format!(
        "Analyze this project structure:\n\nProject: {}\n\nFiles:\n{}\n\n\
         Provide insights about:\n\
         1. Project architecture and organization\n\
         2. Code quality and design patterns\n\
         3. Suggested improvements\n\
         4. Potential issues or technical debt",
        project_path.display(),
        file_list.join("\n")
    );

    let response = client.complete(&prompt).await.map_err(|e| {
        clap_noun_verb::NounVerbError::execution_error(format!("Project analysis failed: {}", e))
    })?;

    let (insights, suggestions) = parse_analysis_response(&response.content);

    Ok(AnalyzeOutput {
        file_path: Some(project_path.display().to_string()),
        insights,
        suggestions,
        complexity_score: None,
        model: client.get_config().model.clone(),
        tokens_used: response.usage.map(|u| u.total_tokens as usize),
    })
}

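/// Split a free-form model response into (insights, suggestions) by tracking section
/// headers and collecting numbered or bulleted lines under the current section.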
fn parse_analysis_response(response: &str) -> (Vec<String>, Vec<String>) {
    let mut insights = Vec::new();
    let mut suggestions = Vec::new();

    let mut current_section = "";
    for line in response.lines() {
        let line = line.trim();

        // Track which section of the response the current line belongs to.
        if line.to_lowercase().contains("insight") {
            current_section = "insights";
            continue;
        } else if line.to_lowercase().contains("suggestion")
            || line.to_lowercase().contains("improvement")
        {
            current_section = "suggestions";
            continue;
        }

        // Collect numbered or bulleted lines, stripping the list markers.
        if !line.is_empty() && line.starts_with(|c: char| c.is_numeric() || c == '-' || c == '*') {
            let cleaned = line
                .trim_start_matches(|c: char| c.is_numeric() || c == '.' || c == '-' || c == '*')
                .trim()
                .to_string();

            match current_section {
                "insights" => insights.push(cleaned),
                "suggestions" => suggestions.push(cleaned),
                _ => {
                    insights.push(cleaned);
                }
            }
        }
    }

    // Fall back to the raw response if no structured sections were found.
    if insights.is_empty() && suggestions.is_empty() {
        insights.push(response.to_string());
    }

    (insights, suggestions)
}

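/// Estimate a complexity score from control-flow keyword, brace, and function counts.
/// This is a rough textual heuristic, not a true cyclomatic complexity calculation.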
fn estimate_complexity(code: &str) -> f64 {
    let mut complexity = 1.0;

    // Count control-flow keywords. Note: this is substring matching, so words that
    // merely contain a keyword (e.g. "format" containing "for") are also counted.
    let control_flow = ["if", "else", "match", "for", "while", "loop"];
    for keyword in &control_flow {
        complexity += code.matches(keyword).count() as f64;
    }

    // Use the opening-brace count as a coarse proxy for nesting depth.
    let nesting_level = code.matches('{').count().max(1) as f64;
    complexity *= nesting_level.log10().max(1.0);

    complexity += code.matches("fn ").count() as f64 * 0.5;

    // Cap the score at 100 and round to one decimal place.
    (complexity.min(100.0) * 10.0).round() / 10.0
}