// vtcode_core/commands/ask.rs
use crate::config::models::ModelId;
use crate::config::types::AgentConfig;
use crate::gemini::models::SystemInstruction;
use crate::gemini::{Content, GenerateContentRequest};
use crate::llm::make_client;
use crate::prompts::{generate_lightweight_instruction, read_system_prompt_from_md};
use anyhow::Result;
11pub async fn handle_ask_command(config: AgentConfig, prompt: Vec<String>) -> Result<()> {
13 let model_id = config
14 .model
15 .parse::<ModelId>()
16 .map_err(|_| anyhow::anyhow!("Invalid model: {}", config.model))?;
17 let mut client = make_client(config.api_key.clone(), model_id);
18 let prompt_text = prompt.join(" ");
19
20 if config.verbose {
21 println!("Sending prompt to {}: {}", config.model, prompt_text);
22 }
23
24 let contents = vec![Content::user_text(prompt_text)];
25 let lightweight_instruction = generate_lightweight_instruction();
26
27 let system_instruction = if let Some(part) = lightweight_instruction.parts.first() {
29 if let Some(text) = part.as_text() {
30 SystemInstruction::new(text)
31 } else {
32 SystemInstruction::new(
33 read_system_prompt_from_md()
34 .unwrap_or_else(|_| "You are a helpful coding assistant.".to_string()),
35 )
36 }
37 } else {
38 SystemInstruction::new(
39 read_system_prompt_from_md()
40 .unwrap_or_else(|_| "You are a helpful coding assistant.".to_string()),
41 )
42 };
43
44 let request = GenerateContentRequest {
45 contents,
46 tools: None,
47 tool_config: None,
48 generation_config: None,
49 system_instruction: Some(system_instruction),
50 reasoning_config: None,
51 };
52
53 let prompt = request
55 .contents
56 .iter()
57 .map(|content| {
58 content
59 .parts
60 .iter()
61 .map(|part| match part {
62 crate::gemini::Part::Text { text } => text.clone(),
63 _ => String::new(),
64 })
65 .collect::<Vec<_>>()
66 .join("\n")
67 })
68 .collect::<Vec<_>>()
69 .join("\n\n");
70
71 let response = client.generate(&prompt).await?;
72
73 println!("{}", response.content);
75
76 Ok(())
77}