// vtcode_core/commands/ask.rs
use crate::config::models::ModelId;
use crate::config::types::AgentConfig;
use crate::gemini::models::SystemInstruction;
use crate::gemini::{Content, GenerateContentRequest};
use crate::llm::make_client;
use crate::prompts::{generate_lightweight_instruction, read_system_prompt_from_md};
use anyhow::Result;

11pub async fn handle_ask_command(config: AgentConfig, prompt: Vec<String>) -> Result<()> {
13 let model_id = config
14 .model
15 .parse::<ModelId>()
16 .map_err(|_| anyhow::anyhow!("Invalid model: {}", config.model))?;
17 let mut client = make_client(config.api_key.clone(), model_id);
18 let prompt_text = prompt.join(" ");
19
20 if config.verbose {
21 println!("Sending prompt to {}: {}", config.model, prompt_text);
22 }
23
24 let contents = vec![Content::user_text(prompt_text)];
25 let lightweight_instruction = generate_lightweight_instruction();
26
27 let system_instruction = if let Some(part) = lightweight_instruction.parts.first() {
29 if let Some(text) = part.as_text() {
30 SystemInstruction::new(text)
31 } else {
32 SystemInstruction::new(
33 read_system_prompt_from_md()
34 .unwrap_or_else(|_| "You are a helpful coding assistant.".to_string()),
35 )
36 }
37 } else {
38 SystemInstruction::new(
39 read_system_prompt_from_md()
40 .unwrap_or_else(|_| "You are a helpful coding assistant.".to_string()),
41 )
42 };
43
44 let request = GenerateContentRequest {
45 contents,
46 tools: None,
47 tool_config: None,
48 generation_config: None,
49 system_instruction: Some(system_instruction),
50 };
51
52 let prompt = request
54 .contents
55 .iter()
56 .map(|content| {
57 content
58 .parts
59 .iter()
60 .map(|part| match part {
61 crate::gemini::Part::Text { text } => text.clone(),
62 _ => String::new(),
63 })
64 .collect::<Vec<_>>()
65 .join("\n")
66 })
67 .collect::<Vec<_>>()
68 .join("\n\n");
69
70 let response = client.generate(&prompt).await?;
71
72 println!("{}", response.content);
74
75 Ok(())
76}