use aquaregia::{GenerateTextRequest, LlmClient};
/// Fallback API endpoint, used when the `DEEPSEEK_BASE_URL` env var is unset.
const DEFAULT_DEEPSEEK_BASE_URL: &str = "https://api.deepseek.com";
/// Fallback model id, used when the `DEEPSEEK_MODEL` env var is unset.
const DEFAULT_DEEPSEEK_MODEL: &str = "deepseek-chat";
#[tokio::main]
/// One-shot text-generation example against a DeepSeek (OpenAI-compatible)
/// endpoint.
///
/// Configuration is read from the environment:
/// - `DEEPSEEK_API_KEY` — required; the request will fail without it.
/// - `DEEPSEEK_BASE_URL` — optional, defaults to `DEFAULT_DEEPSEEK_BASE_URL`.
/// - `DEEPSEEK_MODEL` — optional, defaults to `DEFAULT_DEEPSEEK_MODEL`.
///
/// Prints the model's reply, the finish reason, and token usage to stdout.
/// Errors (missing key, client build failure, request failure) propagate via
/// `?` into the boxed error return.
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // The bare `?` on `env::var` would surface the unhelpful
    // "environment variable not found" without naming the variable;
    // map it to a message the user can act on. `&str` converts into
    // `Box<dyn Error>` via the blanket `From` impl.
    let api_key = std::env::var("DEEPSEEK_API_KEY")
        .map_err(|_| "environment variable DEEPSEEK_API_KEY is not set")?;
    // Optional overrides with sensible defaults.
    let base_url = std::env::var("DEEPSEEK_BASE_URL")
        .unwrap_or_else(|_| DEFAULT_DEEPSEEK_BASE_URL.to_string());
    let model =
        std::env::var("DEEPSEEK_MODEL").unwrap_or_else(|_| DEFAULT_DEEPSEEK_MODEL.to_string());

    let client = LlmClient::openai_compatible(base_url)
        .api_key(api_key)
        .build()?;

    let prompt = r#"
You are a senior Rust reviewer.
Summarize the key ownership/lifetime pitfalls in 5 bullet points,
and give one quick fix tip for each point.
"#;

    let response = client
        .generate(GenerateTextRequest::from_user_prompt(model, prompt))
        .await?;

    println!("=== one-shot result ===");
    println!("{}", response.output_text);
    println!("\nfinish_reason: {:?}", response.finish_reason);
    println!(
        "usage: input={} output={} total={}",
        response.usage.input_tokens, response.usage.output_tokens, response.usage.total_tokens
    );
    Ok(())
}