llava/main.rs

use anyhow::Result;
use mistralrs::{IsqType, TextMessageRole, VisionMessages, VisionModelBuilder};

#[tokio::main]
async fn main() -> Result<()> {
    // Build the LLaVA 1.5 vision model with in-situ quantization (ISQ) and an
    // explicit Vicuna-style chat template.
    let model = VisionModelBuilder::new("llava-hf/llava-1.5-7b-hf")
        .with_isq(IsqType::Q4K)
        .with_chat_template("chat_templates/vicuna.json")
        .with_logging()
        .build()
        .await?;

    // Download an example image and decode it in memory.
    let bytes = match reqwest::blocking::get(
        "https://cdn.britannica.com/45/5645-050-B9EC0205/head-treasure-flower-disk-flowers-inflorescence-ray.jpg",
    ) {
        Ok(http_resp) => http_resp.bytes()?.to_vec(),
        Err(e) => anyhow::bail!(e),
    };
    let image = image::load_from_memory(&bytes)?;

    // Attach the image and the text prompt to a single user message.
    let messages = VisionMessages::new().add_image_message(
        TextMessageRole::User,
        "What is depicted here? Please describe the scene in detail.",
        vec![image],
        &model,
    )?;

    // Run inference, then print the model's reply and throughput statistics.
    let response = model.send_chat_request(messages).await?;

    println!("{}", response.choices[0].message.content.as_ref().unwrap());
    dbg!(
        response.usage.avg_prompt_tok_per_sec,
        response.usage.avg_compl_tok_per_sec
    );

    Ok(())
}
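
To run this example, the crate needs the dependencies that appear in the imports above: anyhow, mistralrs, image, tokio (with the macros and multi-threaded runtime features enabled for #[tokio::main]), and reqwest with its blocking feature, since the image is fetched through reqwest::blocking::get. The chat template path chat_templates/vicuna.json is resolved relative to the working directory, so the file must be present there when the binary starts.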