lora/main.rs

1use anyhow::Result;
2use mistralrs::{LoraModelBuilder, TextMessageRole, TextMessages, TextModelBuilder};
3
4#[tokio::main]
5async fn main() -> Result<()> {
6    let model = LoraModelBuilder::from_text_model_builder(
7        TextModelBuilder::new("meta-llama/Llama-3.2-1B-Instruct").with_logging(),
8        vec!["danielhanchen/llama-3.2-lora".to_string()],
9    )
10    .build()
11    .await?;
12
13    let messages = TextMessages::new().add_message(
14        TextMessageRole::User,
15        "Hello! How are you? Please write generic binary search function in Rust.",
16    );
17
18    let response = model.send_chat_request(messages).await?;
19
20    println!("{}", response.choices[0].message.content.as_ref().unwrap());
21    dbg!(
22        response.usage.avg_prompt_tok_per_sec,
23        response.usage.avg_compl_tok_per_sec
24    );
25
26    Ok(())
27}