llm/
llm.rs

use blockless_sdk::llm::*;

/// This example demonstrates how to use the Blockless SDK to interact with
/// two different LLM models.
///
/// It sets up two instances of the `BlocklessLlm` struct. Each instance is
/// configured with a system message that tells the assistant which name to
/// use on each successive request. The example then sends chat requests to
/// both models and prints their responses, demonstrating that each instance
/// maintains its own conversation state between requests.
fn main() {
    // Large model.
    let mut llm = BlocklessLlm::new(Models::Mistral7BInstructV03(None)).unwrap();

    // Small model.
    let mut llm_small = BlocklessLlm::new(Models::Llama321BInstruct(None)).unwrap();

    let prompt = r#"
    You are a helpful assistant.
    The first time I ask, your name will be Lucy.
    The second time I ask, your name will be Bob.
    "#;
    llm.set_options(LlmOptions::default().with_system_message(prompt.to_string()))
        .unwrap();

    let response = llm.chat_request("What is your name?").unwrap();
    println!("llm Response: {}", response);

    let prompt_smol = r#"
    You are a helpful assistant.
    The first time I ask, your name will be Daisy.
    The second time I ask, your name will be Hector.
    "#;
    llm_small
        .set_options(LlmOptions::default().with_system_message(prompt_smol.to_string()))
        .unwrap();

    let response = llm_small.chat_request("What is your name?").unwrap();
    println!("llm_small Response: {}", response);

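    // Ask the small model again: per its system message, it should now answer
    // with the second name, showing the instance keeps conversation state.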
    let response = llm_small.chat_request("What is your name?").unwrap();
    println!("llm_small Response: {}", response);

    // Ask the large model again to test that the host/runtime reuses the same
    // instance (and its conversation history) for this model.
    let response = llm.chat_request("What is your name?").unwrap();
    println!("llm Response: {}", response);
}
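
// A minimal sketch of the same flow without `unwrap()`, for callers that want
// to recover from failures instead of panicking. The SDK's error types are
// only known here to implement `Debug` (the example above unwraps them), so
// they are printed with `{:?}`; the helper name `chat_checked` and the exact
// handling are assumptions, not documented SDK behavior.
#[allow(dead_code)]
fn chat_checked() {
    let mut llm = match BlocklessLlm::new(Models::Llama321BInstruct(None)) {
        Ok(llm) => llm,
        Err(e) => {
            eprintln!("failed to initialize model: {:?}", e);
            return;
        }
    };
    match llm.chat_request("What is your name?") {
        Ok(response) => println!("Response: {}", response),
        Err(e) => eprintln!("chat request failed: {:?}", e),
    }
}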