use blockless_sdk::llm::*;
/// Example: tool-augmented chat against a Blockless-hosted LLM.
///
/// Creates an LLM handle for a custom MLC model build, registers two
/// local SSE tool endpoints the model may call, then issues two
/// arithmetic prompts and prints each response.
///
/// # Panics
/// Panics with a step-specific message if the LLM cannot be created,
/// options cannot be applied, or either chat request fails — acceptable
/// for an example binary, and the `expect` messages identify which
/// step failed.
fn main() {
    // Custom model identifier — assumes the Blockless runtime hosts
    // this exact MLC build; TODO confirm against the deployment.
    let mut llm = BlocklessLlm::new(Models::Custom(
        "Llama-3.1-8B-Instruct-q4f16_1-MLC".to_string(),
    ))
    .expect("failed to create Blockless LLM instance");

    // Register the SSE tool servers the model can invoke during chat
    // (presumably local MCP servers providing arithmetic tools).
    llm.set_options(LlmOptions::default().with_tools_sse_urls(vec![
        "http://localhost:3001/sse".to_string(),
        "http://localhost:3002/sse".to_string(),
    ]))
    .expect("failed to set LLM options (tool SSE URLs)");

    let response = llm
        .chat_request("Add the following numbers: 1215, 2213")
        .expect("addition chat request failed");
    println!("llm Response: {}", response);

    let response = llm
        .chat_request("Multiply 1215 by 2213")
        .expect("multiplication chat request failed");
    println!("llm Response: {}", response);
}