pub struct BlocklessLlm { /* private fields */ }

Implementations§
Source§impl BlocklessLlm
impl BlocklessLlm
Source
pub fn new(model: Models) -> Result<Self, LlmErrorKind>
pub fn new(model: Models) -> Result<Self, LlmErrorKind>
Examples found in repository?
examples/llm-mcp.rs (lines 7-9)
5 fn main() {
6 // large model
7 let mut llm = BlocklessLlm::new(Models::Custom(
8 "Llama-3.1-8B-Instruct-q4f16_1-MLC".to_string(),
9 ))
10 .unwrap();
11
12 // Assume we have two tools running on different ports
13 // 1. http://localhost:3001/sse - add
14 // 2. http://localhost:3002/sse - multiply
15 llm.set_options(LlmOptions::default().with_tools_sse_urls(vec![
16 "http://localhost:3001/sse".to_string(),
17 "http://localhost:3002/sse".to_string(),
18 ]))
19 .unwrap();
20
21 let response = llm
22 .chat_request("Add the following numbers: 1215, 2213")
23 .unwrap();
24 println!("llm Response: {}", response);
25
26 let response = llm.chat_request("Multiply 1215 by 2213").unwrap();
27 println!("llm Response: {}", response);
28 }

More examples
examples/llm.rs (line 11)
9 fn main() {
10 // large model
11 let mut llm = BlocklessLlm::new(Models::Mistral7BInstructV03(None)).unwrap();
12
13 // small model
14 let mut llm_small = BlocklessLlm::new(Models::Llama321BInstruct(None)).unwrap();
15
16 let prompt = r#"
17 You are a helpful assistant.
18 First time I ask, you name will be lucy.
19 Second time I ask, you name will be bob.
20 "#;
21 llm.set_options(LlmOptions::default().with_system_message(prompt.to_string()))
22 .unwrap();
23
24 let response = llm.chat_request("What is your name?").unwrap();
25 println!("llm Response: {}", response);
26
27 let prompt_smol = r#"
28 You are a helpful assistant.
29 First time I ask, you name will be daisy.
30 Second time I ask, you name will be hector.
31 "#;
32 llm_small
33 .set_options(LlmOptions::default().with_system_message(prompt_smol.to_string()))
34 .unwrap();
35
36 let response = llm_small.chat_request("What is your name?").unwrap();
37 println!("llm_small Response: {}", response);
38
39 let response = llm_small.chat_request("What is your name?").unwrap();
40 println!("llm_small Response: {}", response);
41
42 // test if same instance is used in host/runtime
43 let response = llm.chat_request("What is your name?").unwrap();
44 println!("llm Response: {}", response);
45 }

pub fn handle(&self) -> u32
pub fn get_model(&self) -> Result<String, LlmErrorKind>
pub fn set_model(&mut self, model_name: &str) -> Result<(), LlmErrorKind>
pub fn get_options(&self) -> Result<LlmOptions, LlmErrorKind>
Source
pub fn set_options(&mut self, options: LlmOptions) -> Result<(), LlmErrorKind>
pub fn set_options(&mut self, options: LlmOptions) -> Result<(), LlmErrorKind>
Examples found in repository?
examples/llm-mcp.rs (lines 15-18)
5 fn main() {
6 // large model
7 let mut llm = BlocklessLlm::new(Models::Custom(
8 "Llama-3.1-8B-Instruct-q4f16_1-MLC".to_string(),
9 ))
10 .unwrap();
11
12 // Assume we have two tools running on different ports
13 // 1. http://localhost:3001/sse - add
14 // 2. http://localhost:3002/sse - multiply
15 llm.set_options(LlmOptions::default().with_tools_sse_urls(vec![
16 "http://localhost:3001/sse".to_string(),
17 "http://localhost:3002/sse".to_string(),
18 ]))
19 .unwrap();
20
21 let response = llm
22 .chat_request("Add the following numbers: 1215, 2213")
23 .unwrap();
24 println!("llm Response: {}", response);
25
26 let response = llm.chat_request("Multiply 1215 by 2213").unwrap();
27 println!("llm Response: {}", response);
28 }

More examples
examples/llm.rs (line 21)
9 fn main() {
10 // large model
11 let mut llm = BlocklessLlm::new(Models::Mistral7BInstructV03(None)).unwrap();
12
13 // small model
14 let mut llm_small = BlocklessLlm::new(Models::Llama321BInstruct(None)).unwrap();
15
16 let prompt = r#"
17 You are a helpful assistant.
18 First time I ask, you name will be lucy.
19 Second time I ask, you name will be bob.
20 "#;
21 llm.set_options(LlmOptions::default().with_system_message(prompt.to_string()))
22 .unwrap();
23
24 let response = llm.chat_request("What is your name?").unwrap();
25 println!("llm Response: {}", response);
26
27 let prompt_smol = r#"
28 You are a helpful assistant.
29 First time I ask, you name will be daisy.
30 Second time I ask, you name will be hector.
31 "#;
32 llm_small
33 .set_options(LlmOptions::default().with_system_message(prompt_smol.to_string()))
34 .unwrap();
35
36 let response = llm_small.chat_request("What is your name?").unwrap();
37 println!("llm_small Response: {}", response);
38
39 let response = llm_small.chat_request("What is your name?").unwrap();
40 println!("llm_small Response: {}", response);
41
42 // test if same instance is used in host/runtime
43 let response = llm.chat_request("What is your name?").unwrap();
44 println!("llm Response: {}", response);
45 }

Source
pub fn chat_request(&self, prompt: &str) -> Result<String, LlmErrorKind>
pub fn chat_request(&self, prompt: &str) -> Result<String, LlmErrorKind>
Examples found in repository?
examples/llm-mcp.rs (line 22)
5 fn main() {
6 // large model
7 let mut llm = BlocklessLlm::new(Models::Custom(
8 "Llama-3.1-8B-Instruct-q4f16_1-MLC".to_string(),
9 ))
10 .unwrap();
11
12 // Assume we have two tools running on different ports
13 // 1. http://localhost:3001/sse - add
14 // 2. http://localhost:3002/sse - multiply
15 llm.set_options(LlmOptions::default().with_tools_sse_urls(vec![
16 "http://localhost:3001/sse".to_string(),
17 "http://localhost:3002/sse".to_string(),
18 ]))
19 .unwrap();
20
21 let response = llm
22 .chat_request("Add the following numbers: 1215, 2213")
23 .unwrap();
24 println!("llm Response: {}", response);
25
26 let response = llm.chat_request("Multiply 1215 by 2213").unwrap();
27 println!("llm Response: {}", response);
28 }

More examples
examples/llm.rs (line 24)
9 fn main() {
10 // large model
11 let mut llm = BlocklessLlm::new(Models::Mistral7BInstructV03(None)).unwrap();
12
13 // small model
14 let mut llm_small = BlocklessLlm::new(Models::Llama321BInstruct(None)).unwrap();
15
16 let prompt = r#"
17 You are a helpful assistant.
18 First time I ask, you name will be lucy.
19 Second time I ask, you name will be bob.
20 "#;
21 llm.set_options(LlmOptions::default().with_system_message(prompt.to_string()))
22 .unwrap();
23
24 let response = llm.chat_request("What is your name?").unwrap();
25 println!("llm Response: {}", response);
26
27 let prompt_smol = r#"
28 You are a helpful assistant.
29 First time I ask, you name will be daisy.
30 Second time I ask, you name will be hector.
31 "#;
32 llm_small
33 .set_options(LlmOptions::default().with_system_message(prompt_smol.to_string()))
34 .unwrap();
35
36 let response = llm_small.chat_request("What is your name?").unwrap();
37 println!("llm_small Response: {}", response);
38
39 let response = llm_small.chat_request("What is your name?").unwrap();
40 println!("llm_small Response: {}", response);
41
42 // test if same instance is used in host/runtime
43 let response = llm.chat_request("What is your name?").unwrap();
44 println!("llm Response: {}", response);
45 }

Trait Implementations§
Source§impl Clone for BlocklessLlm
impl Clone for BlocklessLlm
Source§fn clone(&self) -> BlocklessLlm
fn clone(&self) -> BlocklessLlm
Returns a duplicate of the value. Read more
1.0.0 · Source§const fn clone_from(&mut self, source: &Self)
const fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source. Read more

Source§impl Debug for BlocklessLlm
impl Debug for BlocklessLlm
Source§impl Default for BlocklessLlm
impl Default for BlocklessLlm
Source§fn default() -> BlocklessLlm
fn default() -> BlocklessLlm
Returns the “default value” for a type. Read more
Source§impl<'de> Deserialize<'de> for BlocklessLlm
impl<'de> Deserialize<'de> for BlocklessLlm
Source§fn deserialize<__D>(__deserializer: __D) -> Result<Self, __D::Error>
where
    __D: Deserializer<'de>,
fn deserialize<__D>(__deserializer: __D) -> Result<Self, __D::Error>
where
    __D: Deserializer<'de>,
Deserialize this value from the given Serde deserializer. Read more
Source§impl Drop for BlocklessLlm
impl Drop for BlocklessLlm
Auto Trait Implementations§
impl Freeze for BlocklessLlm
impl RefUnwindSafe for BlocklessLlm
impl Send for BlocklessLlm
impl Sync for BlocklessLlm
impl Unpin for BlocklessLlm
impl UnwindSafe for BlocklessLlm
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more