pub struct Agent {
pub cache: MessageStack,
pub completion_model: CompletionModel,
}

Fields§
§cache: MessageStack
§completion_model: CompletionModel

Implementations§
Source§impl Agent
impl Agent
Source
pub fn new(init_prompt: Option<&str>, completion_model: CompletionModel) -> Self
pub fn new(init_prompt: Option<&str>, completion_model: CompletionModel) -> Self
For creating an Agent given optional system prompt content and model
Examples found in repository?
examples/rag.rs (lines 164-167)
160async fn main() {
161 dotenv::dotenv().ok();
162 let api_key = std::env::var("OPENAI_KEY").unwrap();
163 let embedder = EmbeddingModel::default_openai(&api_key);
164 let mut agent = Agent::new(
165 Some("You are jerry!!"),
166 CompletionModel::default_openai(&api_key),
167 );
168
169 let mut rag = RagManager {
170 embedder,
171 data: None,
172 };
173
174 rag.init_products().await;
175 // agent.insert_listener(listener);
176
177 let m = Message::new_user("I need a new fitness toy, what is the best product for me?");
178 let message_embedding = rag
179 .embed(&m.content)
180 .await
181 .expect("Failed to embed message content");
182 let relavent = rag
183 .data
184 .as_ref()
185 .unwrap()
186 .get_close_embeddings_from_query_embedding(message_embedding, 5)
187 .await;
188 println!("Got relavent structs: {relavent:#?}");
189 agent.cache.push(m);
190 agent.cache.push(relavent.to_message(MessageRole::User));
191 let response = agent.io_completion().await.unwrap();
192 println!("{:?}", response);
193}

More examples
examples/summarize_at_limit.rs (lines 43-46)
40async fn main() {
41 dotenv::dotenv().ok();
42 let api_key = std::env::var("ANTHROPIC_KEY").unwrap();
43 let mut agent = Agent::new(
44 Some("You are jerry!!"),
45 CompletionModel::default_anthropic(&api_key),
46 );
47
48 let summarizer = Agent::new(
49 Some("Your job is to summarize chunks of a conversation"),
50 CompletionModel::default_anthropic(&api_key),
51 );
52 let mut sal = SummarizeAtLimit::new(5usize, summarizer);
53
54 // agent.insert_listener(sal);
55 let message = Message::new_user("im saying things to fill space");
56
57 for _ in 0..=5 {
58 // And now we use our predefined action method
59 push_to_cache_with_limit(&mut agent, &mut sal, message.clone())
60 .await
61 .unwrap();
62 }
63
64 // env.finalize_dispatch().await.unwrap();
65 println!("STACK: {:?}", agent.cache);
66 assert_eq!(agent.cache.len(), 4);
67 assert_eq!(agent.cache.as_ref()[0].role, MessageRole::System);
68 assert_eq!(agent.cache.as_ref()[1].role, MessageRole::Assistant);
69 assert_eq!(agent.cache.as_ref()[2].role, MessageRole::User);
70 println!("All asserts passed, summarize at limit working as expected");
71}

Source
pub async fn io_completion(&mut self) -> AgentResult<String>
pub async fn io_completion(&mut self) -> AgentResult<String>
Get a simple string response from a model
Examples found in repository?
examples/summarize_at_limit.rs (line 30)
18async fn push_to_cache_with_limit(
19 agent: &mut Agent,
20 sum: &mut SummarizeAtLimit,
21 m: Message,
22) -> AgentResult<()> {
23 if agent.cache.len() >= sum.limit {
24 let message = Message::new_user(&format!(
25 "Summarize this chat history: {}",
26 agent.cache.to_string()
27 ));
28 sum.summarizer.cache.push(message);
29
30 let summary = sum.summarizer.io_completion().await?;
31
32 agent.cache.mut_filter_by(&MessageRole::System, true);
33 agent.cache.push(Message::new_assistant(&summary));
34 }
35 agent.cache.push(m);
36 Ok(())
37}More examples
examples/rag.rs (line 191)
160async fn main() {
161 dotenv::dotenv().ok();
162 let api_key = std::env::var("OPENAI_KEY").unwrap();
163 let embedder = EmbeddingModel::default_openai(&api_key);
164 let mut agent = Agent::new(
165 Some("You are jerry!!"),
166 CompletionModel::default_openai(&api_key),
167 );
168
169 let mut rag = RagManager {
170 embedder,
171 data: None,
172 };
173
174 rag.init_products().await;
175 // agent.insert_listener(listener);
176
177 let m = Message::new_user("I need a new fitness toy, what is the best product for me?");
178 let message_embedding = rag
179 .embed(&m.content)
180 .await
181 .expect("Failed to embed message content");
182 let relavent = rag
183 .data
184 .as_ref()
185 .unwrap()
186 .get_close_embeddings_from_query_embedding(message_embedding, 5)
187 .await;
188 println!("Got relavent structs: {relavent:#?}");
189 agent.cache.push(m);
190 agent.cache.push(relavent.to_message(MessageRole::User));
191 let response = agent.io_completion().await.unwrap();
192 println!("{:?}", response);
193}

Source
pub async fn stream_completion(&mut self) -> AgentResult<ProviderStreamHandler>
pub async fn stream_completion(&mut self) -> AgentResult<ProviderStreamHandler>
Get a streamed response from a model
Source
pub async fn function_completion(
&mut self,
function: Function,
) -> AgentResult<Value>
pub async fn function_completion( &mut self, function: Function, ) -> AgentResult<Value>
Get a function completion from a model, returns a JSON object
Trait Implementations§
Source§impl<'de> Deserialize<'de> for Agent
impl<'de> Deserialize<'de> for Agent
Source§fn deserialize<__D>(__deserializer: __D) -> Result<Self, __D::Error>
where
    __D: Deserializer<'de>,
fn deserialize<__D>(__deserializer: __D) -> Result<Self, __D::Error>
where
    __D: Deserializer<'de>,
Deserialize this value from the given Serde deserializer. Read more
Auto Trait Implementations§
impl Freeze for Agent
impl !RefUnwindSafe for Agent
impl Send for Agent
impl Sync for Agent
impl Unpin for Agent
impl !UnwindSafe for Agent
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more