pub struct Agent<C: Client> { /* private fields */ }

Expand description
Agent that automatically executes tools in a loop.
Unlike the raw Client, an Agent handles tool execution automatically:
- Sends request with tool definitions from the MCP server (if configured)
- Receives response with potential function calls
- Executes tools automatically
- Adds results back to conversation
- Loops until no more function calls
§Example
ⓘ (example shown for illustration; not compiled as a doctest)
use unia::agent::Agent;
use unia::providers::{Gemini, Provider};
use unia::model::{Message, Part};
let client = Gemini::create("api_key".to_string(), "gemini-3.0-pro".to_string());
let agent = Agent::new(client)
.with_server(weather_server);
let messages = vec![
Message::User(vec![
Part::Text { content: "What's the weather?".into(), finished: true }
])
];
let response = agent.chat(messages).await?;

Implementations§
Source§impl<C: Client> Agent<C>
impl<C: Client> Agent<C>
Source§
pub fn new(client: C) -> Self
pub fn new(client: C) -> Self
Create a new agent.
§Arguments
client: The initialized client instance
Tools are fetched from the configured MCP server if available.
Source§
pub fn with_server<S: MCPServer + 'static>(self, server: S) -> Self
pub fn with_server<S: MCPServer + 'static>(self, server: S) -> Self
Set the MCP server for the agent.
Source§
pub fn with_max_iterations(self, max: usize) -> Self
pub fn with_max_iterations(self, max: usize) -> Self
Set the maximum number of iterations for the agentic loop.
Source§
pub async fn chat(
&self,
messages: Vec<Message>,
) -> Result<Response, ClientError>
pub async fn chat( &self, messages: Vec<Message>, ) -> Result<Response, ClientError>
Send a chat request with automatic tool execution.
This method automatically handles the tool execution loop:
- Sends request to LLM with tools from the MCP server (if configured)
- Executes any tool calls
- Continues until no more tool calls or max iterations reached
§Arguments
messages: Conversation messages
§Returns
The response containing all new messages generated during the execution (including tool calls and results)
Source§
pub fn chat_stream<'a>(
&'a self,
messages: Vec<Message>,
) -> Pin<Box<dyn Stream<Item = Result<Response, ClientError>> + Send + 'a>>
where
C: StreamingClient,
pub fn chat_stream<'a>(
&'a self,
messages: Vec<Message>,
) -> Pin<Box<dyn Stream<Item = Result<Response, ClientError>> + Send + 'a>>
where
C: StreamingClient,
Send a streaming chat request with automatic tool execution.
This method automatically handles the tool execution loop with streaming:
- Sends streaming request to LLM with tools from the MCP server (if configured)
- Executes any tool calls
- Continues until no more tool calls or max iterations reached
§Arguments
messages: Conversation messages
§Returns
A stream of chunks for the final response after all tool executions complete
Auto Trait Implementations§
impl<C> Freeze for Agent<C>
where
    C: Freeze,
impl<C> !RefUnwindSafe for Agent<C>
impl<C> Send for Agent<C>
impl<C> Sync for Agent<C>
impl<C> Unpin for Agent<C>
where
    C: Unpin,
impl<C> !UnwindSafe for Agent<C>
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more