pub struct LlmAgent { /* private fields */ }
An agent that uses an LLM (Large Language Model) to process messages.
The LlmAgent integrates with the Ceylon Mesh agent system and delegates
message processing to an LLM. It supports:
- Configurable system prompts
- Model parameters (temperature, max tokens, etc.)
- Tool calling integration with the existing ToolInvoker
- Multiple LLM providers (OpenAI, Anthropic, Ollama, etc.)
- Optional memory module integration
§Examples
use runtime::llm::{LlmAgent, LLMConfig};

// Create agent with Ollama (local, no API key needed)
let agent = LlmAgent::builder("my_agent", "ollama::llama2")
    .with_system_prompt("You are a helpful assistant.")
    .build()
    .expect("Failed to create agent");

// Create agent with OpenAI
let agent = LlmAgent::builder("gpt_agent", "openai::gpt-4")
    .with_api_key(std::env::var("OPENAI_API_KEY").unwrap())
    .with_temperature(0.7)
    .build()
    .expect("Failed to create agent");
Implementations§
impl LlmAgent
pub fn builder(
    name: impl Into<String>,
    model: impl Into<String>,
) -> LlmAgentBuilder
Create a builder for constructing an LlmAgent.
§Arguments
name - The name of the agent
model - The model in “provider::model” format (e.g., “openai::gpt-4”, “ollama::llama2”)
§Examples
use runtime::llm::LlmAgent;
let agent = LlmAgent::builder("my_agent", "ollama::llama2")
    .build()
    .expect("Failed to create agent");
pub fn new_with_config(
    name: impl Into<String>,
    config: LLMConfig,
    system_prompt: impl Into<String>,
    memory: Option<Arc<dyn Memory>>,
) -> Result<Self>
Create an LlmAgent from a comprehensive LLMConfig, with a system prompt and an optional memory module.
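A minimal sketch of direct construction; it assumes LLMConfig implements Default (that impl is not documented on this page) and attaches no memory module:

use runtime::llm::{LlmAgent, LLMConfig};

// Assumption: LLMConfig implements Default; build the config however
// your application actually configures providers and model parameters.
let config = LLMConfig::default();
let agent = LlmAgent::new_with_config(
    "my_agent",
    config,
    "You are a helpful assistant.",
    None, // no Memory module attached
)
.expect("Failed to create agent");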
pub fn with_react(&mut self, config: ReActConfig)
Enable ReAct (Reason + Act) mode
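A short sketch of enabling ReAct mode on an existing agent; it assumes ReActConfig implements Default and is importable from runtime::llm (neither is confirmed on this page):

use runtime::llm::{LlmAgent, ReActConfig};

let mut agent = LlmAgent::builder("react_agent", "ollama::llama2")
    .build()
    .expect("Failed to create agent");

// Assumption: ReActConfig implements Default; tune its fields as needed.
agent.with_react(ReActConfig::default());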
pub async fn send_message_react(
    &mut self,
    message: impl Into<String>,
    ctx: &mut AgentContext,
) -> Result<ReActResult>
Send a message using ReAct reasoning mode
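A hedged usage sketch: it assumes ReAct mode has already been enabled via with_react and that a mutable AgentContext (ctx) is in scope, for example inside an agent callback; the fields of ReActResult are not documented here:

// `agent` is an LlmAgent with ReAct enabled; `ctx: &mut AgentContext`.
let result: ReActResult = agent
    .send_message_react("Break this task into steps and execute them.", ctx)
    .await?;
// Inspect `result` as appropriate for your application.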
pub async fn send_message_and_get_response(
    &mut self,
    message: impl Into<String>,
    ctx: &mut AgentContext,
) -> Result<String>
Send a message and get the LLM’s response.
This is a convenience method for Python bindings and direct usage. It processes the message with the LLM and returns the response text.
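A minimal sketch, again assuming a mutable AgentContext is already available in the calling code:

let reply = agent
    .send_message_and_get_response("Summarize the last meeting.", ctx)
    .await?;
println!("LLM replied: {reply}");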
pub fn last_response(&self) -> Option<String>
Get the last assistant response from conversation history
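A small follow-up sketch: after a message has been processed, the accessor returns the most recent assistant reply, or None if nothing has been exchanged yet:

if let Some(last) = agent.last_response() {
    println!("Last assistant response: {last}");
} else {
    println!("No assistant response recorded yet.");
}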
Trait Implementations§
impl Agent for LlmAgent
fn name(&self) -> String
fn on_message<'life0, 'life1, 'async_trait>(
    &'life0 mut self,
    msg: CeylonMessage,
    ctx: &'life1 mut AgentContext,
) -> Pin<Box<dyn Future<Output = Result<()>> + Send + 'async_trait>>
where
    Self: 'async_trait,
    'life0: 'async_trait,
    'life1: 'async_trait,
fn on_generic_message<'life0, 'life1, 'async_trait>(
    &'life0 mut self,
    msg: GenericMessage,
    ctx: &'life1 mut AgentContext,
) -> Pin<Box<dyn Future<Output = Result<GenericResponse>> + Send + 'async_trait>>
where
    Self: 'async_trait,
    'life0: 'async_trait,
    'life1: 'async_trait,
Handle a generic string message and return a generic response.
Default implementation returns an echo response.
fn tool_invoker(&self) -> Option<&ToolInvoker>
Get the tool invoker for this agent (if it has actions)
fn tool_invoker_mut(&mut self) -> Option<&mut ToolInvoker>
Get a mutable reference to the tool invoker, for tool registration
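A hedged sketch of reaching the mutable invoker before registering tools; the registration method on ToolInvoker is not documented on this page, so the call below is only a hypothetical placeholder:

if let Some(invoker) = agent.tool_invoker_mut() {
    // Hypothetical: replace with whatever ToolInvoker actually exposes
    // for adding tools, e.g. a register/add method.
    // invoker.register(my_tool);
    let _ = invoker;
}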
fn on_start<'life0, 'life1, 'async_trait>(
    &'life0 mut self,
    _ctx: &'life1 mut AgentContext,
) -> Pin<Box<dyn Future<Output = Result<()>> + Send + 'async_trait>>
where
    Self: 'async_trait,
    'life0: 'async_trait,
    'life1: 'async_trait,
fn on_stop<'life0, 'life1, 'async_trait>(
    &'life0 mut self,
    _ctx: &'life1 mut AgentContext,
) -> Pin<Box<dyn Future<Output = Result<()>> + Send + 'async_trait>>
where
    Self: 'async_trait,
    'life0: 'async_trait,
    'life1: 'async_trait,
Auto Trait Implementations§
impl Freeze for LlmAgent
impl !RefUnwindSafe for LlmAgent
impl Send for LlmAgent
impl Sync for LlmAgent
impl Unpin for LlmAgent
impl !UnwindSafe for LlmAgent
Blanket Implementations§
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.
impl<T> Instrument for T
fn instrument(self, span: Span) -> Instrumented<Self>
fn in_current_span(self) -> Instrumented<Self>
impl<T> IntoEither for T
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true. Converts self into a Right variant of Either<Self, Self> otherwise.
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true. Converts self into a Right variant of Either<Self, Self> otherwise.