Skip to main content

LLMClient

Trait LLMClient 

Source
/// Generic LLM client trait for provider abstraction.
///
/// Implementors (per the docs page: `OllamaClient` behind the `ollama` feature,
/// `OpenAIClient` behind the `openai` feature) provide text generation,
/// tool-calling, and streaming against a concrete model backend.
///
/// NOTE(review): the explicit `'life0`/`'async_trait` lifetimes and
/// `Pin<Box<dyn Future<...>>>` return types look like the expansion of the
/// `#[async_trait]` macro — confirm against the crate source before editing;
/// in the original source these are likely plain `async fn`s.
pub trait LLMClient: Send + Sync {
    // Required methods

    /// Generate a completion from a prompt.
    fn generate<'life0, 'life1, 'async_trait>(
        &'life0 self,
        prompt: &'life1 str,
    ) -> Pin<Box<dyn Future<Output = Result<String>> + Send + 'async_trait>>
       where Self: 'async_trait,
             'life0: 'async_trait,
             'life1: 'async_trait;

    /// Generate with a system prompt in addition to the user prompt.
    fn generate_with_system<'life0, 'life1, 'life2, 'async_trait>(
        &'life0 self,
        system: &'life1 str,
        prompt: &'life2 str,
    ) -> Pin<Box<dyn Future<Output = Result<String>> + Send + 'async_trait>>
       where Self: 'async_trait,
             'life0: 'async_trait,
             'life1: 'async_trait,
             'life2: 'async_trait;

    /// Generate with conversation history.
    ///
    /// `messages` are `(role, content)` pairs — presumably role strings like
    /// "user"/"assistant"; verify against an implementor.
    fn generate_with_history<'life0, 'life1, 'async_trait>(
        &'life0 self,
        messages: &'life1 [(String, String)],
    ) -> Pin<Box<dyn Future<Output = Result<String>> + Send + 'async_trait>>
       where Self: 'async_trait,
             'life0: 'async_trait,
             'life1: 'async_trait;

    /// Generate with tool-calling support: the returned [`LLMResponse`]
    /// (unlike the plain `String` of `generate`) can carry tool calls
    /// requested by the model.
    fn generate_with_tools<'life0, 'life1, 'life2, 'async_trait>(
        &'life0 self,
        prompt: &'life1 str,
        tools: &'life2 [ToolDefinition],
    ) -> Pin<Box<dyn Future<Output = Result<LLMResponse>> + Send + 'async_trait>>
       where Self: 'async_trait,
             'life0: 'async_trait,
             'life1: 'async_trait,
             'life2: 'async_trait;

    /// Generate with conversation history AND tool definitions.
    ///
    /// This is the core method for multi-turn tool calling, combining:
    /// - `generate_with_history()` — conversation context
    /// - `generate_with_tools()` — tool-calling capability
    ///
    /// # Arguments
    /// * `messages` — conversation history as [`ConversationMessage`] structs
    /// * `tools` — available tool definitions
    ///
    /// # Returns
    /// An [`LLMResponse`] containing the model's reply and any tool calls
    /// requested.
    fn generate_with_tools_and_history<'life0, 'life1, 'life2, 'async_trait>(
        &'life0 self,
        messages: &'life1 [ConversationMessage],
        tools: &'life2 [ToolDefinition],
    ) -> Pin<Box<dyn Future<Output = Result<LLMResponse>> + Send + 'async_trait>>
       where Self: 'async_trait,
             'life0: 'async_trait,
             'life1: 'async_trait,
             'life2: 'async_trait;

    /// Stream a completion: resolves to a boxed `Stream` of text chunks,
    /// each chunk itself fallible (`Result<String>`).
    fn stream<'life0, 'life1, 'async_trait>(
        &'life0 self,
        prompt: &'life1 str,
    ) -> Pin<Box<dyn Future<Output = Result<Box<dyn Stream<Item = Result<String>> + Send + Unpin>>> + Send + 'async_trait>>
       where Self: 'async_trait,
             'life0: 'async_trait,
             'life1: 'async_trait;

    /// Stream a completion with a system prompt.
    fn stream_with_system<'life0, 'life1, 'life2, 'async_trait>(
        &'life0 self,
        system: &'life1 str,
        prompt: &'life2 str,
    ) -> Pin<Box<dyn Future<Output = Result<Box<dyn Stream<Item = Result<String>> + Send + Unpin>>> + Send + 'async_trait>>
       where Self: 'async_trait,
             'life0: 'async_trait,
             'life1: 'async_trait,
             'life2: 'async_trait;

    /// Stream a completion with conversation history (`(role, content)` pairs,
    /// same shape as `generate_with_history`).
    fn stream_with_history<'life0, 'life1, 'async_trait>(
        &'life0 self,
        messages: &'life1 [(String, String)],
    ) -> Pin<Box<dyn Future<Output = Result<Box<dyn Stream<Item = Result<String>> + Send + Unpin>>> + Send + 'async_trait>>
       where Self: 'async_trait,
             'life0: 'async_trait,
             'life1: 'async_trait;

    /// Get the model name/identifier (borrowed from `self`).
    fn model_name(&self) -> &str;
}
Expand description

Generic LLM client trait for provider abstraction

Required Methods§

Source

fn generate<'life0, 'life1, 'async_trait>( &'life0 self, prompt: &'life1 str, ) -> Pin<Box<dyn Future<Output = Result<String>> + Send + 'async_trait>>
where Self: 'async_trait, 'life0: 'async_trait, 'life1: 'async_trait,

Generate a completion from a prompt

Source

fn generate_with_system<'life0, 'life1, 'life2, 'async_trait>( &'life0 self, system: &'life1 str, prompt: &'life2 str, ) -> Pin<Box<dyn Future<Output = Result<String>> + Send + 'async_trait>>
where Self: 'async_trait, 'life0: 'async_trait, 'life1: 'async_trait, 'life2: 'async_trait,

Generate with system prompt

Source

fn generate_with_history<'life0, 'life1, 'async_trait>( &'life0 self, messages: &'life1 [(String, String)], ) -> Pin<Box<dyn Future<Output = Result<String>> + Send + 'async_trait>>
where Self: 'async_trait, 'life0: 'async_trait, 'life1: 'async_trait,

Generate with conversation history

Source

fn generate_with_tools<'life0, 'life1, 'life2, 'async_trait>( &'life0 self, prompt: &'life1 str, tools: &'life2 [ToolDefinition], ) -> Pin<Box<dyn Future<Output = Result<LLMResponse>> + Send + 'async_trait>>
where Self: 'async_trait, 'life0: 'async_trait, 'life1: 'async_trait, 'life2: 'async_trait,

Generate with tool calling support

Source

fn generate_with_tools_and_history<'life0, 'life1, 'life2, 'async_trait>( &'life0 self, messages: &'life1 [ConversationMessage], tools: &'life2 [ToolDefinition], ) -> Pin<Box<dyn Future<Output = Result<LLMResponse>> + Send + 'async_trait>>
where Self: 'async_trait, 'life0: 'async_trait, 'life1: 'async_trait, 'life2: 'async_trait,

Generate with conversation history AND tool definitions.

This is the core method for multi-turn tool calling, combining:

  • generate_with_history() - conversation context
  • generate_with_tools() - tool calling capability
§Arguments
  • messages - Conversation history as ConversationMessage structs
  • tools - Available tool definitions
§Returns

An LLMResponse containing the model’s reply and any tool calls requested.

Source

fn stream<'life0, 'life1, 'async_trait>( &'life0 self, prompt: &'life1 str, ) -> Pin<Box<dyn Future<Output = Result<Box<dyn Stream<Item = Result<String>> + Send + Unpin>>> + Send + 'async_trait>>
where Self: 'async_trait, 'life0: 'async_trait, 'life1: 'async_trait,

Stream a completion

Source

fn stream_with_system<'life0, 'life1, 'life2, 'async_trait>( &'life0 self, system: &'life1 str, prompt: &'life2 str, ) -> Pin<Box<dyn Future<Output = Result<Box<dyn Stream<Item = Result<String>> + Send + Unpin>>> + Send + 'async_trait>>
where Self: 'async_trait, 'life0: 'async_trait, 'life1: 'async_trait, 'life2: 'async_trait,

Stream a completion with system prompt

Source

fn stream_with_history<'life0, 'life1, 'async_trait>( &'life0 self, messages: &'life1 [(String, String)], ) -> Pin<Box<dyn Future<Output = Result<Box<dyn Stream<Item = Result<String>> + Send + Unpin>>> + Send + 'async_trait>>
where Self: 'async_trait, 'life0: 'async_trait, 'life1: 'async_trait,

Stream a completion with conversation history

Source

fn model_name(&self) -> &str

Get the model name/identifier

Implementors§

Source§

impl LLMClient for OllamaClient

Available on crate feature ollama only.
Source§

impl LLMClient for OpenAIClient

Available on crate feature openai only.