// tuitbot_core/llm/mod.rs
//! LLM provider abstraction and implementations.
//!
//! Provides a trait-based abstraction for LLM providers (OpenAI, Anthropic, Ollama)
//! with typed responses, token usage tracking, and health checking.

pub mod anthropic;
pub mod factory;
pub mod openai_compat;

use crate::error::LlmError;

/// Token usage information from an LLM completion.
///
/// Both counters are small `Copy` fields, so the whole struct is `Copy`;
/// `PartialEq`/`Eq` are derived so usages can be compared directly in
/// tests and accounting code.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct TokenUsage {
    /// Number of tokens in the input/prompt.
    pub input_tokens: u32,
    /// Number of tokens in the output/completion.
    pub output_tokens: u32,
}

impl TokenUsage {
    /// Total tokens consumed by the completion (input + output).
    ///
    /// Uses `saturating_add` so a pathological provider response cannot
    /// cause an overflow panic in debug builds.
    #[must_use]
    pub fn total(&self) -> u32 {
        self.input_tokens.saturating_add(self.output_tokens)
    }
}
20
21/// Response from an LLM completion request.
22#[derive(Debug, Clone)]
23pub struct LlmResponse {
24    /// The generated text content.
25    pub text: String,
26    /// Token usage for this completion.
27    pub usage: TokenUsage,
28    /// The model that produced this response.
29    pub model: String,
30}
31
/// Parameters controlling LLM generation behavior.
///
/// `PartialEq` is derived (all fields support it) so parameter sets can be
/// compared in tests and change-detection logic; `Eq` is impossible because
/// of the `f32` field.
#[derive(Debug, Clone, PartialEq)]
pub struct GenerationParams {
    /// Maximum number of tokens to generate.
    pub max_tokens: u32,
    /// Sampling temperature (0.0 = deterministic, 1.0+ = creative).
    pub temperature: f32,
    /// Optional system prompt override. If `Some`, replaces the caller's system prompt.
    pub system_prompt: Option<String>,
}

impl Default for GenerationParams {
    /// Conservative defaults: 512 output tokens, temperature 0.7, and no
    /// system-prompt override.
    fn default() -> Self {
        Self {
            max_tokens: 512,
            temperature: 0.7,
            system_prompt: None,
        }
    }
}
52
/// Trait abstracting all LLM provider operations.
///
/// Implementations include `OpenAiCompatProvider` (for OpenAI and Ollama)
/// and `AnthropicProvider`. The trait is object-safe for use as `Box<dyn LlmProvider>`.
#[async_trait::async_trait]
pub trait LlmProvider: Send + Sync {
    /// Returns the display name of this provider (e.g., "openai", "anthropic", "ollama").
    fn name(&self) -> &str;

    /// Send a completion request to the LLM.
    ///
    /// If `params.system_prompt` is `Some`, it overrides the `system` parameter.
    ///
    /// # Errors
    ///
    /// Returns an [`LlmError`] when the completion request fails; the exact
    /// failure modes (network, auth, rate limits, …) are implementation-specific.
    async fn complete(
        &self,
        system: &str,
        user_message: &str,
        params: &GenerationParams,
    ) -> Result<LlmResponse, LlmError>;

    /// Check if the provider is reachable and configured correctly.
    ///
    /// # Errors
    ///
    /// Returns an [`LlmError`] if the provider cannot be reached or is
    /// misconfigured.
    async fn health_check(&self) -> Result<(), LlmError>;
}