// limit-llm: Multi-Provider Support

use async_trait::async_trait;
use futures::Stream;
use std::pin::Pin;

use crate::error::LlmError;
use crate::types::{Message, Tool, Usage};

10/// Response chunk from streaming LLM providers
11pub enum ProviderResponseChunk {
12    ContentDelta(String),
13    ReasoningDelta(String),
14    ToolCallDelta {
15        id: String,
16        name: String,
17        arguments: serde_json::Value,
18    },
19    Done(Usage),
20}
21
/// Common trait implemented by all LLM providers.
///
/// Implementors must be `Send + Sync` so a single provider instance can be
/// shared across async tasks; [`LlmProvider::clone_box`] exists to make
/// `Box<dyn LlmProvider>` cloneable (see the `Clone` impl below).
#[async_trait]
pub trait LlmProvider: Send + Sync {
    /// Send messages (with the set of available tools) to the LLM and
    /// receive the response as a stream of [`ProviderResponseChunk`]s.
    ///
    /// # Errors
    ///
    /// Returns `LlmError` if the request cannot be initiated; each streamed
    /// item is itself a `Result`, so the stream may also fail mid-response.
    // The boxed-stream return type is inherently verbose; clippy's
    // type_complexity lint is silenced rather than aliasing it away.
    #[allow(clippy::type_complexity)]
    async fn send(
        &self,
        messages: Vec<Message>,
        tools: Vec<Tool>,
    ) -> Result<
        Pin<Box<dyn Stream<Item = Result<ProviderResponseChunk, LlmError>> + Send + '_>>,
        LlmError,
    >;

    /// Short name identifying the backing provider.
    fn provider_name(&self) -> &str;

    /// Identifier of the model this provider is configured to use.
    fn model_name(&self) -> &str;

    /// Clone this provider into a fresh boxed trait object.
    ///
    /// Object-safe hook used by the `Clone` impl for `Box<dyn LlmProvider>`.
    fn clone_box(&self) -> Box<dyn LlmProvider>;
}
45
46/// Implement Clone for Box<dyn LlmProvider>
47impl Clone for Box<dyn LlmProvider> {
48    fn clone(&self) -> Self {
49        self.clone_box()
50    }
51}
52
/// Provider configuration variants.
///
/// Deserialized from a config object carrying a `"provider"` tag field;
/// variant names are matched lowercase (`"anthropic"`, `"openai"`). Any
/// other tag value falls back to `Unknown` via `#[serde(other)]` instead
/// of failing deserialization.
#[derive(Debug, Clone, serde::Deserialize)]
#[serde(tag = "provider", rename_all = "lowercase")]
pub enum ProviderConfig {
    /// Anthropic backend configuration (`"provider": "anthropic"`).
    Anthropic(AnthropicConfig),
    /// OpenAI backend configuration (`"provider": "openai"`).
    OpenAI(OpenAIConfig),
    /// Unrecognized provider tag; catch-all so parsing doesn't error.
    #[serde(other)]
    Unknown,
}
62
/// Anthropic-specific configuration.
#[derive(Debug, Clone, serde::Deserialize)]
pub struct AnthropicConfig {
    /// API key; `None` presumably means it is resolved elsewhere
    /// (e.g. from the environment) — NOTE(review): confirm at call sites.
    pub api_key: Option<String>,
    /// Model identifier; defaults to `default_anthropic_model()`
    /// ("claude-3-5-sonnet-20241022") when omitted from the config.
    #[serde(default = "default_anthropic_model")]
    pub model: String,
    /// Maximum tokens to generate; defaults to 4096 when omitted.
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
    /// Request timeout; defaults to 60. Units (likely seconds) are not
    /// shown here — TODO confirm at the usage site.
    #[serde(default = "default_timeout")]
    pub timeout: u64,
    /// Optional API base-URL override; `None` uses the provider default.
    #[serde(default)]
    pub base_url: Option<String>,
}
76
/// OpenAI-specific configuration.
#[derive(Debug, Clone, serde::Deserialize)]
pub struct OpenAIConfig {
    /// API key; `None` presumably means it is resolved elsewhere
    /// (e.g. from the environment) — NOTE(review): confirm at call sites.
    pub api_key: Option<String>,
    /// Model identifier; defaults to `default_openai_model()` ("gpt-4")
    /// when omitted from the config.
    #[serde(default = "default_openai_model")]
    pub model: String,
    /// Maximum tokens to generate; defaults to 4096 when omitted.
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
    /// Request timeout; defaults to 60. Units (likely seconds) are not
    /// shown here — TODO confirm at the usage site.
    #[serde(default = "default_timeout")]
    pub timeout: u64,
    /// Optional API base-URL override; `None` uses the provider default.
    #[serde(default)]
    pub base_url: Option<String>,
}
90
/// Default Anthropic model used when the config omits `model`.
fn default_anthropic_model() -> String {
    String::from("claude-3-5-sonnet-20241022")
}
94
/// Default OpenAI model used when the config omits `model`.
fn default_openai_model() -> String {
    String::from("gpt-4")
}
98
/// Default `max_tokens` applied when the config omits it.
fn default_max_tokens() -> u32 {
    4096
}
102
/// Default request timeout value applied when the config omits `timeout`.
fn default_timeout() -> u64 {
    60
}