// strands_agents/models/litellm.rs

//! LiteLLM model provider.
//!
//! LiteLLM provides a unified interface to multiple LLM providers.
//! See: <https://docs.litellm.ai/>

use crate::types::content::{Message, SystemContentBlock};
use crate::types::errors::StrandsError;
use crate::types::tools::{ToolChoice, ToolSpec};

use super::{Model, ModelConfig, StreamEventStream};

/// Configuration for LiteLLM models.
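///
/// # Examples
///
/// ```ignore
/// // A minimal sketch of the builder chain. Parameter names are passed
/// // through verbatim, so OpenAI-style names (`max_tokens`, `temperature`,
/// // ...) are assumed here.
/// let config = LiteLLMConfig::new("openai/gpt-4o")
///     .with_params(serde_json::json!({"max_tokens": 1000, "temperature": 0.7}));
/// assert_eq!(config.model_id, "openai/gpt-4o");
/// ```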
#[derive(Debug, Clone, Default)]
pub struct LiteLLMConfig {
    /// Model ID (e.g., "openai/gpt-4o", "anthropic/claude-3-sonnet").
    pub model_id: String,
    /// Model parameters (e.g., max_tokens, temperature).
    pub params: Option<serde_json::Value>,
    /// Client arguments for the LiteLLM client.
    pub client_args: Option<serde_json::Value>,
}

impl LiteLLMConfig {
    /// Create a new LiteLLM config.
    pub fn new(model_id: impl Into<String>) -> Self {
        Self {
            model_id: model_id.into(),
            params: None,
            client_args: None,
        }
    }

    /// Set model parameters.
    pub fn with_params(mut self, params: serde_json::Value) -> Self {
        self.params = Some(params);
        self
    }

    /// Set client arguments.
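    ///
    /// A sketch; the accepted keys depend on the eventual client
    /// implementation (LiteLLM itself understands e.g. `api_base` and
    /// `api_key`):
    ///
    /// ```ignore
    /// let config = LiteLLMConfig::new("openai/gpt-4o")
    ///     .with_client_args(serde_json::json!({"api_base": "http://localhost:4000"}));
    /// ```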
    pub fn with_client_args(mut self, client_args: serde_json::Value) -> Self {
        self.client_args = Some(client_args);
        self
    }
}

/// LiteLLM model provider implementation.
///
/// This provides a unified interface to multiple LLM providers through LiteLLM.
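///
/// # Examples
///
/// ```ignore
/// // A minimal sketch; `stream` is currently a stub that yields an error.
/// let model = LiteLLMModel::new(LiteLLMConfig::new("anthropic/claude-3-sonnet"));
/// assert_eq!(model.config().model_id, "anthropic/claude-3-sonnet");
/// ```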
pub struct LiteLLMModel {
    config: ModelConfig,
    litellm_config: LiteLLMConfig,
}

impl LiteLLMModel {
    /// Create a new LiteLLM model.
    pub fn new(config: LiteLLMConfig) -> Self {
        Self {
            config: ModelConfig::new(&config.model_id),
            litellm_config: config,
        }
    }

    /// Get the LiteLLM configuration.
    pub fn litellm_config(&self) -> &LiteLLMConfig {
        &self.litellm_config
    }

    /// Update the LiteLLM configuration.
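    ///
    /// Rebuilds the shared [`ModelConfig`] so that `config().model_id` stays
    /// in sync with the new LiteLLM model ID:
    ///
    /// ```ignore
    /// model.update_litellm_config(LiteLLMConfig::new("openai/gpt-4o-mini"));
    /// assert_eq!(model.config().model_id, "openai/gpt-4o-mini");
    /// ```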
    pub fn update_litellm_config(&mut self, config: LiteLLMConfig) {
        self.config = ModelConfig::new(&config.model_id);
        self.litellm_config = config;
    }
}

impl Model for LiteLLMModel {
    fn config(&self) -> &ModelConfig {
        &self.config
    }

    fn update_config(&mut self, config: ModelConfig) {
        self.config = config;
    }

    fn stream<'a>(
        &'a self,
        _messages: &'a [Message],
        _tool_specs: Option<&'a [ToolSpec]>,
        _system_prompt: Option<&'a str>,
        _tool_choice: Option<ToolChoice>,
        _system_prompt_content: Option<&'a [SystemContentBlock]>,
    ) -> StreamEventStream<'a> {
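        // Streaming is not wired to a LiteLLM backend yet; yield a single
        // error event so callers fail fast instead of receiving an empty
        // stream.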
        Box::pin(futures::stream::once(async {
            Err(StrandsError::ModelError {
                message: "LiteLLM integration requires a litellm-rs or HTTP client implementation".into(),
                source: None,
            })
        }))
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_litellm_config() {
        let config = LiteLLMConfig::new("openai/gpt-4o")
            .with_params(serde_json::json!({"max_tokens": 1000}));

        assert_eq!(config.model_id, "openai/gpt-4o");
        assert!(config.params.is_some());
    }

    #[test]
    fn test_litellm_model_creation() {
        let config = LiteLLMConfig::new("anthropic/claude-3-sonnet");
        let model = LiteLLMModel::new(config);

        assert_eq!(model.config().model_id, "anthropic/claude-3-sonnet");
    }
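
    /// A sketch exercising the stub `stream` implementation. This assumes
    /// `StreamEventStream` is a boxed `futures::Stream` (as the
    /// `Box::pin(futures::stream::once(...))` body suggests) and that an
    /// empty message slice is acceptable input for the stub.
    #[test]
    fn test_litellm_stream_is_stubbed() {
        use futures::StreamExt;

        let model = LiteLLMModel::new(LiteLLMConfig::new("openai/gpt-4o"));
        let mut stream = model.stream(&[], None, None, None, None);

        // The stub yields exactly one item: an `Err(StrandsError::ModelError)`.
        let event = futures::executor::block_on(stream.next());
        assert!(matches!(event, Some(Err(StrandsError::ModelError { .. }))));
        assert!(futures::executor::block_on(stream.next()).is_none());
    }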
}