// agent_io/llm/deepseek.rs
//! DeepSeek Chat Model implementation

use async_trait::async_trait;

use crate::llm::openai_compatible::ChatOpenAICompatible;
use crate::llm::{
    BaseChatModel, ChatCompletion, ChatStream, LlmError, Message, ToolChoice, ToolDefinition,
};

/// Base URL of the DeepSeek OpenAI-compatible REST endpoint.
const DEEPSEEK_URL: &str = "https://api.deepseek.com/v1";

/// DeepSeek Chat Model
///
/// Thin wrapper around [`ChatOpenAICompatible`] preconfigured with the
/// DeepSeek base URL and provider tag.
///
/// # Example
/// ```ignore
/// use agent_io::llm::ChatDeepSeek;
///
/// let llm = ChatDeepSeek::new("deepseek-chat")?;
/// ```
pub struct ChatDeepSeek {
    // Delegate that performs the actual OpenAI-compatible requests.
    inner: ChatOpenAICompatible,
}

24impl ChatDeepSeek {
25    /// Create a new DeepSeek chat model
26    pub fn new(model: impl Into<String>) -> Result<Self, LlmError> {
27        Self::builder().model(model).build()
28    }
29
30    /// Create a builder for configuration
31    pub fn builder() -> ChatDeepSeekBuilder {
32        ChatDeepSeekBuilder::default()
33    }
34}
35
/// Builder for [`ChatDeepSeek`] configuration.
#[derive(Default)]
pub struct ChatDeepSeekBuilder {
    // Required: model identifier, e.g. "deepseek-chat".
    model: Option<String>,
    // Optional: when unset, build() falls back to the DEEPSEEK_API_KEY env var.
    api_key: Option<String>,
    // Optional sampling temperature; build() defaults it to 0.2 when unset.
    temperature: Option<f32>,
    // Optional completion-token cap; forwarded as-is to the inner client.
    max_tokens: Option<u64>,
}

44impl ChatDeepSeekBuilder {
45    pub fn model(mut self, model: impl Into<String>) -> Self {
46        self.model = Some(model.into());
47        self
48    }
49
50    pub fn api_key(mut self, key: impl Into<String>) -> Self {
51        self.api_key = Some(key.into());
52        self
53    }
54
55    pub fn temperature(mut self, temp: f32) -> Self {
56        self.temperature = Some(temp);
57        self
58    }
59
60    pub fn max_tokens(mut self, tokens: u64) -> Self {
61        self.max_tokens = Some(tokens);
62        self
63    }
64
65    pub fn build(self) -> Result<ChatDeepSeek, LlmError> {
66        let model = self
67            .model
68            .ok_or_else(|| LlmError::Config("model is required".into()))?;
69
70        let api_key = self
71            .api_key
72            .or_else(|| std::env::var("DEEPSEEK_API_KEY").ok())
73            .ok_or_else(|| LlmError::Config("DEEPSEEK_API_KEY not set".into()))?;
74
75        let inner = ChatOpenAICompatible::builder()
76            .model(&model)
77            .base_url(DEEPSEEK_URL)
78            .provider("deepseek")
79            .api_key(Some(api_key))
80            .temperature(self.temperature.unwrap_or(0.2))
81            .max_completion_tokens(self.max_tokens)
82            .build()?;
83
84        Ok(ChatDeepSeek { inner })
85    }
86}
87
#[async_trait]
impl BaseChatModel for ChatDeepSeek {
    /// Model identifier this instance was configured with.
    fn model(&self) -> &str {
        self.inner.model()
    }

    /// Static provider tag for this backend.
    fn provider(&self) -> &str {
        "deepseek"
    }

    /// Advertised context window in tokens.
    // NOTE(review): hard-coded 64k for every model name the builder accepts —
    // confirm this holds for all DeepSeek model variants.
    fn context_window(&self) -> Option<u64> {
        Some(64_000)
    }

    /// Run a single (non-streaming) chat completion by delegating to the
    /// OpenAI-compatible inner client.
    async fn invoke(
        &self,
        messages: Vec<Message>,
        tools: Option<Vec<ToolDefinition>>,
        tool_choice: Option<ToolChoice>,
    ) -> Result<ChatCompletion, LlmError> {
        self.inner.invoke(messages, tools, tool_choice).await
    }

    /// Run a streaming chat completion by delegating to the inner client.
    async fn invoke_stream(
        &self,
        messages: Vec<Message>,
        tools: Option<Vec<ToolDefinition>>,
        tool_choice: Option<ToolChoice>,
    ) -> Result<ChatStream, LlmError> {
        self.inner.invoke_stream(messages, tools, tool_choice).await
    }
}