
agent_io/llm/deepseek/mod.rs

//! DeepSeek Chat Model implementation

mod builder;

use async_trait::async_trait;

use crate::llm::{
    BaseChatModel, ChatCompletion, ChatStream, LlmError, Message, ToolChoice, ToolDefinition,
};

pub use builder::ChatDeepSeekBuilder;

/// DeepSeek Chat Model
///
/// # Example
/// ```ignore
/// use agent_io::llm::ChatDeepSeek;
///
/// let llm = ChatDeepSeek::new("deepseek-chat")?;
/// ```
pub struct ChatDeepSeek {
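    /// Shared OpenAI-compatible client. DeepSeek exposes an OpenAI-style chat
    /// completions API, so all requests are delegated to this inner implementation.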
    pub(super) inner: crate::llm::openai_compatible::ChatOpenAICompatible,
}

impl ChatDeepSeek {
    /// Create a new DeepSeek chat model
    pub fn new(model: impl Into<String>) -> Result<Self, LlmError> {
        Self::builder().model(model).build()
    }

    /// Create a builder for configuration
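    ///
    /// A minimal sketch of builder usage (the same path `new` takes internally;
    /// any options beyond `model` depend on what `ChatDeepSeekBuilder` exposes):
    ///
    /// ```ignore
    /// let llm = ChatDeepSeek::builder()
    ///     .model("deepseek-chat")
    ///     .build()?;
    /// ```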
    pub fn builder() -> ChatDeepSeekBuilder {
        ChatDeepSeekBuilder::default()
    }
}

#[async_trait]
impl BaseChatModel for ChatDeepSeek {
    fn model(&self) -> &str {
        self.inner.model()
    }

    fn provider(&self) -> &str {
        "deepseek"
    }

    fn context_window(&self) -> Option<u64> {
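        // DeepSeek's chat models currently document a 64K-token context window.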
        Some(64_000)
    }

    async fn invoke(
        &self,
        messages: Vec<Message>,
        tools: Option<Vec<ToolDefinition>>,
        tool_choice: Option<ToolChoice>,
    ) -> Result<ChatCompletion, LlmError> {
        self.inner.invoke(messages, tools, tool_choice).await
    }

    async fn invoke_stream(
        &self,
        messages: Vec<Message>,
        tools: Option<Vec<ToolDefinition>>,
        tool_choice: Option<ToolChoice>,
    ) -> Result<ChatStream, LlmError> {
        self.inner.invoke_stream(messages, tools, tool_choice).await
    }
}