1use std::sync::Arc;
2pub use synaptic_core::{ChatModel, ChatRequest, ChatResponse, ChatStream, Message, SynapticError};
3use synaptic_models::ProviderBackend;
4use synaptic_openai::{OpenAiChatModel, OpenAiConfig};
5
/// Identifiers for the DeepSeek models this crate can target.
///
/// Named variants cover the documented model families; `Custom` passes an
/// arbitrary model-name string through verbatim for anything not listed here.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum DeepSeekModel {
    /// The general-purpose chat model ("deepseek-chat").
    DeepSeekChat,
    /// The reasoning model ("deepseek-reasoner").
    DeepSeekReasoner,
    /// The code-focused model ("deepseek-coder-v2").
    DeepSeekCoderV2,
    /// Any other model identifier, sent to the API as-is.
    Custom(String),
}
13impl DeepSeekModel {
14 pub fn as_str(&self) -> &str {
15 match self {
16 DeepSeekModel::DeepSeekChat => "deepseek-chat",
17 DeepSeekModel::DeepSeekReasoner => "deepseek-reasoner",
18 DeepSeekModel::DeepSeekCoderV2 => "deepseek-coder-v2",
19 DeepSeekModel::Custom(s) => s.as_str(),
20 }
21 }
22}
23impl std::fmt::Display for DeepSeekModel {
24 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
25 write!(f, "{}", self.as_str())
26 }
27}
28
/// Configuration for a DeepSeek chat model.
///
/// Mirrors the OpenAI-compatible request knobs DeepSeek exposes; optional
/// fields are omitted from the request when `None`. Convert into an
/// `OpenAiConfig` (via the `From` impl below) to construct the client.
#[derive(Debug, Clone)]
pub struct DeepSeekConfig {
    /// DeepSeek API key used for authentication.
    pub api_key: String,
    /// Model identifier string (e.g. "deepseek-chat").
    pub model: String,
    /// Maximum number of tokens to generate, if limited.
    pub max_tokens: Option<u32>,
    /// Sampling temperature, if overriding the provider default.
    pub temperature: Option<f64>,
    /// Nucleus-sampling probability mass, if overriding the default.
    pub top_p: Option<f64>,
    /// Stop sequences that terminate generation, if any.
    pub stop: Option<Vec<String>>,
    /// Seed for deterministic sampling, if requested.
    pub seed: Option<u64>,
}
39impl DeepSeekConfig {
40 pub fn new(api_key: impl Into<String>, model: DeepSeekModel) -> Self {
41 Self {
42 api_key: api_key.into(),
43 model: model.to_string(),
44 max_tokens: None,
45 temperature: None,
46 top_p: None,
47 stop: None,
48 seed: None,
49 }
50 }
51 pub fn new_custom(api_key: impl Into<String>, model: impl Into<String>) -> Self {
52 Self {
53 api_key: api_key.into(),
54 model: model.into(),
55 max_tokens: None,
56 temperature: None,
57 top_p: None,
58 stop: None,
59 seed: None,
60 }
61 }
62 pub fn with_max_tokens(mut self, v: u32) -> Self {
63 self.max_tokens = Some(v);
64 self
65 }
66 pub fn with_temperature(mut self, v: f64) -> Self {
67 self.temperature = Some(v);
68 self
69 }
70 pub fn with_top_p(mut self, v: f64) -> Self {
71 self.top_p = Some(v);
72 self
73 }
74 pub fn with_stop(mut self, v: Vec<String>) -> Self {
75 self.stop = Some(v);
76 self
77 }
78 pub fn with_seed(mut self, v: u64) -> Self {
79 self.seed = Some(v);
80 self
81 }
82}
83impl From<DeepSeekConfig> for OpenAiConfig {
84 fn from(c: DeepSeekConfig) -> Self {
85 let mut cfg =
86 OpenAiConfig::new(c.api_key, c.model).with_base_url("https://api.deepseek.com/v1");
87 if let Some(v) = c.max_tokens {
88 cfg = cfg.with_max_tokens(v);
89 }
90 if let Some(v) = c.temperature {
91 cfg = cfg.with_temperature(v);
92 }
93 if let Some(v) = c.top_p {
94 cfg = cfg.with_top_p(v);
95 }
96 if let Some(v) = c.stop {
97 cfg = cfg.with_stop(v);
98 }
99 if let Some(v) = c.seed {
100 cfg = cfg.with_seed(v);
101 }
102 cfg
103 }
104}
105
/// Chat model backed by DeepSeek's OpenAI-compatible API.
///
/// A thin wrapper around [`OpenAiChatModel`] configured with DeepSeek's
/// base URL; all requests are delegated to the inner client.
pub struct DeepSeekChatModel {
    // Inner OpenAI-protocol client doing the actual transport work.
    inner: OpenAiChatModel,
}
109
110impl DeepSeekChatModel {
111 pub fn new(config: DeepSeekConfig, backend: Arc<dyn ProviderBackend>) -> Self {
112 Self {
113 inner: OpenAiChatModel::new(config.into(), backend),
114 }
115 }
116}
117
#[async_trait::async_trait]
impl ChatModel for DeepSeekChatModel {
    /// Sends a single chat request, delegating to the inner OpenAI-protocol client.
    async fn chat(&self, request: ChatRequest) -> Result<ChatResponse, SynapticError> {
        self.inner.chat(request).await
    }

    /// Opens a streaming chat, delegating to the inner OpenAI-protocol client.
    fn stream_chat(&self, request: ChatRequest) -> ChatStream<'_> {
        self.inner.stream_chat(request)
    }
}