// ds_api/agent/agent_core.rs
1use std::collections::HashMap;
2
3use crate::api::ApiClient;
4use crate::conversation::{Conversation, LlmSummarizer, Summarizer};
5use crate::raw::request::message::{Message, Role};
6use crate::tool_trait::Tool;
7use serde_json::Value;
8use tokio::sync::mpsc;
9
/// A tool call fragment emitted by [`AgentStream`][crate::agent::AgentStream].
///
/// In streaming mode multiple `ToolCallChunk`s are emitted per tool call:
/// the first has an empty `delta` (name is known, no args yet); subsequent
/// chunks carry incremental argument JSON. In non-streaming mode a single
/// chunk is emitted with the complete argument JSON in `delta`.
#[derive(Debug, Clone)]
pub struct ToolCallChunk {
    /// Provider-assigned id of the tool call; accumulate chunks by this key.
    pub id: String,
    /// Protocol-level function name of the tool being invoked.
    pub name: String,
    /// Argument JSON: incremental in streaming mode, complete otherwise.
    pub delta: String,
    /// Index reported by the API — presumably the call's position within the
    /// assistant turn; TODO confirm against the provider's response format.
    pub index: u32,
}
23
/// The result of a completed tool invocation.
///
/// Yielded as `AgentEvent::ToolResult` after the tool has finished executing.
#[derive(Debug, Clone)]
pub struct ToolCallResult {
    /// Provider-assigned id of the originating tool call.
    pub id: String,
    /// Protocol-level function name that was invoked.
    pub name: String,
    /// The argument JSON the tool was called with.
    pub args: String,
    /// The JSON value returned by the tool implementation.
    pub result: Value,
}
34
/// Events emitted by [`AgentStream`][crate::agent::AgentStream].
///
/// Each variant represents a distinct, self-contained event in the agent lifecycle:
///
/// - `Token(String)` — a text fragment from the assistant. In streaming mode each
///   `Token` is a single SSE delta; in non-streaming mode the full response text
///   arrives as one `Token`.
/// - `ToolCall(ToolCallChunk)` — a tool call fragment (the chunk carries the call
///   `id`, the tool `name`, and an argument `delta`). Behaves exactly like
///   `Token`: in streaming mode one event is emitted per SSE chunk (first chunk has
///   an empty `delta` and carries the tool name; subsequent chunks carry incremental
///   argument JSON). In non-streaming mode a single event is emitted with the
///   complete arguments string. Accumulate `delta` values by `id` to reconstruct
///   the full argument JSON. Execution begins after all chunks for a turn are
///   delivered.
/// - `ToolResult(ToolCallResult)` — a tool has finished executing. One event is
///   emitted per call, in the same order as the corresponding `ToolCall` events.
#[derive(Debug, Clone)]
pub enum AgentEvent {
    /// A text fragment of the assistant's reply (see enum-level docs).
    Token(String),
    /// Emitted when the model produces reasoning/thinking content (e.g. deepseek-reasoner).
    /// In streaming mode this arrives token-by-token before the main reply.
    ReasoningToken(String),
    /// A tool call fragment; accumulate `delta` by `id` (see enum-level docs).
    ToolCall(ToolCallChunk),
    /// A tool finished executing; one per call, in `ToolCall` order.
    ToolResult(ToolCallResult),
}
60
/// An agent that combines a [`Conversation`] with a set of callable tools.
///
/// Build one with the fluent builder methods, then call [`chat`][DeepseekAgent::chat]
/// to start a turn:
///
/// ```no_run
/// use ds_api::{DeepseekAgent, tool};
/// use serde_json::{Value, json};
///
/// struct MyTool;
///
/// #[tool]
/// impl ds_api::Tool for MyTool {
///     async fn greet(&self, name: String) -> Value {
///         json!({ "greeting": format!("Hello, {name}!") })
///     }
/// }
///
/// # #[tokio::main] async fn main() {
/// let agent = DeepseekAgent::new("sk-...")
///     .add_tool(MyTool);
/// # }
/// ```
pub struct DeepseekAgent {
    /// The conversation manages history, the API client, and context-window compression.
    pub(crate) conversation: Conversation,
    /// Registered tool implementations, in registration order.
    pub(crate) tools: Vec<Box<dyn Tool>>,
    /// Maps each protocol-level function name to an index into `tools`, so
    /// incoming tool-call requests can be dispatched to their implementation.
    pub(crate) tool_index: HashMap<String, usize>,
    /// When `true` the agent uses SSE streaming for each API turn so `Token` events
    /// arrive incrementally. When `false` (default) the full response is awaited.
    pub(crate) streaming: bool,
    /// The model to use for every API turn. Defaults to `"deepseek-chat"`.
    pub(crate) model: String,
    /// Optional channel for injecting user messages mid-loop.
    /// Messages received here are drained after each tool-execution round and
    /// appended to the conversation history as `Role::User` messages before the
    /// next API turn begins.
    pub(crate) interrupt_rx: Option<mpsc::UnboundedReceiver<String>>,
    /// Optional map of extra top-level JSON fields to merge into the API request body.
    /// This is used by the builder helpers below to attach custom provider-specific
    /// fields that the typed request doesn't yet expose.
    pub(crate) extra_body: Option<serde_json::Map<String, serde_json::Value>>,
}
104
105impl DeepseekAgent {
106 fn from_parts(client: ApiClient, model: impl Into<String>) -> Self {
107 let model = model.into();
108 let summarizer = LlmSummarizer::new(client.clone()).with_model(model.clone());
109 Self {
110 conversation: Conversation::new(client).with_summarizer(summarizer),
111 tools: vec![],
112 tool_index: HashMap::new(),
113 streaming: false,
114 model,
115 interrupt_rx: None,
116 extra_body: None,
117 }
118 }
119
120 /// Create a new agent targeting the DeepSeek API with `deepseek-chat`.
121 pub fn new(token: impl Into<String>) -> Self {
122 Self::from_parts(ApiClient::new(token), "deepseek-chat")
123 }
124
125 /// Create an agent targeting an OpenAI-compatible provider.
126 ///
127 /// All three parameters are set at construction time and never change:
128 ///
129 /// ```no_run
130 /// use ds_api::DeepseekAgent;
131 ///
132 /// let agent = DeepseekAgent::custom(
133 /// "sk-or-...",
134 /// "https://openrouter.ai/api/v1",
135 /// "meta-llama/llama-3.3-70b-instruct:free",
136 /// );
137 /// ```
138 pub fn custom(
139 token: impl Into<String>,
140 base_url: impl Into<String>,
141 model: impl Into<String>,
142 ) -> Self {
143 let client = ApiClient::new(token).with_base_url(base_url);
144 Self::from_parts(client, model)
145 }
146
147 /// Register a tool (builder-style, supports chaining).
148 ///
149 /// The tool's protocol-level function names are indexed so incoming tool-call
150 /// requests from the model can be dispatched to the correct implementation.
151 pub fn add_tool<TT: Tool + 'static>(mut self, tool: TT) -> Self {
152 let idx = self.tools.len();
153 for raw in tool.raw_tools() {
154 self.tool_index.insert(raw.function.name.clone(), idx);
155 }
156 self.tools.push(Box::new(tool));
157 self
158 }
159
160 /// Push a user message and return an [`AgentStream`][crate::agent::AgentStream]
161 /// that drives the full agent loop (API calls + tool execution).
162 pub fn chat(mut self, user_message: &str) -> crate::agent::stream::AgentStream {
163 self.conversation.push_user_input(user_message);
164 crate::agent::stream::AgentStream::new(self)
165 }
166
167 /// Start an agent turn from the current history **without** pushing a new
168 /// user message first.
169 ///
170 /// Use this when you have already appended the user message manually (e.g.
171 /// via [`push_user_message_with_name`][Self::push_user_message_with_name])
172 /// and want to drive the agent loop from that point.
173 pub fn chat_from_history(self) -> crate::agent::stream::AgentStream {
174 crate::agent::stream::AgentStream::new(self)
175 }
176
177 /// Enable SSE streaming for each API turn (builder-style).
178 pub fn with_streaming(mut self) -> Self {
179 self.streaming = true;
180 self
181 }
182
183 /// Merge arbitrary top-level JSON key/value pairs into the request body for
184 /// the next API turn. The pairs are stored on the agent and later merged
185 /// into the `ApiRequest` raw body when a request is built.
186 ///
187 /// Example:
188 /// let mut map = serde_json::Map::new();
189 /// map.insert(\"foo\".to_string(), serde_json::json!(\"bar\"));
190 /// let agent = DeepseekAgent::new(\"sk-...\").extra_body(map);
191 pub fn extra_body(mut self, map: serde_json::Map<String, serde_json::Value>) -> Self {
192 if let Some(ref mut existing) = self.extra_body {
193 existing.extend(map);
194 } else {
195 self.extra_body = Some(map);
196 }
197 self
198 }
199
200 /// Add a single extra top-level field to be merged into the request body.
201 /// Convenience helper to avoid constructing a full map.
202 pub fn extra_field(mut self, key: impl Into<String>, value: serde_json::Value) -> Self {
203 if let Some(ref mut m) = self.extra_body {
204 m.insert(key.into(), value);
205 } else {
206 let mut m = serde_json::Map::new();
207 m.insert(key.into(), value);
208 self.extra_body = Some(m);
209 }
210 self
211 }
212
213 /// Prepend a permanent system prompt to the conversation history (builder-style).
214 ///
215 /// System messages added this way are never removed by the built-in summarizers.
216 pub fn with_system_prompt(mut self, prompt: impl Into<String>) -> Self {
217 self.conversation
218 .history_mut()
219 .insert(0, Message::new(Role::System, &prompt.into()));
220 self
221 }
222
223 /// Replace the summarizer used for context-window management (builder-style).
224 pub fn with_summarizer(mut self, summarizer: impl Summarizer + 'static) -> Self {
225 self.conversation = self.conversation.with_summarizer(summarizer);
226 self
227 }
228
229 /// Seed the agent with an existing message history (builder-style).
230 ///
231 /// Used to restore a conversation from persistent storage (e.g. SQLite)
232 /// after a process restart. The messages are set directly on the
233 /// underlying `Conversation` and will be included in the next API call.
234 ///
235 /// # Example
236 ///
237 /// ```no_run
238 /// use ds_api::DeepseekAgent;
239 /// use ds_api::raw::request::message::{Message, Role};
240 ///
241 /// # #[tokio::main] async fn main() {
242 /// let history = vec![
243 /// Message::new(Role::User, "Hello"),
244 /// Message::new(Role::Assistant, "Hi there!"),
245 /// ];
246 /// let agent = DeepseekAgent::new("sk-...").with_history(history);
247 /// # }
248 /// ```
249 pub fn with_history(mut self, history: Vec<crate::raw::request::message::Message>) -> Self {
250 self.conversation = self.conversation.with_history(history);
251 self
252 }
253
254 /// Append a user message with an optional display name to the conversation
255 /// history.
256 ///
257 /// The `name` field is passed through to the API as-is (OpenAI-compatible
258 /// providers use it to distinguish speakers in a shared channel).
259 ///
260 /// # Example
261 ///
262 /// ```no_run
263 /// use ds_api::DeepseekAgent;
264 ///
265 /// # #[tokio::main] async fn main() {
266 /// let mut agent = DeepseekAgent::new("sk-...");
267 /// agent.push_user_message_with_name("What time is it?", Some("alice"));
268 /// # }
269 /// ```
270 pub fn push_user_message_with_name(&mut self, text: &str, name: Option<&str>) {
271 use crate::raw::request::message::{Message, Role};
272 let mut msg = Message::new(Role::User, text);
273 msg.name = name.map(|n| n.to_string());
274 self.conversation.history_mut().push(msg);
275 }
276
277 /// Read-only view of the current conversation history.
278 ///
279 /// Returns all messages in order, including system prompts, user turns,
280 /// assistant replies, tool calls, and tool results. Auto-summary messages
281 /// inserted by the built-in summarizers are also included.
282 ///
283 /// # Example
284 ///
285 /// ```no_run
286 /// use ds_api::DeepseekAgent;
287 ///
288 /// # #[tokio::main] async fn main() {
289 /// let agent = DeepseekAgent::new("sk-...");
290 /// for msg in agent.history() {
291 /// println!("{:?}: {:?}", msg.role, msg.content);
292 /// }
293 /// # }
294 /// ```
295 pub fn history(&self) -> &[crate::raw::request::message::Message] {
296 self.conversation.history()
297 }
298
299 /// Attach an interrupt channel to the agent (builder-style).
300 ///
301 /// Returns the agent and the sender half of the channel. Send any `String`
302 /// through the `UnboundedSender` at any time; the message will be picked up
303 /// after the current tool-execution round finishes and inserted into the
304 /// conversation history as a `Role::User` message before the next API turn.
305 ///
306 /// # Example
307 ///
308 /// ```no_run
309 /// use ds_api::DeepseekAgent;
310 /// use tokio::sync::mpsc;
311 ///
312 /// # #[tokio::main] async fn main() {
313 /// let (agent, tx) = DeepseekAgent::new("sk-...")
314 /// .with_interrupt_channel();
315 ///
316 /// // In another task or callback:
317 /// tx.send("Actually, use Python instead.".into()).unwrap();
318 /// # }
319 /// ```
320 pub fn with_interrupt_channel(mut self) -> (Self, mpsc::UnboundedSender<String>) {
321 let (tx, rx) = mpsc::unbounded_channel();
322 self.interrupt_rx = Some(rx);
323 (self, tx)
324 }
325
326 /// Drain any pending messages from the interrupt channel and append them
327 /// to the conversation history as `Role::User` messages.
328 ///
329 /// Called by the state machine in [`AgentStream`] at the top of every
330 /// `Idle` transition so that injected messages are visible before each API
331 /// turn, not just after tool-execution rounds.
332 pub(crate) fn drain_interrupts(&mut self) {
333 if let Some(rx) = self.interrupt_rx.as_mut() {
334 while let Ok(msg) = rx.try_recv() {
335 self.conversation
336 .history_mut()
337 .push(Message::new(Role::User, &msg));
338 }
339 }
340 }
341}