// ds_api/agent/agent_core.rs
1use std::collections::HashMap;
2
3use crate::api::ApiClient;
4use crate::conversation::{Conversation, LlmSummarizer, Summarizer};
5use crate::raw::request::message::{Message, Role};
6use crate::tool_trait::Tool;
7use serde_json::Value;
8
/// Information about a tool call requested by the model.
///
/// Yielded as `AgentEvent::ToolCall` when the model requests a tool invocation.
/// At this point the tool has not yet been executed.
#[derive(Debug, Clone)]
pub struct ToolCallInfo {
    /// Identifier of this tool call; the corresponding [`ToolCallResult`]
    /// carries the same `id`.
    pub id: String,
    /// Protocol-level function name the model asked to invoke.
    pub name: String,
    /// Arguments for the invocation, as parsed JSON.
    pub args: Value,
}
19
/// The result of a completed tool invocation.
///
/// Yielded as `AgentEvent::ToolResult` after the tool has finished executing.
#[derive(Debug, Clone)]
pub struct ToolCallResult {
    /// Identifier of the tool call this result belongs to (matches the `id` of
    /// the corresponding [`ToolCallInfo`]).
    pub id: String,
    /// Protocol-level function name that was invoked.
    pub name: String,
    /// The arguments the tool was invoked with, as parsed JSON.
    pub args: Value,
    /// The JSON value the tool returned.
    pub result: Value,
}
30
/// Events emitted by [`AgentStream`][crate::agent::AgentStream].
///
/// Each variant represents a distinct, self-contained event in the agent lifecycle:
///
/// - `Token(String)` — a text fragment from the assistant. In streaming mode each
///   `Token` is a single SSE delta; in non-streaming mode the full response text
///   arrives as one `Token`.
/// - `ToolCall(ToolCallInfo)` — the model has requested a tool invocation. One event
///   is emitted per call, before execution begins.
/// - `ToolResult(ToolCallResult)` — a tool has finished executing. One event is
///   emitted per call, in the same order as the corresponding `ToolCall` events.
#[derive(Debug, Clone)]
pub enum AgentEvent {
    /// A fragment of assistant text (a single SSE delta when streaming; the
    /// whole response otherwise).
    Token(String),
    /// The model requested a tool invocation; the tool has not run yet.
    ToolCall(ToolCallInfo),
    /// A tool finished executing; carries its arguments and result.
    ToolResult(ToolCallResult),
}
48
/// An agent that combines a [`Conversation`] with a set of callable tools.
///
/// Build one with the fluent builder methods, then call [`chat`][DeepseekAgent::chat]
/// to start a turn:
///
/// ```no_run
/// use ds_api::{DeepseekAgent, tool};
/// use serde_json::{Value, json};
///
/// struct MyTool;
///
/// #[tool]
/// impl ds_api::Tool for MyTool {
///     async fn greet(&self, name: String) -> Value {
///         json!({ "greeting": format!("Hello, {name}!") })
///     }
/// }
///
/// # #[tokio::main] async fn main() {
/// let agent = DeepseekAgent::new("sk-...")
///     .add_tool(MyTool);
/// # }
/// ```
pub struct DeepseekAgent {
    /// The conversation manages history, the API client, and context-window compression.
    pub(crate) conversation: Conversation,
    /// Registered tool implementations, boxed for dynamic dispatch, in
    /// registration order.
    pub(crate) tools: Vec<Box<dyn Tool>>,
    /// Maps each protocol-level function name to the index in `tools` of the
    /// implementation that handles it (populated by `add_tool`).
    pub(crate) tool_index: HashMap<String, usize>,
    /// When `true` the agent uses SSE streaming for each API turn so `Token` events
    /// arrive incrementally. When `false` (default) the full response is awaited.
    pub(crate) streaming: bool,
    /// The model to use for every API turn. Defaults to `"deepseek-chat"`.
    pub(crate) model: String,
}
83
84impl DeepseekAgent {
85 fn from_parts(client: ApiClient, model: impl Into<String>) -> Self {
86 let model = model.into();
87 let summarizer = LlmSummarizer::new(client.clone()).with_model(model.clone());
88 Self {
89 conversation: Conversation::new(client).with_summarizer(summarizer),
90 tools: vec![],
91 tool_index: HashMap::new(),
92 streaming: false,
93 model,
94 }
95 }
96
97 /// Create a new agent targeting the DeepSeek API with `deepseek-chat`.
98 pub fn new(token: impl Into<String>) -> Self {
99 Self::from_parts(ApiClient::new(token), "deepseek-chat")
100 }
101
102 /// Create an agent targeting an OpenAI-compatible provider.
103 ///
104 /// All three parameters are set at construction time and never change:
105 ///
106 /// ```no_run
107 /// use ds_api::DeepseekAgent;
108 ///
109 /// let agent = DeepseekAgent::custom(
110 /// "sk-or-...",
111 /// "https://openrouter.ai/api/v1",
112 /// "meta-llama/llama-3.3-70b-instruct:free",
113 /// );
114 /// ```
115 pub fn custom(
116 token: impl Into<String>,
117 base_url: impl Into<String>,
118 model: impl Into<String>,
119 ) -> Self {
120 let client = ApiClient::new(token).with_base_url(base_url);
121 Self::from_parts(client, model)
122 }
123
124 /// Register a tool (builder-style, supports chaining).
125 ///
126 /// The tool's protocol-level function names are indexed so incoming tool-call
127 /// requests from the model can be dispatched to the correct implementation.
128 pub fn add_tool<TT: Tool + 'static>(mut self, tool: TT) -> Self {
129 let idx = self.tools.len();
130 for raw in tool.raw_tools() {
131 self.tool_index.insert(raw.function.name.clone(), idx);
132 }
133 self.tools.push(Box::new(tool));
134 self
135 }
136
137 /// Push a user message and return an [`AgentStream`][crate::agent::AgentStream]
138 /// that drives the full agent loop (API calls + tool execution).
139 pub fn chat(mut self, user_message: &str) -> crate::agent::stream::AgentStream {
140 self.conversation.push_user_input(user_message);
141 crate::agent::stream::AgentStream::new(self)
142 }
143
144 /// Enable SSE streaming for each API turn (builder-style).
145 pub fn with_streaming(mut self) -> Self {
146 self.streaming = true;
147 self
148 }
149
150 /// Prepend a permanent system prompt to the conversation history (builder-style).
151 ///
152 /// System messages added this way are never removed by the built-in summarizers.
153 pub fn with_system_prompt(mut self, prompt: impl Into<String>) -> Self {
154 self.conversation
155 .add_message(Message::new(Role::System, &prompt.into()));
156 self
157 }
158
159 /// Replace the summarizer used for context-window management (builder-style).
160 pub fn with_summarizer(mut self, summarizer: impl Summarizer + 'static) -> Self {
161 self.conversation = self.conversation.with_summarizer(summarizer);
162 self
163 }
164}