systemprompt_agent/services/a2a_server/processing/ai_executor.rs

use anyhow::Result;
use futures::StreamExt;
use std::sync::Arc;
use tokio::sync::mpsc;

use crate::models::a2a::Artifact;
use crate::services::SkillService;
use systemprompt_models::{
    AiMessage, AiProvider, AiRequest, CallToolResult, MessageRole, RequestContext, ToolCall,
    ToolResultFormatter,
};

use super::message::StreamEvent;
use crate::models::AgentRuntimeInfo;

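/// Resolve the `(provider, model, max_output_tokens)` triple for an AI call.
///
/// Precedence: a per-request `tool_model_config` override wins, then the
/// agent's runtime settings, then the defaults exposed by `ai_service`.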
fn resolve_provider_config(
    request_context: &RequestContext,
    agent_runtime: &AgentRuntimeInfo,
    ai_service: &dyn AiProvider,
) -> (String, String, u32) {
    if let Some(config) = request_context.tool_model_config() {
        let provider = config
            .provider
            .as_deref()
            .or(agent_runtime.provider.as_deref())
            .unwrap_or_else(|| ai_service.default_provider())
            .to_string();
        let model = config
            .model
            .as_deref()
            .or(agent_runtime.model.as_deref())
            .unwrap_or_else(|| ai_service.default_model())
            .to_string();
        let max_tokens = config
            .max_output_tokens
            .or(agent_runtime.max_output_tokens)
            .unwrap_or_else(|| ai_service.default_max_output_tokens());

        tracing::debug!(
            provider,
            model,
            max_output_tokens = max_tokens,
            "Using tool_model_config"
        );

        return (provider, model, max_tokens);
    }

    let provider = agent_runtime
        .provider
        .as_deref()
        .unwrap_or_else(|| ai_service.default_provider())
        .to_string();
    let model = agent_runtime
        .model
        .as_deref()
        .unwrap_or_else(|| ai_service.default_model())
        .to_string();
    let max_tokens = agent_runtime
        .max_output_tokens
        .unwrap_or_else(|| ai_service.default_max_output_tokens());

    (provider, model, max_tokens)
}

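/// Make a follow-up AI call that condenses tool results and artifacts into a
/// brief conversational summary, streaming it to `tx` and returning the text.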
pub async fn synthesize_tool_results_with_artifacts(
    ai_service: Arc<dyn AiProvider>,
    agent_runtime: &AgentRuntimeInfo,
    original_messages: Vec<AiMessage>,
    initial_response: &str,
    tool_calls: &[ToolCall],
    tool_results: &[CallToolResult],
    artifacts: &[Artifact],
    tx: mpsc::UnboundedSender<StreamEvent>,
    request_context: RequestContext,
    _skill_service: Arc<SkillService>,
) -> Result<String, ()> {
    let tool_results_context = ToolResultFormatter::format_for_synthesis(tool_calls, tool_results);
    let artifact_references = build_artifact_references(artifacts);

    let synthesis_prompt = build_synthesis_prompt(
        tool_calls.len(),
        &tool_results_context,
        &artifact_references,
    );

    let mut synthesis_messages = original_messages;
    synthesis_messages.push(AiMessage {
        role: MessageRole::Assistant,
        content: initial_response.to_string(),
        parts: Vec::new(),
    });
    synthesis_messages.push(AiMessage {
        role: MessageRole::User,
        content: synthesis_prompt,
        parts: Vec::new(),
    });

    tracing::info!(
        tool_result_count = tool_results.len(),
        "Calling AI to synthesize tool results"
    );

    let (provider, model, max_output_tokens) =
        resolve_provider_config(&request_context, agent_runtime, ai_service.as_ref());

    let synthesis_request = AiRequest::builder(
        synthesis_messages,
        &provider,
        &model,
        max_output_tokens,
        request_context,
    )
    .build();

    match ai_service.generate(&synthesis_request).await {
        Ok(response) => {
            let synthesized_text = response.content;

            tracing::info!(text_len = synthesized_text.len(), "Synthesis complete");

            // A dropped receiver means the client disconnected; the synthesis
            // itself still succeeded, so only log at debug level.
            if tx
                .send(StreamEvent::Text(synthesized_text.clone()))
                .is_err()
            {
                tracing::debug!("Stream receiver dropped during synthesis");
            }

            Ok(synthesized_text)
        },
        Err(e) => {
            tracing::error!(error = %e, "Synthesis failed");
            Err(())
        },
    }
}

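/// Stream a plain generation (no tool use) to `tx` while accumulating the
/// full text. Returns empty tool-call and tool-result vectors so callers get
/// the same tuple shape as the tool-using path.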
pub async fn process_without_tools(
    ai_service: Arc<dyn AiProvider>,
    agent_runtime: &AgentRuntimeInfo,
    ai_messages: Vec<AiMessage>,
    tx: mpsc::UnboundedSender<StreamEvent>,
    request_context: RequestContext,
) -> Result<(String, Vec<ToolCall>, Vec<CallToolResult>), ()> {
    let (provider, model, max_output_tokens) =
        resolve_provider_config(&request_context, agent_runtime, ai_service.as_ref());

    let generate_request = AiRequest::builder(
        ai_messages,
        &provider,
        &model,
        max_output_tokens,
        request_context,
    )
    .build();

    match ai_service.generate_stream(&generate_request).await {
        Ok(mut stream) => {
            let mut accumulated_text = String::new();
            while let Some(chunk) = stream.next().await {
                match chunk {
                    Ok(text) => {
                        accumulated_text.push_str(&text);
                        if tx.send(StreamEvent::Text(text)).is_err() {
                            tracing::debug!("Stream receiver dropped during generation");
                        }
                    },
                    Err(e) => {
                        if tx.send(StreamEvent::Error(e.to_string())).is_err() {
                            tracing::debug!("Stream receiver dropped while sending error");
                        }
                        return Err(());
                    },
                }
            }
            // No tools ran on this path, so the tool-call and tool-result
            // vectors are always empty.
            Ok((accumulated_text, Vec::new(), Vec::new()))
        },
        Err(e) => {
            if tx.send(StreamEvent::Error(e.to_string())).is_err() {
                tracing::debug!("Stream receiver dropped while sending error");
            }
            Err(())
        },
    }
}

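/// Build the synthesis prompt injected as a user message after tool
/// execution. It instructs the model to reference artifacts instead of
/// repeating their content and to stay under 100 words.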
fn build_synthesis_prompt(
    tool_count: usize,
    tool_results_context: &str,
    artifact_references: &str,
) -> String {
    format!(
        r#"# Tool Execution Complete

You executed {} tool(s). Now provide a BRIEF conversational response.

## Tool Results Summary

{}

## Artifacts Created

{}

## CRITICAL RULES - READ CAREFULLY

1. **NEVER repeat artifact content** - The user sees artifacts separately. Your message should REFERENCE them, never duplicate their content.
2. **Maximum 100 words** - Be extremely concise. 2-3 sentences is ideal.
3. **Describe what was done, not what it contains** - Say "I've created a blog post about X" NOT "Here is the blog post: [full content]"
4. **Be conversational** - Natural, friendly summary. Not a report or transcript.
5. **Reference artifacts naturally** - Use format like "(see the artifact for the full content)"

## BAD EXAMPLE (DO NOT DO THIS)
"I've created your blog post. Here's the content:

[2000 words of article text]

Let me know if you'd like any changes."

## GOOD EXAMPLE
"Done! I've created a blog post exploring the Human-AI collaboration workflow. The article covers the key differences between automation and augmentation approaches, with practical steps for maintaining your authentic voice. Take a look at the artifact and let me know if you'd like any adjustments."

---

Provide your brief, conversational response now. Remember: the artifact has the content - your message is just the friendly summary."#,
        tool_count, tool_results_context, artifact_references
    )
}

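/// Render a markdown bullet list describing each artifact by name and type,
/// falling back to the artifact ID when no name is set.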
fn build_artifact_references(artifacts: &[Artifact]) -> String {
    if artifacts.is_empty() {
        return "No artifacts were created.".to_string();
    }

    artifacts
        .iter()
        .map(|artifact| {
            let artifact_type = &artifact.metadata.artifact_type;
            let artifact_name = artifact
                .name
                .clone()
                .unwrap_or_else(|| artifact.id.to_string());

            format!(
                "- **{}** ({}): Reference as '(see {} for details)'",
                artifact_name, artifact_type, artifact_name
            )
        })
        .collect::<Vec<_>>()
        .join("\n")
}