1use super::tools::{CallToolResult, ToolCall};
2use serde::{Deserialize, Serialize};
3use uuid::Uuid;
4
/// A completed reply from an AI provider, carrying the generated text plus
/// token accounting, tool-call activity, cache usage, and latency metadata.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AiResponse {
    /// Identifier of the request this response answers.
    pub request_id: Uuid,
    /// The generated text content.
    pub content: String,
    /// Name of the provider that produced the response.
    pub provider: String,
    /// Model identifier used for the generation.
    pub model: String,
    /// Total token count, when the provider reports one.
    /// NOTE(review): presumably input + output combined — confirm per provider.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tokens_used: Option<u32>,
    /// Prompt-side token count, when reported.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub input_tokens: Option<u32>,
    /// Completion-side token count, when reported.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub output_tokens: Option<u32>,
    /// Wall-clock latency of the request in milliseconds.
    pub latency_ms: u64,
    /// Tool invocations requested by the model; omitted from JSON when empty.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tool_calls: Vec<ToolCall>,
    /// Results of executed tool calls; omitted from JSON when empty.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tool_results: Vec<CallToolResult>,
    /// Provider-supplied reason the generation stopped (e.g. length/stop).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub finish_reason: Option<String>,
    /// Whether this response was served from a cache.
    pub cache_hit: bool,
    /// Tokens read from a provider-side prompt cache, when reported.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub cache_read_tokens: Option<u32>,
    /// Tokens written while creating a provider-side cache entry, when reported.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub cache_creation_tokens: Option<u32>,
    /// Whether the content was delivered via streaming.
    pub is_streaming: bool,
}
31
32impl Default for AiResponse {
33 fn default() -> Self {
34 Self {
35 request_id: Uuid::nil(),
36 content: String::new(),
37 provider: String::new(),
38 model: String::new(),
39 tokens_used: None,
40 input_tokens: None,
41 output_tokens: None,
42 latency_ms: 0,
43 tool_calls: Vec::new(),
44 tool_results: Vec::new(),
45 finish_reason: None,
46 cache_hit: false,
47 cache_read_tokens: None,
48 cache_creation_tokens: None,
49 is_streaming: false,
50 }
51 }
52}
53
54impl AiResponse {
55 pub fn new(request_id: Uuid, content: String, provider: String, model: String) -> Self {
56 Self {
57 request_id,
58 content,
59 provider,
60 model,
61 ..Default::default()
62 }
63 }
64
65 pub const fn with_tokens(mut self, tokens_used: u32) -> Self {
66 self.tokens_used = Some(tokens_used);
67 self
68 }
69
70 pub const fn with_latency(mut self, latency_ms: u64) -> Self {
71 self.latency_ms = latency_ms;
72 self
73 }
74
75 pub const fn with_streaming(mut self, is_streaming: bool) -> Self {
76 self.is_streaming = is_streaming;
77 self
78 }
79
80 pub fn with_tool_calls(mut self, tool_calls: Vec<ToolCall>) -> Self {
81 self.tool_calls = tool_calls;
82 self
83 }
84
85 pub fn with_tool_results(mut self, tool_results: Vec<CallToolResult>) -> Self {
86 self.tool_results = tool_results;
87 self
88 }
89
90 pub fn has_tool_calls(&self) -> bool {
91 !self.tool_calls.is_empty()
92 }
93
94 pub fn has_tool_results(&self) -> bool {
95 !self.tool_results.is_empty()
96 }
97}
98
/// One item of a streamed response: either a fragment of generated text or
/// a usage/metadata record (fields mirror the optional counters on
/// `AiResponse`).
#[derive(Debug, Clone)]
pub enum StreamChunk {
    /// A piece of generated text.
    Text(String),
    /// Token-usage and completion metadata for the stream.
    /// NOTE(review): presumably emitted once near the end of a stream —
    /// confirm against the provider adapters that produce it.
    Usage {
        input_tokens: Option<u32>,
        output_tokens: Option<u32>,
        tokens_used: Option<u32>,
        cache_read_tokens: Option<u32>,
        cache_creation_tokens: Option<u32>,
        finish_reason: Option<String>,
    },
}
111
/// A web source cited by a search-grounded response.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WebSource {
    /// Human-readable title of the source.
    pub title: String,
    /// URI of the source.
    pub uri: String,
    /// Relevance score assigned by the provider.
    /// NOTE(review): range not visible here — presumably 0.0..=1.0; confirm.
    pub relevance: f32,
}
118
/// Outcome of retrieving one URL supplied as context to the model.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UrlMetadata {
    /// The URL that was fetched.
    pub retrieved_url: String,
    /// Provider-reported retrieval status for that URL.
    pub url_retrieval_status: String,
}
124
/// A response grounded in web-search results, pairing the generated text
/// with the sources, queries, and confidence data used to produce it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchGroundedResponse {
    /// The generated text content.
    pub content: String,
    /// Web sources backing the content.
    pub sources: Vec<WebSource>,
    /// Per-item confidence scores.
    /// NOTE(review): alignment not visible here — presumably parallel to
    /// `sources` or to grounded spans; confirm against the producer.
    pub confidence_scores: Vec<f32>,
    /// Search queries the provider issued while grounding.
    pub web_search_queries: Vec<String>,
    /// Retrieval outcomes for URL-context inputs, when present.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub url_context_metadata: Option<Vec<UrlMetadata>>,
    /// Total token count, when the provider reports one.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tokens_used: Option<u32>,
    /// Wall-clock latency of the request in milliseconds.
    pub latency_ms: u64,
    /// Provider-supplied reason the generation stopped.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub finish_reason: Option<String>,
    /// Raw provider safety ratings, kept as untyped JSON.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub safety_ratings: Option<Vec<serde_json::Value>>,
}