// openai_protocol/builders/chat/response.rs
//! Builder for ChatCompletionResponse
//!
//! Provides an ergonomic fluent API for constructing chat completion responses.

use crate::{chat::*, common::Usage};

/// Builder for ChatCompletionResponse
///
/// Provides a fluent interface for constructing chat completion responses with sensible defaults.
#[must_use = "Builder does nothing until .build() is called"]
#[derive(Clone, Debug)]
pub struct ChatCompletionResponseBuilder {
    id: String,                         // completion ID, e.g. "chatcmpl_abc123"
    object: String,                     // object type; defaults to "chat.completion"
    created: u64,                       // creation time, Unix seconds
    model: String,                      // model name used for generation
    choices: Vec<ChatChoice>,           // generated completion choices
    usage: Option<Usage>,               // optional token usage accounting
    system_fingerprint: Option<String>, // optional system fingerprint
}
21
22impl ChatCompletionResponseBuilder {
23    /// Create a new builder with required fields
24    ///
25    /// # Arguments
26    /// - `id`: Completion ID (e.g., "chatcmpl_abc123")
27    /// - `model`: Model name used for generation
28    pub fn new(id: impl Into<String>, model: impl Into<String>) -> Self {
29        Self {
30            id: id.into(),
31            object: "chat.completion".to_string(),
32            created: chrono::Utc::now().timestamp() as u64,
33            model: model.into(),
34            choices: Vec::new(),
35            usage: None,
36            system_fingerprint: None,
37        }
38    }
39
40    /// Copy common fields from a ChatCompletionRequest
41    ///
42    /// This populates the model field from the request.
43    pub fn copy_from_request(mut self, request: &ChatCompletionRequest) -> Self {
44        self.model.clone_from(&request.model);
45        self
46    }
47
48    /// Set the object type (default: "chat.completion")
49    pub fn object(mut self, object: impl Into<String>) -> Self {
50        self.object = object.into();
51        self
52    }
53
54    /// Set the creation timestamp (default: current time)
55    pub fn created(mut self, timestamp: u64) -> Self {
56        self.created = timestamp;
57        self
58    }
59
60    /// Set the choices
61    pub fn choices(mut self, choices: Vec<ChatChoice>) -> Self {
62        self.choices = choices;
63        self
64    }
65
66    /// Add a single choice
67    pub fn add_choice(mut self, choice: ChatChoice) -> Self {
68        self.choices.push(choice);
69        self
70    }
71
72    /// Set usage information
73    pub fn usage(mut self, usage: Usage) -> Self {
74        self.usage = Some(usage);
75        self
76    }
77
78    /// Set usage if provided (handles Option)
79    pub fn maybe_usage(mut self, usage: Option<Usage>) -> Self {
80        if let Some(u) = usage {
81            self.usage = Some(u);
82        }
83        self
84    }
85
86    /// Set system fingerprint if provided (handles Option)
87    pub fn maybe_system_fingerprint(mut self, fingerprint: Option<impl Into<String>>) -> Self {
88        if let Some(fp) = fingerprint {
89            self.system_fingerprint = Some(fp.into());
90        }
91        self
92    }
93
94    /// Build the ChatCompletionResponse
95    pub fn build(self) -> ChatCompletionResponse {
96        ChatCompletionResponse {
97            id: self.id,
98            object: self.object,
99            created: self.created,
100            model: self.model,
101            choices: self.choices,
102            usage: self.usage,
103            system_fingerprint: self.system_fingerprint,
104        }
105    }
106}
107
// ============================================================================
// Tests
// ============================================================================

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_build_minimal() {
        // Only the required fields are supplied; everything else defaults.
        let resp = ChatCompletionResponse::builder("chatcmpl_123", "gpt-4").build();

        assert_eq!(resp.id, "chatcmpl_123");
        assert_eq!(resp.model, "gpt-4");
        assert_eq!(resp.object, "chat.completion");
        assert!(resp.choices.is_empty());
        assert!(resp.usage.is_none());
        assert!(resp.system_fingerprint.is_none());
    }

    #[test]
    fn test_build_complete() {
        // A fully populated response: one choice, usage, and a fingerprint.
        let only_choice = ChatChoice {
            index: 0,
            message: ChatCompletionMessage {
                role: "assistant".to_string(),
                content: Some("Hello!".to_string()),
                tool_calls: None,
                reasoning_content: None,
            },
            logprobs: None,
            finish_reason: Some("stop".to_string()),
            matched_stop: None,
            hidden_states: None,
        };

        let token_usage = Usage {
            prompt_tokens: 10,
            completion_tokens: 20,
            total_tokens: 30,
            prompt_tokens_details: None,
            completion_tokens_details: None,
        };

        let resp = ChatCompletionResponse::builder("chatcmpl_456", "gpt-4")
            .choices(vec![only_choice.clone()])
            .maybe_usage(Some(token_usage))
            .maybe_system_fingerprint(Some("fp_123abc"))
            .build();

        assert_eq!(resp.id, "chatcmpl_456");
        assert_eq!(resp.choices.len(), 1);
        assert_eq!(resp.choices[0].index, 0);
        assert!(resp.usage.is_some());
        assert_eq!(resp.system_fingerprint.as_ref().unwrap(), "fp_123abc");
    }

    #[test]
    fn test_add_multiple_choices() {
        // add_choice appends; both choices must survive in insertion order.
        let first = ChatChoice {
            index: 0,
            message: ChatCompletionMessage {
                role: "assistant".to_string(),
                content: Some("Option 1".to_string()),
                tool_calls: None,
                reasoning_content: None,
            },
            logprobs: None,
            finish_reason: Some("stop".to_string()),
            matched_stop: None,
            hidden_states: None,
        };

        let second = ChatChoice {
            index: 1,
            message: ChatCompletionMessage {
                role: "assistant".to_string(),
                content: Some("Option 2".to_string()),
                tool_calls: None,
                reasoning_content: None,
            },
            logprobs: None,
            finish_reason: Some("stop".to_string()),
            matched_stop: None,
            hidden_states: None,
        };

        let resp = ChatCompletionResponse::builder("chatcmpl_789", "gpt-4")
            .add_choice(first)
            .add_choice(second)
            .build();

        assert_eq!(resp.choices.len(), 2);
        assert_eq!(resp.choices[0].index, 0);
        assert_eq!(resp.choices[1].index, 1);
    }

    #[test]
    fn test_copy_from_request() {
        // copy_from_request overrides the model passed to builder().
        let req = ChatCompletionRequest {
            messages: vec![],
            model: "gpt-3.5-turbo".to_string(),
            ..Default::default()
        };

        let resp = ChatCompletionResponse::builder("chatcmpl_101", "gpt-4")
            .copy_from_request(&req)
            .build();

        assert_eq!(resp.model, "gpt-3.5-turbo"); // Copied from request
    }
}