// gpt5/lib.rs

1//! # GPT-5 Rust Client Library
2//!
3//! A comprehensive Rust client library for OpenAI's GPT-5 API with support for:
4//! - Function calling and tool usage
5//! - Reasoning capabilities with configurable effort levels
6//! - Verbosity control for response detail
7//! - Streaming and non-streaming responses
8//! - Type-safe enums for all API parameters
9//!
10//! ## Quick Start
11//!
12//! ```rust,no_run
13//! use gpt5::{Gpt5Client, Gpt5Model, Gpt5RequestBuilder, VerbosityLevel};
14//!
15//! #[tokio::main]
16//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
17//!     let client = Gpt5Client::new("your-api-key".to_string());
18//!     
19//!     let response = client
20//!         .simple(Gpt5Model::Gpt5Nano, "Hello, world!")
21//!         .await?;
22//!     
23//!     println!("Response: {}", response);
24//!     Ok(())
25//! }
26//! ```
27//!
28//! ## Advanced Usage with Function Calling
29//!
30//! ```rust,no_run
31//! use gpt5::{Gpt5Client, Gpt5Model, Gpt5RequestBuilder, Tool};
32//! use serde_json::json;
33//!
34//! let weather_tool = Tool {
35//!     tool_type: "function".to_string(),
36//!     name: "get_weather".to_string(),
37//!     description: "Get current weather".to_string(),
38//!     parameters: json!({
39//!         "type": "object",
40//!         "properties": {
41//!             "location": {"type": "string", "description": "City name"}
42//!         },
43//!         "required": ["location"]
44//!     }),
45//! };
46//!
47//! let req = Gpt5RequestBuilder::new(Gpt5Model::Gpt5)
48//!     .input("What's the weather in Boston?")
49//!     .tools(vec![weather_tool])
50//!     .tool_choice("auto")
51//!     .build();
52//! ```
53
54// Module declarations
55mod client;
56mod enums;
57mod models;
58mod requests;
59mod responses;
60
61// Re-export all public types for easy access
62pub use crate::client::Gpt5Client;
63pub use crate::enums::{
64    ContentType, FormatType, OutputType, ReasoningEffort, Role, Status, VerbosityLevel,
65};
66pub use crate::models::Gpt5Model;
67pub use crate::requests::{Gpt5Request, Gpt5RequestBuilder, RequestReasoning, RequestText, Tool};
68pub use crate::responses::{
69    Gpt5Response, InputTokenDetails, OpenAiError, OpenAiErrorDetails, OutputContent,
70    ResponseOutput, ResponseReasoning, ResponseText, ResponseTextFormat, ResponseTokenDetails,
71    ResponseUsage,
72};
73
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    #[test]
    fn test_gpt5_model_as_str() {
        // Every model variant must map to its exact wire identifier.
        assert_eq!(Gpt5Model::Gpt5.as_str(), "gpt-5");
        assert_eq!(Gpt5Model::Gpt5Mini.as_str(), "gpt-5-mini");
        assert_eq!(Gpt5Model::Gpt5Nano.as_str(), "gpt-5-nano");
        assert_eq!(Gpt5Model::Custom("custom".to_string()).as_str(), "custom");
    }

    #[test]
    fn test_reasoning_effort_serialization() {
        // Round-trip: enum -> JSON string -> enum.
        let encoded = serde_json::to_string(&ReasoningEffort::Low).unwrap();
        assert_eq!(encoded, "\"low\"");

        let decoded: ReasoningEffort = serde_json::from_str(&encoded).unwrap();
        assert_eq!(decoded, ReasoningEffort::Low);
    }

    #[test]
    fn test_verbosity_level_serialization() {
        // Round-trip: enum -> JSON string -> enum.
        let encoded = serde_json::to_string(&VerbosityLevel::High).unwrap();
        assert_eq!(encoded, "\"high\"");

        let decoded: VerbosityLevel = serde_json::from_str(&encoded).unwrap();
        assert_eq!(decoded, VerbosityLevel::High);
    }

    #[test]
    fn test_output_type_serialization() {
        // Round-trip: enum -> JSON string -> enum.
        let encoded = serde_json::to_string(&OutputType::Message).unwrap();
        assert_eq!(encoded, "\"message\"");

        let decoded: OutputType = serde_json::from_str(&encoded).unwrap();
        assert_eq!(decoded, OutputType::Message);
    }

    #[test]
    fn test_content_type_serialization() {
        // Round-trip: enum -> JSON string -> enum.
        let encoded = serde_json::to_string(&ContentType::OutputText).unwrap();
        assert_eq!(encoded, "\"output_text\"");

        let decoded: ContentType = serde_json::from_str(&encoded).unwrap();
        assert_eq!(decoded, ContentType::OutputText);
    }

    #[test]
    fn test_status_serialization() {
        // Round-trip: enum -> JSON string -> enum.
        let encoded = serde_json::to_string(&Status::Completed).unwrap();
        assert_eq!(encoded, "\"completed\"");

        let decoded: Status = serde_json::from_str(&encoded).unwrap();
        assert_eq!(decoded, Status::Completed);
    }

    #[test]
    fn test_role_serialization() {
        // Round-trip: enum -> JSON string -> enum.
        let encoded = serde_json::to_string(&Role::User).unwrap();
        assert_eq!(encoded, "\"user\"");

        let decoded: Role = serde_json::from_str(&encoded).unwrap();
        assert_eq!(decoded, Role::User);
    }

    #[test]
    fn test_format_type_serialization() {
        // Round-trip: enum -> JSON string -> enum.
        let encoded = serde_json::to_string(&FormatType::Markdown).unwrap();
        assert_eq!(encoded, "\"markdown\"");

        let decoded: FormatType = serde_json::from_str(&encoded).unwrap();
        assert_eq!(decoded, FormatType::Markdown);
    }

    #[test]
    fn test_gpt5_client_creation() {
        // Construction must retain the supplied (non-empty) API key.
        let client = Gpt5Client::new("test-api-key".to_string());
        assert!(!client.api_key.is_empty());
    }

    #[test]
    fn test_gpt5_client_with_base_url() {
        // with_base_url must override the default endpoint.
        let client = Gpt5Client::new("test-api-key".to_string())
            .with_base_url("https://custom-api.example.com".to_string());

        assert_eq!(client.base_url, "https://custom-api.example.com");
    }

    #[test]
    fn test_gpt5_request_builder_basic() {
        // A minimal build sets model + input and leaves every optional unset.
        let req = Gpt5RequestBuilder::new(Gpt5Model::Gpt5Nano)
            .input("Hello, world!")
            .build();

        assert_eq!(req.model, "gpt-5-nano");
        assert_eq!(req.input, "Hello, world!");
        assert!(req.reasoning.is_none());
        assert!(req.tools.is_none());
        assert!(req.tool_choice.is_none());
        assert!(req.max_output_tokens.is_none());
        assert!(req.top_p.is_none());
        assert!(req.text.is_none());
        assert!(req.instructions.is_none());
    }

    #[test]
    fn test_gpt5_request_builder_complete() {
        // Exercise every builder method in a single chain.
        let tool = Tool {
            tool_type: "function".to_string(),
            name: "get_weather".to_string(),
            description: "Get current weather".to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "location": {"type": "string"}
                }
            }),
        };

        let req = Gpt5RequestBuilder::new(Gpt5Model::Gpt5)
            .input("What's the weather?")
            .instructions("Use the weather tool")
            .reasoning_effort(ReasoningEffort::Medium)
            .verbosity(VerbosityLevel::High)
            .tools(vec![tool])
            .tool_choice("auto")
            .max_output_tokens(1000)
            .top_p(0.9)
            .build();

        assert_eq!(req.model, "gpt-5");
        assert_eq!(req.input, "What's the weather?");
        assert_eq!(req.instructions, Some("Use the weather tool".to_string()));
        assert!(req.reasoning.is_some());
        assert!(req.tools.is_some());
        assert_eq!(req.tool_choice, Some("auto".to_string()));
        assert_eq!(req.max_output_tokens, Some(1000));
        assert_eq!(req.top_p, Some(0.9));
        assert!(req.text.is_some());
    }

    #[test]
    fn test_gpt5_request_serialization() {
        // A built request must survive a JSON round trip unchanged.
        let original = Gpt5RequestBuilder::new(Gpt5Model::Gpt5Nano)
            .input("Test input")
            .max_output_tokens(100)
            .build();

        let json_text = serde_json::to_string(&original).unwrap();
        let restored: Gpt5Request = serde_json::from_str(&json_text).unwrap();

        assert_eq!(restored.model, original.model);
        assert_eq!(restored.input, original.input);
        assert_eq!(restored.max_output_tokens, original.max_output_tokens);
    }

    #[test]
    fn test_gpt5_response_deserialization() {
        // A representative API payload must map onto Gpt5Response.
        let payload = json!({
            "id": "resp_123",
            "object": "response",
            "created_at": 1234567890,
            "status": "completed",
            "model": "gpt-5-nano",
            "output": [
                {
                    "type": "message",
                    "id": "msg_123",
                    "content": [
                        {
                            "type": "output_text",
                            "text": "Hello, world!"
                        }
                    ]
                }
            ],
            "usage": {
                "input_tokens": 10,
                "output_tokens": 5,
                "total_tokens": 15
            }
        });

        let resp: Gpt5Response = serde_json::from_value(payload).unwrap();

        assert_eq!(resp.id, Some("resp_123".to_string()));
        assert_eq!(resp.object, Some("response".to_string()));
        assert_eq!(resp.status, Some(Status::Completed));
        assert_eq!(resp.model, Some("gpt-5-nano".to_string()));
        assert!(resp.output.is_some());
        assert!(resp.usage.is_some());
    }

    #[test]
    fn test_gpt5_response_text_extraction() {
        // text() pulls the first output_text out of the output list.
        let payload = json!({
            "output": [
                {
                    "type": "message",
                    "content": [
                        {
                            "type": "output_text",
                            "text": "Hello, world!"
                        }
                    ]
                }
            ]
        });

        let resp: Gpt5Response = serde_json::from_value(payload).unwrap();

        assert_eq!(resp.text(), Some("Hello, world!".to_string()));
    }

    #[test]
    fn test_gpt5_response_function_calls() {
        // function_calls() returns only function_call outputs, not messages.
        let payload = json!({
            "output": [
                {
                    "type": "function_call",
                    "name": "get_weather",
                    "arguments": "{\"location\": \"Boston\"}"
                },
                {
                    "type": "message",
                    "content": [
                        {
                            "type": "output_text",
                            "text": "I'll check the weather for you."
                        }
                    ]
                }
            ]
        });

        let resp: Gpt5Response = serde_json::from_value(payload).unwrap();
        let calls = resp.function_calls();

        assert_eq!(calls.len(), 1);
        assert_eq!(calls[0].name, Some("get_weather".to_string()));
        assert_eq!(calls[0].arguments, Some("{\"location\": \"Boston\"}".to_string()));
    }

    #[test]
    fn test_gpt5_response_completion_status() {
        // is_completed() is true only for the "completed" status.
        let done: Gpt5Response = serde_json::from_value(json!({
            "status": "completed"
        }))
        .unwrap();
        assert!(done.is_completed());

        let pending: Gpt5Response = serde_json::from_value(json!({
            "status": "incomplete"
        }))
        .unwrap();
        assert!(!pending.is_completed());
    }

    #[test]
    fn test_gpt5_response_token_usage() {
        // Usage accessors surface both totals and the reasoning breakdown.
        let payload = json!({
            "usage": {
                "input_tokens": 10,
                "output_tokens": 5,
                "total_tokens": 15,
                "output_tokens_details": {
                    "reasoning_tokens": 3
                }
            }
        });

        let resp: Gpt5Response = serde_json::from_value(payload).unwrap();

        assert_eq!(resp.total_tokens(), 15);
        assert_eq!(resp.reasoning_tokens(), Some(3));
    }

    #[test]
    fn test_error_response_deserialization() {
        // An OpenAI error envelope must decode into OpenAiError.
        let payload = json!({
            "error": {
                "message": "Invalid API key",
                "type": "invalid_request_error",
                "param": "api_key",
                "code": "invalid_api_key"
            }
        });

        let err: OpenAiError = serde_json::from_value(payload).unwrap();

        assert_eq!(err.error.message, "Invalid API key");
        assert_eq!(err.error.error_type, "invalid_request_error");
        assert_eq!(err.error.param, Some("api_key".to_string()));
        assert_eq!(err.error.code, Some("invalid_api_key".to_string()));
    }

    #[test]
    fn test_builder_method_chaining() {
        // Arbitrary method chains, including ad-hoc params, must all stick.
        let req = Gpt5RequestBuilder::new(Gpt5Model::Gpt5)
            .input("Test")
            .instructions("Be helpful")
            .reasoning_effort(ReasoningEffort::High)
            .verbosity(VerbosityLevel::Medium)
            .max_output_tokens(500)
            .top_p(0.8)
            .param("custom_param", "custom_value")
            .build();

        assert_eq!(req.input, "Test");
        assert_eq!(req.instructions, Some("Be helpful".to_string()));
        assert!(req.reasoning.is_some());
        assert!(req.text.is_some());
        assert_eq!(req.max_output_tokens, Some(500));
        assert_eq!(req.top_p, Some(0.8));
        assert!(req.parameters.contains_key("custom_param"));
    }

    #[test]
    fn test_enum_equality() {
        // Derived PartialEq behaves as expected across variants.
        assert_eq!(ReasoningEffort::Low, ReasoningEffort::Low);
        assert_ne!(ReasoningEffort::Low, ReasoningEffort::High);

        assert_eq!(VerbosityLevel::Medium, VerbosityLevel::Medium);
        assert_ne!(VerbosityLevel::Low, VerbosityLevel::High);

        assert_eq!(Status::Completed, Status::Completed);
        assert_ne!(Status::Completed, Status::InProgress);
    }

    #[test]
    fn test_unknown_enum_values() {
        // Unrecognized wire strings survive via the Unknown variant.
        let custom = ReasoningEffort::Unknown("custom_effort".to_string());
        assert_eq!(serde_json::to_string(&custom).unwrap(), "\"custom_effort\"");

        let parsed: ReasoningEffort = serde_json::from_str("\"custom_effort\"").unwrap();
        assert_eq!(parsed, ReasoningEffort::Unknown("custom_effort".to_string()));
    }

    #[test]
    fn test_tool_creation() {
        // Plain struct construction keeps each field as given.
        let tool = Tool {
            tool_type: "function".to_string(),
            name: "test_function".to_string(),
            description: "A test function".to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "param1": {"type": "string"}
                }
            }),
        };

        assert_eq!(tool.tool_type, "function");
        assert_eq!(tool.name, "test_function");
        assert_eq!(tool.description, "A test function");
    }

    #[test]
    fn test_multiple_tools() {
        // Shared constructor for the two near-identical fixtures.
        let make_tool = |name: &str, description: &str| Tool {
            tool_type: "function".to_string(),
            name: name.to_string(),
            description: description.to_string(),
            parameters: json!({}),
        };

        let req = Gpt5RequestBuilder::new(Gpt5Model::Gpt5)
            .input("Use both tools")
            .tools(vec![
                make_tool("tool1", "First tool"),
                make_tool("tool2", "Second tool"),
            ])
            .tool_choice("auto")
            .build();

        assert!(req.tools.is_some());
        assert_eq!(req.tools.unwrap().len(), 2);
        assert_eq!(req.tool_choice, Some("auto".to_string()));
    }

    #[test]
    fn test_all_text_extraction() {
        // all_text() collects every message's text, in order.
        let payload = json!({
            "output": [
                {
                    "type": "message",
                    "content": [
                        {
                            "type": "output_text",
                            "text": "First message"
                        }
                    ]
                },
                {
                    "type": "message",
                    "content": [
                        {
                            "type": "output_text",
                            "text": "Second message"
                        }
                    ]
                }
            ]
        });

        let resp: Gpt5Response = serde_json::from_value(payload).unwrap();
        let texts = resp.all_text();

        assert_eq!(texts.len(), 2);
        assert_eq!(texts[0], "First message");
        assert_eq!(texts[1], "Second message");
    }

    #[test]
    fn test_has_error() {
        // has_error() keys off the presence of the error field.
        let with_error: Gpt5Response = serde_json::from_value(json!({
            "error": {"message": "Test error"}
        }))
        .unwrap();
        assert!(with_error.has_error());

        let without_error: Gpt5Response = serde_json::from_value(json!({
            "status": "completed"
        }))
        .unwrap();
        assert!(!without_error.has_error());
    }

    #[test]
    fn test_user_text_alias() {
        // user_text() is an alias for input().
        let req = Gpt5RequestBuilder::new(Gpt5Model::Gpt5Nano)
            .user_text("Hello from user_text method")
            .build();

        assert_eq!(req.input, "Hello from user_text method");
    }

    #[test]
    fn test_validation_warnings() {
        // Degenerate values (empty input, tiny token budget) must not
        // panic at build time; they are kept verbatim on the request.
        let req = Gpt5RequestBuilder::new(Gpt5Model::Gpt5Nano)
            .input("")
            .max_output_tokens(5)
            .build();

        assert_eq!(req.input, "");
        assert_eq!(req.max_output_tokens, Some(5));
    }
}