error_handling/error_handling.rs

#![allow(clippy::uninlined_format_args)]
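//! Error handling patterns for the `openai-ergonomic` client: basic result
//! handling, matching on error variants, rate-limit retries, token-limit
//! recovery, authentication and network failures, adding error context, and
//! recovery strategies (fallback, circuit breaker, hedged requests).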
use openai_ergonomic::{Client, Config, Error, Result};
use std::time::Duration;
use tokio::time::sleep;

#[tokio::main]
async fn main() -> Result<()> {
    println!("=== Error Handling Patterns ===\n");

    println!("1. Basic Error Handling:");
    basic_error_handling().await;

    println!("\n2. Pattern Matching on Errors:");
    pattern_matching_errors().await;

    println!("\n3. Rate Limit Handling:");
    rate_limit_handling().await;

    println!("\n4. Token Limit Handling:");
    token_limit_handling().await;

    println!("\n5. Authentication Error Handling:");
    auth_error_handling().await?;

    println!("\n6. Network Error Handling:");
    network_error_handling().await?;

    println!("\n7. Custom Error Context:");
    custom_error_context().await?;

    println!("\n8. Error Recovery Strategies:");
    error_recovery_strategies().await?;

    Ok(())
}

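/// Simplest pattern: match on each `Result` and print either the response
/// content or the error.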
async fn basic_error_handling() {
    let client = match Client::from_env() {
        Ok(client_builder) => client_builder.build(),
        Err(e) => {
            println!("Failed to create client: {}", e);
            return;
        }
    };

    match client.send_chat(client.chat_simple("Hello")).await {
        Ok(response) => {
            if let Some(content) = response.content() {
                println!("Success: {}", content);
            } else {
                println!("Success: (no content)");
            }
        }
        Err(e) => println!("Error: {}", e),
    }
}

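/// Match on specific `Error` variants (API, rate limit, authentication,
/// network, serialization, stream, invalid request, configuration) to react
/// differently to each failure mode.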
async fn pattern_matching_errors() {
    let Ok(client_builder) = Client::from_env() else {
        return;
    };
    let client = client_builder.build();

    let builder = client.chat().user("test");
    let result = client.send_chat(builder).await;

    match result {
        Ok(_) => println!("Unexpected success"),
        Err(e) => match e {
            Error::Api { message, .. } => {
                println!("API Error: {}", message);
            }
            Error::RateLimit(message) => {
                println!("Rate limited: {}", message);
            }
            Error::Authentication(message) => {
                println!("Authentication failed: {}", message);
            }
            Error::Http(source) => {
                println!("Network error: {}", source);
            }
            Error::Json(source) => {
                println!("Serialization error: {}", source);
            }
            Error::Stream(message) => {
                println!("Stream error: {}", message);
            }
            Error::InvalidRequest(message) => {
                println!("Invalid request: {}", message);
            }
            Error::Config(message) => {
                println!("Configuration error: {}", message);
            }
            _ => {
                println!("Other error: {}", e);
            }
        },
    }
}

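/// Retry on `Error::RateLimit` up to `MAX_RETRIES` times with a fixed
/// one-second wait; a production client would typically back off
/// exponentially or honour the server's retry hint.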
async fn rate_limit_handling() {
    const MAX_RETRIES: u32 = 3;

    let Ok(client_builder) = Client::from_env() else {
        return;
    };
    let client = client_builder.build();

    let mut retries = 0;

    loop {
        match client.send_chat(client.chat_simple("Hello")).await {
            Ok(response) => {
                if let Some(content) = response.content() {
                    println!("Success: {}", content);
                } else {
                    println!("Success: (no content)");
                }
                break;
            }
            Err(Error::RateLimit(_message)) => {
                if retries >= MAX_RETRIES {
                    println!("Max retries exceeded");
                    break;
                }

                let wait_time = Duration::from_secs(1);
                println!("Rate limited. Waiting {:?} before retry...", wait_time);
                sleep(wait_time).await;
                retries += 1;
            }
            Err(e) => {
                println!("Other error: {}", e);
                break;
            }
        }
    }
}

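/// Detect a token-related `Error::InvalidRequest` for an oversized prompt and
/// retry with a truncated version of the input.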
async fn token_limit_handling() {
    let Ok(client_builder) = Client::from_env() else {
        return;
    };
    let client = client_builder.build();

    // Deliberately oversized prompt; ASCII-only, so the byte slice below
    // always lands on a char boundary.
    let long_text = "Lorem ipsum ".repeat(10000);

    match client.send_chat(client.chat_simple(&long_text)).await {
        Ok(_) => println!("Processed long text successfully"),
        Err(Error::InvalidRequest(message)) if message.contains("token") => {
            println!("Token limit issue: {}", message);

            let truncated = &long_text[..1000];
            println!("Retrying with truncated text...");

            match client.send_chat(client.chat_simple(truncated)).await {
                Ok(response) => {
                    if let Some(content) = response.content() {
                        println!("Success with truncated: {}", content);
                    } else {
                        println!("Success with truncated: (no content)");
                    }
                }
                Err(e) => println!("Still failed: {}", e),
            }
        }
        Err(e) => println!("Other error: {}", e),
    }
}

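/// Provoke an authentication failure with a deliberately invalid API key and
/// print remediation suggestions.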
async fn auth_error_handling() -> Result<()> {
    let config = Config::builder().api_key("invalid-api-key").build();
    let invalid_client = Client::builder(config)?.build();

    match invalid_client
        .send_chat(invalid_client.chat_simple("Hello"))
        .await
    {
        Ok(_) => println!("Unexpected success"),
        Err(Error::Authentication(message)) => {
            println!("Authentication failed as expected: {}", message);

            println!("Suggestions:");
            println!("1. Check your OPENAI_API_KEY environment variable");
            println!("2. Verify API key at https://platform.openai.com/api-keys");
            println!("3. Ensure your API key has necessary permissions");
        }
        Err(e) => println!("Unexpected error type: {}", e),
    }

    Ok(())
}

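/// Use an HTTP client with a very short timeout so a network-level error is
/// likely, then walk through an exponential backoff schedule.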
async fn network_error_handling() -> Result<()> {
    use openai_ergonomic::Config;
    use reqwest_middleware::ClientBuilder;

    // A one-second timeout makes a network-level failure likely.
    let reqwest_client = reqwest::Client::builder()
        .timeout(Duration::from_secs(1))
        .build()
        .expect("Failed to build reqwest client");

    let http_client = ClientBuilder::new(reqwest_client).build();

    let config = Config::builder()
        .api_key("test-key")
        .http_client(http_client)
        .build();

    let client = Client::builder(config)?.build();

    match client.send_chat(client.chat_simple("Hello")).await {
        Ok(_) => println!("Unexpected success"),
        Err(Error::Http(source)) => {
            println!("Network error as expected: {}", source);

            // Sketch an exponential backoff schedule; the request itself is
            // not re-sent here.
            let mut backoff = Duration::from_millis(100);
            for attempt in 1..=3 {
                println!("Retry attempt {} after {:?}", attempt, backoff);
                sleep(backoff).await;
                backoff *= 2;
            }
        }
        Err(e) => println!("Other error: {}", e),
    }

    Ok(())
}

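/// Attach extra context (task description, timestamp, original error) via
/// `map_err` before propagating a failure to the caller.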
async fn custom_error_context() -> Result<()> {
    let client = Client::from_env()?.build();

    let result = client
        .send_chat(client.chat_simple("Analyze this data"))
        .await
        .map_err(|e| {
            eprintln!("Context: Failed during data analysis task");
            eprintln!("Timestamp: {:?}", std::time::SystemTime::now());
            eprintln!("Original error: {}", e);
            e
        })?;

    if let Some(content) = result.content() {
        println!("Result: {}", content);
    } else {
        println!("Result: (no content)");
    }
    Ok(())
}

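/// Combine three recovery strategies: model fallback, a simple circuit
/// breaker, and a hedged request that races two identical calls.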
async fn error_recovery_strategies() -> Result<()> {
    let client = Client::from_env()?.build();

    let result = try_with_fallback(&client, "gpt-4o", "gpt-3.5-turbo").await?;
    println!("Fallback strategy result: {}", result);

    let circuit_breaker = CircuitBreaker::new();
    if circuit_breaker.is_open() {
        println!("Circuit breaker is open, skipping API calls");
        return Ok(());
    }

    match client.send_chat(client.chat_simple("Test")).await {
        Ok(response) => {
            circuit_breaker.record_success();
            if let Some(content) = response.content() {
                println!("Circuit breaker success: {}", content);
            } else {
                println!("Circuit breaker success: (no content)");
            }
        }
        Err(e) => {
            circuit_breaker.record_failure();
            println!("Circuit breaker failure: {}", e);
        }
    }

    let hedge_result = hedged_request(&client).await?;
    println!("Hedged request result: {}", hedge_result);

    Ok(())
}

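/// Send a primary request and, on any error, retry with a second request.
/// Note that `_fallback` is not wired into the retry here; selecting a
/// different model would depend on the crate's chat builder API.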
async fn try_with_fallback(client: &Client, primary: &str, _fallback: &str) -> Result<String> {
    let builder = client.chat().user("Hello");
    match client.send_chat(builder).await {
        Ok(response) => Ok(response.content().unwrap_or("").to_string()),
        Err(e) => {
            println!("Primary model failed ({}): {}, trying fallback", primary, e);

            let fallback_builder = client.chat().user("Hello");
            client
                .send_chat(fallback_builder)
                .await
                .map(|r| r.content().unwrap_or("").to_string())
        }
    }
}

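/// Race two identical requests and return whichever finishes first; the
/// unfinished future is dropped (the `_` binding), cancelling the slower
/// request.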
async fn hedged_request(client: &Client) -> Result<String> {
    use futures::future::select;
    use std::pin::pin;

    let request1 = async {
        client
            .send_chat(client.chat_simple("Hello from request 1"))
            .await
    };
    let request2 = async {
        client
            .send_chat(client.chat_simple("Hello from request 2"))
            .await
    };

    let fut1 = pin!(request1);
    let fut2 = pin!(request2);

    match select(fut1, fut2).await {
        futures::future::Either::Left((result, _)) => {
            println!("Request 1 completed first");
            result.map(|r| r.content().unwrap_or("").to_string())
        }
        futures::future::Either::Right((result, _)) => {
            println!("Request 2 completed first");
            result.map(|r| r.content().unwrap_or("").to_string())
        }
    }
}

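/// Minimal circuit breaker: opens after `threshold` consecutive failures and
/// resets the failure count on the first success.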
struct CircuitBreaker {
    failures: std::sync::atomic::AtomicU32,
    threshold: u32,
}

impl CircuitBreaker {
    const fn new() -> Self {
        Self {
            failures: std::sync::atomic::AtomicU32::new(0),
            threshold: 3,
        }
    }

    fn is_open(&self) -> bool {
        self.failures.load(std::sync::atomic::Ordering::Relaxed) >= self.threshold
    }

    fn record_success(&self) {
        self.failures.store(0, std::sync::atomic::Ordering::Relaxed);
    }

    fn record_failure(&self) {
        self.failures
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
    }
}