error_rail/async_ext/retry.rs
//! Async retry utilities with runtime-neutral design.
//!
//! This module provides retry policies and functions that work with any async
//! runtime by accepting a sleep function as a parameter.
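//!
//! Any closure with the shape `Fn(Duration) -> impl Future<Output = ()>` can act
//! as the sleep function. A minimal sketch (Tokio shown; `async_std::task::sleep`
//! slots in the same way):
//!
//! ```rust,ignore
//! let sleep_fn = |d: core::time::Duration| tokio::time::sleep(d);
//! ```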

use core::future::Future;
use core::time::Duration;

use crate::traits::TransientError;
use crate::types::ComposableError;

/// Defines a retry policy for async operations.
///
/// Implementations determine when and how long to wait between retry attempts.
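///
/// # Example
///
/// A minimal sketch of a custom policy, assuming `RetryPolicy` is re-exported
/// from `async_ext` alongside the built-in policies (the `Immediate` type is
/// illustrative, not part of this crate):
///
/// ```rust
/// use error_rail::async_ext::RetryPolicy;
/// use core::time::Duration;
///
/// #[derive(Clone)]
/// struct Immediate { max_attempts: u32 }
///
/// impl RetryPolicy for Immediate {
///     fn next_delay(&mut self, attempt: u32) -> Option<Duration> {
///         // Retry up to `max_attempts` times with no delay between attempts.
///         (attempt < self.max_attempts).then_some(Duration::ZERO)
///     }
///
///     fn reset(&mut self) {} // stateless
/// }
/// ```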
pub trait RetryPolicy: Clone {
    /// Returns the delay before the next retry attempt, or `None` to stop retrying.
    ///
    /// # Arguments
    ///
    /// * `attempt` - The current attempt number (0-indexed)
    ///
    /// # Returns
    ///
    /// - `Some(Duration)` - Wait this duration before retrying
    /// - `None` - Stop retrying (max attempts reached or policy exhausted)
    fn next_delay(&mut self, attempt: u32) -> Option<Duration>;

    /// Resets the policy to its initial state.
    fn reset(&mut self);
}

/// Exponential backoff retry policy.
///
/// Each retry waits exponentially longer than the previous one, up to a maximum
/// delay. This is the recommended policy for most network operations.
///
/// # Example
///
/// ```rust
/// use error_rail::async_ext::ExponentialBackoff;
/// use core::time::Duration;
///
/// let policy = ExponentialBackoff {
///     initial_delay: Duration::from_millis(100),
///     max_delay: Duration::from_secs(10),
///     max_attempts: 5,
///     multiplier: 2.0,
/// };
///
/// // Delays: 100ms, 200ms, 400ms, 800ms, 1600ms (each capped at max_delay)
/// ```
#[derive(Clone, Debug)]
pub struct ExponentialBackoff {
    /// Initial delay before first retry.
    pub initial_delay: Duration,
    /// Maximum delay between retries.
    pub max_delay: Duration,
    /// Maximum number of retry attempts.
    pub max_attempts: u32,
    /// Multiplier applied to delay after each attempt.
    pub multiplier: f64,
}

impl Default for ExponentialBackoff {
    fn default() -> Self {
        Self {
            initial_delay: Duration::from_millis(100),
            max_delay: Duration::from_secs(30),
            max_attempts: 5,
            multiplier: 2.0,
        }
    }
}

impl ExponentialBackoff {
    /// Creates a new exponential backoff policy with default settings.
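    ///
    /// # Example
    ///
    /// The `with_*` setters below chain, so defaults can be overridden selectively:
    ///
    /// ```rust
    /// use error_rail::async_ext::ExponentialBackoff;
    /// use core::time::Duration;
    ///
    /// let policy = ExponentialBackoff::new()
    ///     .with_initial_delay(Duration::from_millis(50))
    ///     .with_max_attempts(3);
    /// ```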
    #[inline]
    pub fn new() -> Self {
        Self::default()
    }

    /// Sets the initial delay.
    #[inline]
    pub fn with_initial_delay(mut self, delay: Duration) -> Self {
        self.initial_delay = delay;
        self
    }

    /// Sets the maximum delay.
    #[inline]
    pub fn with_max_delay(mut self, delay: Duration) -> Self {
        self.max_delay = delay;
        self
    }

    /// Sets the maximum number of attempts.
    #[inline]
    pub fn with_max_attempts(mut self, attempts: u32) -> Self {
        self.max_attempts = attempts;
        self
    }

    /// Sets the multiplier.
    #[inline]
    pub fn with_multiplier(mut self, multiplier: f64) -> Self {
        self.multiplier = multiplier;
        self
    }
}

impl RetryPolicy for ExponentialBackoff {
    fn next_delay(&mut self, attempt: u32) -> Option<Duration> {
        if attempt >= self.max_attempts {
            return None;
        }
        // delay = initial_delay * multiplier^attempt, capped at max_delay
        let delay = Duration::from_secs_f64(
            self.initial_delay.as_secs_f64() * self.multiplier.powi(attempt as i32),
        );
        Some(delay.min(self.max_delay))
    }

    fn reset(&mut self) {
        // ExponentialBackoff is stateless, nothing to reset
    }
}

/// Fixed delay retry policy.
///
/// Waits the same duration between each retry attempt. This is simpler than
/// exponential backoff but may not be suitable for services under heavy load.
///
/// # Example
///
/// ```rust
/// use error_rail::async_ext::FixedDelay;
/// use core::time::Duration;
///
/// let policy = FixedDelay::new(Duration::from_millis(500), 3);
///
/// // Delays: 500ms, 500ms, 500ms (then stops)
/// ```
#[derive(Clone, Debug)]
pub struct FixedDelay {
    /// Delay between retry attempts.
    pub delay: Duration,
    /// Maximum number of retry attempts.
    pub max_attempts: u32,
}

impl FixedDelay {
    /// Creates a new fixed delay policy.
    #[inline]
    pub fn new(delay: Duration, max_attempts: u32) -> Self {
        Self { delay, max_attempts }
    }
}

impl RetryPolicy for FixedDelay {
    fn next_delay(&mut self, attempt: u32) -> Option<Duration> {
        if attempt >= self.max_attempts {
            None
        } else {
            Some(self.delay)
        }
    }

    fn reset(&mut self) {}
}

/// Retries an async operation according to a policy when transient errors occur.
///
/// This function is **runtime-neutral**: it accepts a `sleep_fn` parameter that
/// performs the actual sleeping, allowing it to work with any async runtime.
///
/// # Arguments
///
/// * `operation` - A closure that returns the future to retry
/// * `policy` - The retry policy to use
/// * `sleep_fn` - A function that returns a sleep future for the given duration
///
/// # Example
///
/// ```rust,ignore
/// use error_rail::async_ext::{retry_with_policy, ExponentialBackoff};
///
/// // With Tokio
/// let result = retry_with_policy(
///     || fetch_data(),
///     ExponentialBackoff::default(),
///     |d| tokio::time::sleep(d),
/// ).await;
///
/// // With async-std
/// let result = retry_with_policy(
///     || fetch_data(),
///     ExponentialBackoff::default(),
///     |d| async_std::task::sleep(d),
/// ).await;
/// ```
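///
/// Because the sleep is injected, tests can drive the loop with a no-op sleep on
/// any executor. A sketch, assuming a `MyError` type implementing [`TransientError`]
/// and `futures` available as a dev-dependency:
///
/// ```rust,ignore
/// use error_rail::async_ext::{retry_with_policy, FixedDelay};
/// use core::time::Duration;
///
/// let result = futures::executor::block_on(retry_with_policy(
///     || async { Err::<(), MyError>(MyError::Transient) },
///     FixedDelay::new(Duration::ZERO, 2),
///     |_d| async {}, // no-op sleep resolves immediately
/// ));
/// assert!(result.is_err()); // retries exhausted after 3 attempts
/// ```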
pub async fn retry_with_policy<F, Fut, T, E, P, S, SFut>(
    mut operation: F,
    mut policy: P,
    sleep_fn: S,
) -> Result<T, ComposableError<E>>
where
    F: FnMut() -> Fut,
    Fut: Future<Output = Result<T, E>>,
    E: TransientError,
    P: RetryPolicy,
    S: Fn(Duration) -> SFut,
    SFut: Future<Output = ()>,
{
    policy.reset();
    let mut attempt = 0u32;

    loop {
        match operation().await {
            Ok(value) => return Ok(value),
            Err(e) if e.is_transient() => {
                if let Some(delay) = policy.next_delay(attempt) {
                    sleep_fn(delay).await;
                    attempt += 1;
                    continue;
                }
                // Retries exhausted; `attempt + 1` counts the initial attempt too.
                return Err(ComposableError::new(e)
                    .with_context(crate::context!("retries exhausted after {} attempts", attempt + 1)));
            },
            Err(e) => {
                // Permanent error, no retry
                return Err(ComposableError::new(e)
                    .with_context(crate::context!("permanent error, no retry")));
            },
        }
    }
}

/// Result of a retry operation with metadata about attempts.
///
/// This struct provides detailed information about a retry operation,
/// including the final result and statistics about the retry process.
///
/// # Type Parameters
///
/// * `T` - The success type of the operation
/// * `E` - The error type of the operation
///
/// # Example
///
/// ```rust,ignore
/// use error_rail::async_ext::{retry_with_metadata, ExponentialBackoff, RetryResult};
///
/// let retry_result: RetryResult<Data, ApiError> = retry_with_metadata(
///     || fetch_data(),
///     ExponentialBackoff::default(),
///     |d| tokio::time::sleep(d),
/// ).await;
///
/// if retry_result.attempts > 1 {
///     log::warn!(
///         "Operation succeeded after {} attempts (waited {:?})",
///         retry_result.attempts,
///         retry_result.total_wait_time
///     );
/// }
/// ```
#[derive(Debug)]
pub struct RetryResult<T, E> {
    /// The final result of the operation.
    ///
    /// Contains `Ok(T)` if the operation eventually succeeded, or
    /// `Err(ComposableError<E>)` if all retry attempts were exhausted
    /// or a permanent error occurred.
    pub result: Result<T, ComposableError<E>>,

    /// Total number of attempts made.
    ///
    /// This is always at least 1 (the initial attempt). A value greater
    /// than 1 indicates that retries occurred.
    pub attempts: u32,

    /// Total time spent waiting between retries.
    ///
    /// This does not include the time spent executing the operation itself,
    /// only the delays between retry attempts. A value of `Duration::ZERO`
    /// indicates either immediate success or immediate permanent failure.
    pub total_wait_time: Duration,
}

/// Retries an operation with detailed result metadata.
///
/// Similar to [`retry_with_policy`], but returns additional information about
/// the retry process, including the number of attempts made and total wait time.
///
/// # Arguments
///
/// * `operation` - A closure that returns the future to retry
/// * `policy` - The retry policy to use
/// * `sleep_fn` - A function that returns a sleep future for the given duration
///
/// # Returns
///
/// A [`RetryResult`] containing:
/// - The final result (success or error with context)
/// - Total number of attempts made
/// - Total time spent waiting between retries
///
/// # Example
///
/// ```rust,ignore
/// use error_rail::async_ext::{retry_with_metadata, ExponentialBackoff};
///
/// let retry_result = retry_with_metadata(
///     || fetch_data(),
///     ExponentialBackoff::default(),
///     |d| tokio::time::sleep(d),
/// ).await;
///
/// println!("Attempts: {}", retry_result.attempts);
/// println!("Total wait time: {:?}", retry_result.total_wait_time);
///
/// match retry_result.result {
///     Ok(data) => println!("Success: {:?}", data),
///     Err(e) => println!("Failed after retries: {:?}", e),
/// }
/// ```
pub async fn retry_with_metadata<F, Fut, T, E, P, S, SFut>(
    mut operation: F,
    mut policy: P,
    sleep_fn: S,
) -> RetryResult<T, E>
where
    F: FnMut() -> Fut,
    Fut: Future<Output = Result<T, E>>,
    E: TransientError,
    P: RetryPolicy,
    S: Fn(Duration) -> SFut,
    SFut: Future<Output = ()>,
{
    policy.reset();
    let mut attempt = 0u32;
    let mut total_wait_time = Duration::ZERO;

    let result = loop {
        match operation().await {
            Ok(value) => break Ok(value),
            Err(e) if e.is_transient() => {
                if let Some(delay) = policy.next_delay(attempt) {
                    total_wait_time += delay;
                    sleep_fn(delay).await;
                    attempt += 1;
                    continue;
                }
                // Retries exhausted; `attempt + 1` counts the initial attempt too.
                break Err(ComposableError::new(e)
                    .with_context(crate::context!("retries exhausted after {} attempts", attempt + 1)));
            },
            Err(e) => {
                break Err(ComposableError::new(e)
                    .with_context(crate::context!("permanent error, no retry")));
            },
        }
    };

    RetryResult { result, attempts: attempt + 1, total_wait_time }
}