error_forge/recovery/retry.rs

use std::marker::PhantomData;
use std::time::Duration;
use std::thread;
use crate::recovery::backoff::{Backoff, ExponentialBackoff, FixedBackoff, LinearBackoff};
use crate::error::ForgeError;

/// Predicate function to determine if an error is retryable
pub type RetryPredicate<E> = Box<dyn Fn(&E) -> bool + Send + Sync + 'static>;

/// Enum to hold different backoff strategy types
pub enum BackoffStrategy {
    Exponential(ExponentialBackoff),
    Linear(LinearBackoff),
    Fixed(FixedBackoff),
}

impl BackoffStrategy {
    fn next_delay(&self, attempt: usize) -> Duration {
        match self {
            BackoffStrategy::Exponential(b) => b.next_delay(attempt),
            BackoffStrategy::Linear(b) => b.next_delay(attempt),
            BackoffStrategy::Fixed(b) => b.next_delay(attempt),
        }
    }
}
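
// Usage sketch (added for illustration, not part of the original file): the
// enum above lets RetryExecutor hold any of the three backoff kinds behind a
// single type and forward next_delay to whichever strategy it wraps. The
// concrete delay values come from crate::recovery::backoff and are not
// assumed here.
#[cfg(test)]
mod backoff_strategy_sketch {
    use super::*;
    use std::time::Duration;

    #[test]
    fn dispatches_next_delay_to_the_wrapped_strategy() {
        let strategies = [
            BackoffStrategy::Exponential(ExponentialBackoff::default()),
            BackoffStrategy::Linear(LinearBackoff::default()),
            BackoffStrategy::Fixed(FixedBackoff::new(5)),
        ];

        // Each variant should yield a delay for the first few attempts; the
        // exact durations depend on the backoff implementations.
        for strategy in &strategies {
            for attempt in 0..3 {
                let _delay: Duration = strategy.next_delay(attempt);
            }
        }
    }
}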

/// Executor for retry operations
pub struct RetryExecutor<E> {
    max_retries: usize,
    backoff: BackoffStrategy,
    retry_if: Option<RetryPredicate<E>>,
    _marker: PhantomData<E>,
}

impl<E> RetryExecutor<E>
where
    E: std::error::Error + 'static,
{
    /// Create a new retry executor with an exponential backoff strategy
    pub fn new_exponential() -> Self {
        Self {
            max_retries: 3,
            backoff: BackoffStrategy::Exponential(ExponentialBackoff::default()),
            retry_if: None,
            _marker: PhantomData,
        }
    }

    /// Create a new retry executor with a linear backoff strategy
    pub fn new_linear() -> Self {
        Self {
            max_retries: 3,
            backoff: BackoffStrategy::Linear(LinearBackoff::default()),
            retry_if: None,
            _marker: PhantomData,
        }
    }

    /// Create a new retry executor with a fixed backoff strategy
    pub fn new_fixed(delay_ms: u64) -> Self {
        Self {
            max_retries: 3,
            backoff: BackoffStrategy::Fixed(FixedBackoff::new(delay_ms)),
            retry_if: None,
            _marker: PhantomData,
        }
    }

    /// Set the maximum number of retries
    pub fn with_max_retries(mut self, max_retries: usize) -> Self {
        self.max_retries = max_retries;
        self
    }

    /// Set a predicate to determine if an error should be retried
    pub fn with_retry_if<F>(mut self, predicate: F) -> Self
    where
        F: Fn(&E) -> bool + Send + Sync + 'static,
    {
        self.retry_if = Some(Box::new(predicate));
        self
    }

    /// Execute a fallible operation with retries
    pub fn retry<F, T>(&self, mut operation: F) -> Result<T, E>
    where
        F: FnMut() -> Result<T, E>,
    {
        let mut attempt = 0;
        loop {
            match operation() {
                Ok(value) => return Ok(value),
                Err(err) => {
                    // Check if we've reached max retries
                    if attempt >= self.max_retries {
                        return Err(err);
                    }

                    // Check if this error is retryable
                    let should_retry = match &self.retry_if {
                        Some(predicate) => predicate(&err),
                        None => true,
                    };

                    if !should_retry {
                        return Err(err);
                    }

                    // Wait according to backoff strategy
                    let delay = self.backoff.next_delay(attempt);
                    thread::sleep(delay);

                    attempt += 1;
                }
            }
        }
    }

    /// Execute a fallible operation with retries using a custom error handler
    pub fn retry_with_handler<F, H, T>(&self, mut operation: F, mut on_error: H) -> Result<T, E>
    where
        F: FnMut() -> Result<T, E>,
        H: FnMut(&E, usize, Duration),
    {
        let mut attempt = 0;
        loop {
            match operation() {
                Ok(value) => return Ok(value),
                Err(err) => {
                    // Check if we've reached max retries
                    if attempt >= self.max_retries {
                        return Err(err);
                    }

                    // Check if this error is retryable
                    let should_retry = match &self.retry_if {
                        Some(predicate) => predicate(&err),
                        None => true,
                    };

                    if !should_retry {
                        return Err(err);
                    }

                    // Get the delay for this attempt
                    let delay = self.backoff.next_delay(attempt);

                    // Call the error handler
                    on_error(&err, attempt, delay);

                    // Wait according to backoff strategy
                    thread::sleep(delay);

                    attempt += 1;
                }
            }
        }
    }
}
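
// Usage sketch (added for illustration, not part of the original file): a
// minimal RetryExecutor run, assuming this file sits in the error_forge crate
// so the items above are in scope. std::io::Error stands in for a real
// application error type and the 1 ms fixed delay just keeps the example fast.
#[cfg(test)]
mod retry_executor_usage_sketch {
    use super::*;
    use std::io;

    #[test]
    fn retries_transient_errors_until_success() {
        // Fail twice with a retryable error, then succeed.
        let mut remaining_failures = 2;

        let executor = RetryExecutor::<io::Error>::new_fixed(1)
            .with_max_retries(5)
            .with_retry_if(|err| err.kind() == io::ErrorKind::TimedOut);

        let result = executor.retry(|| {
            if remaining_failures > 0 {
                remaining_failures -= 1;
                Err(io::Error::new(io::ErrorKind::TimedOut, "transient timeout"))
            } else {
                Ok("connected")
            }
        });

        assert_eq!(result.unwrap(), "connected");
        assert_eq!(remaining_failures, 0);
    }
}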

/// Policy for retrying operations
pub struct RetryPolicy {
    max_retries: usize,
    backoff_type: BackoffType,
}

/// Available backoff types for retry policy
pub enum BackoffType {
    Exponential,
    Linear,
    Fixed(u64),
}

impl RetryPolicy {
    /// Create a new retry policy with exponential backoff
    pub fn new_exponential() -> Self {
        Self {
            max_retries: 3,
            backoff_type: BackoffType::Exponential,
        }
    }

    /// Create a new retry policy with linear backoff
    pub fn new_linear() -> Self {
        Self {
            max_retries: 3,
            backoff_type: BackoffType::Linear,
        }
    }

    /// Create a new retry policy with fixed backoff
    pub fn new_fixed(delay_ms: u64) -> Self {
        Self {
            max_retries: 3,
            backoff_type: BackoffType::Fixed(delay_ms),
        }
    }

    /// Set the maximum number of retries
    pub fn with_max_retries(mut self, max_retries: usize) -> Self {
        self.max_retries = max_retries;
        self
    }

    /// Create a retry executor for the given error type
    pub fn executor<E>(&self) -> RetryExecutor<E>
    where
        E: std::error::Error + 'static,
    {
        let executor = match self.backoff_type {
            BackoffType::Exponential => RetryExecutor::new_exponential(),
            BackoffType::Linear => RetryExecutor::new_linear(),
            BackoffType::Fixed(delay_ms) => RetryExecutor::new_fixed(delay_ms),
        };

        executor.with_max_retries(self.max_retries)
    }

    /// Create a retry executor specifically for ForgeError types
    pub fn forge_executor<E>(&self) -> RetryExecutor<E>
    where
        E: ForgeError,
    {
        self.executor::<E>()
            .with_retry_if(|err| err.is_retryable())
    }

    /// Execute a fallible operation with retries
    pub fn retry<F, T, E>(&self, operation: F) -> Result<T, E>
    where
        F: FnMut() -> Result<T, E>,
        E: std::error::Error + 'static,
    {
        self.executor::<E>().retry(operation)
    }
}

impl Default for RetryPolicy {
    fn default() -> Self {
        Self::new_exponential()
    }
}
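
// Usage sketch (added for illustration, not part of the original file): a
// RetryPolicy handing out a typed RetryExecutor, with retry_with_handler
// observing each failed attempt. std::io::Error is again a stand-in error
// type and the 1 ms fixed delay keeps the example fast.
#[cfg(test)]
mod retry_policy_usage_sketch {
    use super::*;
    use std::io;

    #[test]
    fn policy_builds_executor_and_reports_each_attempt() {
        let policy = RetryPolicy::new_fixed(1).with_max_retries(2);

        let mut attempts_seen = Vec::new();
        let result: Result<(), io::Error> = policy.executor::<io::Error>().retry_with_handler(
            || Err(io::Error::new(io::ErrorKind::Other, "always fails")),
            |_err, attempt, _delay| attempts_seen.push(attempt),
        );

        // Two retries are attempted before the final error is surfaced.
        assert!(result.is_err());
        assert_eq!(attempts_seen, vec![0, 1]);
    }
}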