// conquer_util/backoff.rs

//! Convenience type for ergonomically pursuing an exponential back-off busy
//! waiting strategy in order to reduce contention on shared memory and caches
//! in a concurrent environment.

// NOTE: as an outer attribute this previously applied only to the single
// `use` item below it; as an inner attribute it denies `unsafe` code in the
// entire module, which is the evident intent.
#![deny(unsafe_code)]

#[cfg(feature = "std")]
use std::time::{Duration, Instant};

use core::cell::RefCell;
use core::fmt;
use core::hint;
use core::sync::atomic;
#[cfg(feature = "random")]
use core::sync::atomic::{AtomicUsize, Ordering};

#[cfg(feature = "random")]
use rand::{rngs::SmallRng, Rng, SeedableRng};
17
18////////////////////////////////////////////////////////////////////////////////////////////////////
19// BackOff
20////////////////////////////////////////////////////////////////////////////////////////////////////
21
/// A type for exponential back-off in tight loops.
///
/// In concurrent environments it can often be beneficial to back off from
/// accessing shared variables in loops in order to reduce contention and
/// improve performance for all participating threads by spinning for a short
/// amount of time.
#[derive(Clone)]
pub struct BackOff {
    // Interior mutability lets `spin`/`reset` advance the strategy state
    // through `&self`; `RefCell` (not an atomic) means the type is `!Sync`,
    // i.e. one `BackOff` instance is meant to be used by a single thread.
    strategy: RefCell<Strategy>,
}
32
/********** impl Default **************************************************************************/
34
35impl Default for BackOff {
36 #[inline]
37 fn default() -> Self {
38 Self::new()
39 }
40}
41
42/********** impl inherent *************************************************************************/
43
44impl BackOff {
45 /// Creates a new [`BackOff`] instance with a fixed exponential back-off
46 /// strategy.
47 #[inline]
48 pub const fn new() -> Self {
49 Self { strategy: RefCell::new(Strategy::constant()) }
50 }
51
52 /// Spin once.
53 ///
54 /// This is a convenience wrapper for
55 /// [`spin_loop_hint`][core::sync::atomic::spin_loop_hint], but will never
56 /// compile to only a nop on platforms, that don't offer a `wait`-like CPU
57 /// instruction, but will instead result in an empty function call.
58 #[inline(never)]
59 pub fn spin_once() {
60 atomic::spin_loop_hint();
61 }
62
63 /// Resets the [`BackOff`] instance to its initial state.
64 #[inline]
65 pub fn reset(&self) {
66 self.strategy.borrow_mut().reset();
67 }
68
69 /// Spins for a bounded number of steps
70 ///
71 /// On CPUs that support such instructions, in each step the processor will
72 /// be instructed to deliberately slow down, e.g. using the `pause`
73 /// instruction on x86, which can also save energy.
74 ///
75 /// Each invocation of this method exponentially increases the number of
76 /// spin cycles until a point at which further spinning is no longer
77 /// advisable and other strategies, such as yielding the current thread to
78 /// the OS, should be preferred.
79 /// From this point on, the number of spin cycles remains constant with each
80 /// further invocation of [`spin`][BackOff::spin].
81 ///
82 /// Whether this point has been reached can be determined through the
83 /// [`advise_yield`][BackOff::advise_yield] method.
84 #[inline]
85 pub fn spin(&self) {
86 let steps = self.strategy.borrow_mut().exponential_backoff();
87 for _ in 0..steps {
88 Self::spin_once();
89 }
90 }
91
92 /// Returns `true` if further spinning is not advisable and other means such
93 /// as voluntarily yielding the current thread could be more efficient.
94 ///
95 /// # Examples
96 ///
97 /// Back-off exponentially until it is no longer advisable.
98 ///
99 /// ```
100 /// use conquer_util::BackOff;
101 ///
102 /// let mut backoff = BackOff::new();
103 /// while !backoff.advise_yield() {
104 /// backoff.spin();
105 /// }
106 /// ```
107 ///
108 /// Repedeatly check a condition and either back-off exponentially or yield
109 /// the current thread, if the condition is not yet met.
110 ///
111 /// ```
112 /// use conquer_util::BackOff;
113 ///
114 /// # let cond = true;
115 ///
116 /// let mut backoff = BackOff::new();
117 /// while !cond {
118 /// if backoff.advise_yield() {
119 /// std::thread::yield_now();
120 /// } else {
121 /// backoff.spin();
122 /// }
123 /// }
124 /// ```
125 ///
126 /// # Notes
127 ///
128 /// On an Intel(R) i5 with 2.60 GHz a full back-off cycle has been measured
129 /// to take approximately 750 nanoseconds
130 #[inline]
131 pub fn advise_yield(&self) -> bool {
132 self.strategy.borrow().advise_yield()
133 }
134}
135
#[cfg(feature = "random")]
impl BackOff {
    /// Creates a new [`BackOff`] instance with a randomized exponential
    /// back-off strategy.
    // `#[inline]` added for consistency with the other small constructors and
    // methods of this type (`new`, `reset`, `spin`, `advise_yield`).
    #[inline]
    pub fn random() -> Self {
        Self { strategy: RefCell::new(Strategy::random()) }
    }

    /// Creates a new [`BackOff`] instance with a randomized exponential
    /// back-off strategy using the given `seed` value.
    ///
    /// A fixed seed makes the produced back-off sequence reproducible.
    #[inline]
    pub fn random_with_seed(seed: u64) -> Self {
        Self { strategy: RefCell::new(Strategy::random_with_seed(seed)) }
    }
}
150
#[cfg(feature = "std")]
impl BackOff {
    /// Spins *at least* for the specified `dur`.
    ///
    /// If a very short duration is specified, this function may spin for a
    /// longer, platform-specific minimum time.
    pub fn spin_for(dur: Duration) {
        // Busy-wait until the deadline computed from the current time passes.
        let deadline = Instant::now() + dur;
        while Instant::now() < deadline {
            Self::spin_once();
        }
    }

    /// Cooperatively yields the current thread.
    ///
    /// This is a convenience wrapper for
    /// [`thread::yield_now`][std::thread::yield_now]
    #[inline]
    pub fn yield_now() {
        std::thread::yield_now();
    }
}
175
176/********** impl Debug ****************************************************************************/
177
178impl fmt::Debug for BackOff {
179 #[inline]
180 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
181 f.debug_struct("BackOff").field("advise_yield", &self.advise_yield()).finish()
182 }
183}
184
185/********** impl Display **************************************************************************/
186
187impl fmt::Display for BackOff {
188 #[inline]
189 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
190 write!(f, "advise yield: {}", self.advise_yield())
191 }
192}
193
194////////////////////////////////////////////////////////////////////////////////////////////////////
195// Strategy
196////////////////////////////////////////////////////////////////////////////////////////////////////
197
/// The internal state machine driving a [`BackOff`] instance.
#[derive(Clone, Debug)]
enum Strategy {
    /// Deterministic exponential back-off: each step spins `2^pow` times.
    Const {
        // Current exponent; advanced by `exponential_backoff` up to
        // `SPIN_LIMIT_POW`.
        pow: u32,
    },
    /// Randomized exponential back-off: each step spins a random number of
    /// times drawn from `[2^(pow - 1), 2^pow)`.
    #[cfg(feature = "random")]
    Random {
        // Current exponent; advanced like in the `Const` variant.
        pow: u32,
        // Per-instance RNG; seeded in `random`/`random_with_seed`.
        rng: SmallRng,
    },
}
209
210/********** impl inherent *************************************************************************/
211
212impl Strategy {
213 const INIT_POW: u32 = 1;
214 const SPIN_LIMIT_POW: u32 = 7;
215
216 #[inline]
217 const fn constant() -> Self {
218 Strategy::Const { pow: Self::INIT_POW }
219 }
220
221 #[inline]
222 fn exponential_backoff(&mut self) -> u32 {
223 match self {
224 Strategy::Const { pow } => {
225 let steps = 1 << *pow;
226
227 if *pow < Self::SPIN_LIMIT_POW {
228 *pow += 1;
229 }
230
231 steps
232 }
233 #[cfg(feature = "random")]
234 Strategy::Random { pow, rng } => {
235 let low = 1 << (*pow - 1);
236 let high = 1 << *pow;
237
238 if *pow < Self::SPIN_LIMIT_POW {
239 *pow += 1;
240 }
241
242 rng.gen_range(low, high)
243 }
244 }
245 }
246
247 #[inline]
248 fn reset(&mut self) {
249 let pow = match self {
250 Strategy::Const { pow } => pow,
251 #[cfg(feature = "random")]
252 Strategy::Random { pow, .. } => pow,
253 };
254
255 *pow = Self::INIT_POW;
256 }
257
258 #[inline]
259 fn advise_yield(&self) -> bool {
260 let pow = match self {
261 Strategy::Const { pow } => *pow,
262 #[cfg(feature = "random")]
263 Strategy::Random { pow, .. } => *pow,
264 };
265
266 pow == Self::SPIN_LIMIT_POW
267 }
268}
269
#[cfg(feature = "random")]
impl Strategy {
    /// Creates a randomized strategy seeded from a global, atomically
    /// advanced seed, so distinct instances draw from different RNG streams.
    #[inline]
    fn random() -> Self {
        // Arbitrary pointer-width-specific starting value for the global seed.
        #[cfg(target_pointer_width = "32")]
        const INIT_SEED: usize = 0x608c_dbfc;
        #[cfg(target_pointer_width = "64")]
        const INIT_SEED: usize = 0xd1dc_dceb_2fb4_70f3;
        // Every constructed instance advances the global seed by this amount.
        const SEED_INCREMENT: usize = 51;

        static GLOBAL_SEED: AtomicUsize = AtomicUsize::new(INIT_SEED);
        // Relaxed suffices here: only the value fetched matters, no ordering
        // with other memory operations is required.
        let seed = GLOBAL_SEED.fetch_add(SEED_INCREMENT, Ordering::Relaxed) as u64;

        Strategy::Random { pow: Self::INIT_POW, rng: SmallRng::seed_from_u64(seed) }
    }

    /// Creates a randomized strategy from the given fixed `seed`, which makes
    /// the resulting spin sequence reproducible.
    #[inline]
    fn random_with_seed(seed: u64) -> Self {
        Strategy::Random { pow: Self::INIT_POW, rng: SmallRng::seed_from_u64(seed) }
    }
}
291
#[cfg(test)]
mod tests {
    use super::{BackOff, Strategy};

    /// Drives `backoff` through a complete back-off cycle and returns the
    /// total number of steps, counting the final (yield-advised) one.
    fn run_full_cycle(backoff: &BackOff) -> u32 {
        let mut count = 0;
        while !backoff.advise_yield() {
            backoff.spin();
            count += 1;
        }
        count + 1
    }

    #[test]
    fn spin_full_const() {
        assert_eq!(run_full_cycle(&BackOff::new()), Strategy::SPIN_LIMIT_POW);
    }

    #[cfg(feature = "random")]
    #[test]
    fn spin_full_random() {
        assert_eq!(run_full_cycle(&BackOff::random()), Strategy::SPIN_LIMIT_POW);
    }
}
320}