minitrace/util/
object_pool.rs

1// Copyright 2022 TiKV Project Authors. Licensed under Apache-2.0.
2
3use std::cell::Cell;
4use std::mem::ManuallyDrop;
5use std::ops::Deref;
6use std::ops::DerefMut;
7
8use parking_lot::Mutex;
9
thread_local! {
    // Per-thread flag: when `true`, objects dropped on this thread are
    // returned to their pool instead of being discarded (see `Pool::recycle`).
    // Defaults to `false`; flipped on via `enable_reuse_in_current_thread`.
    static REUSABLE: Cell<bool> = const { Cell::new(false) };
}
13
14pub fn enable_reuse_in_current_thread() {
15    REUSABLE.with(|r| r.set(true));
16}
17
18fn is_reusable() -> bool {
19    REUSABLE.with(|r| r.get())
20}
21
/// A pool of reusable objects of type `T`.
///
/// Fresh objects are created with `init` and scrubbed with `reset` before
/// being stored for later reuse.
pub struct Pool<T> {
    // The objects in the pool ready to be reused.
    // The mutex should only be visited in the global collector, which is guaranteed by
    // `is_reusable`, so it should not have synchronization overhead.
    objects: Mutex<Vec<T>>,
    // Constructor invoked when the pool has no recycled object to hand out.
    init: fn() -> T,
    // Hook run on every object before it re-enters the pool.
    reset: fn(&mut T),
}
30
31impl<T> Pool<T> {
32    #[inline]
33    pub fn new(init: fn() -> T, reset: fn(&mut T)) -> Pool<T> {
34        Pool {
35            objects: Mutex::new(Vec::new()),
36            init,
37            reset,
38        }
39    }
40
41    #[inline]
42    fn batch_pull<'a>(&'a self, n: usize, buffer: &mut Vec<Reusable<'a, T>>) {
43        let mut objects = self.objects.lock();
44        let len = objects.len();
45        buffer.extend(
46            objects
47                .drain(len.saturating_sub(n)..)
48                .map(|obj| Reusable::new(self, obj)),
49        );
50        drop(objects);
51        buffer.resize_with(n, || Reusable::new(self, (self.init)()));
52    }
53
54    pub fn puller(&self, buffer_size: usize) -> Puller<T> {
55        assert!(buffer_size > 0);
56        Puller {
57            pool: self,
58            buffer: Vec::with_capacity(buffer_size),
59            buffer_size,
60        }
61    }
62
63    #[inline]
64    pub fn recycle(&self, mut obj: T) {
65        if is_reusable() {
66            (self.reset)(&mut obj);
67            self.objects.lock().push(obj)
68        }
69    }
70}
71
/// Pulls objects out of a [`Pool`] in batches to reduce lock traffic.
pub struct Puller<'a, T> {
    pool: &'a Pool<T>,
    // Locally cached objects, refilled `buffer_size` at a time from the pool.
    buffer: Vec<Reusable<'a, T>>,
    // Batch size used when refilling `buffer`; always > 0 (asserted in `Pool::puller`).
    buffer_size: usize,
}
77
78impl<'a, T> Puller<'a, T> {
79    #[inline]
80    pub fn pull(&mut self) -> Reusable<'a, T> {
81        self.buffer.pop().unwrap_or_else(|| {
82            self.pool.batch_pull(self.buffer_size, &mut self.buffer);
83            self.buffer.pop().unwrap()
84        })
85    }
86}
87
/// A smart pointer that hands its object back to the owning [`Pool`] on drop.
pub struct Reusable<'a, T> {
    pool: &'a Pool<T>,
    // `ManuallyDrop` lets `Drop::drop` move the object out for recycling
    // without running `T`'s destructor twice.
    obj: ManuallyDrop<T>,
}
92
93impl<'a, T> Reusable<'a, T> {
94    #[inline]
95    pub fn new(pool: &'a Pool<T>, obj: T) -> Self {
96        Self {
97            pool,
98            obj: ManuallyDrop::new(obj),
99        }
100    }
101
102    #[inline]
103    pub fn into_inner(mut self) -> T {
104        unsafe {
105            let obj = ManuallyDrop::take(&mut self.obj);
106            std::mem::forget(self);
107            obj
108        }
109    }
110}
111
112impl<'a, T> std::fmt::Debug for Reusable<'a, T>
113where T: std::fmt::Debug
114{
115    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
116        self.obj.fmt(f)
117    }
118}
119
120impl<'a, T> std::cmp::PartialEq for Reusable<'a, T>
121where T: std::cmp::PartialEq
122{
123    fn eq(&self, other: &Self) -> bool {
124        T::eq(self, other)
125    }
126}
127
// `Reusable` is a total equivalence whenever the wrapped object is.
impl<'a, T> std::cmp::Eq for Reusable<'a, T> where T: std::cmp::Eq {}
129
130impl<'a, T> Deref for Reusable<'a, T> {
131    type Target = T;
132
133    #[inline]
134    fn deref(&self) -> &Self::Target {
135        &self.obj
136    }
137}
138
139impl<'a, T> DerefMut for Reusable<'a, T> {
140    #[inline]
141    fn deref_mut(&mut self) -> &mut Self::Target {
142        &mut self.obj
143    }
144}
145
// Returns the object to the pool when the wrapper is dropped.
impl<'a, T> Drop for Reusable<'a, T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `drop` runs at most once, and `into_inner` calls
        // `mem::forget(self)` before this could run, so `obj` is always
        // still initialized here and is taken exactly once.
        unsafe {
            self.pool.recycle(ManuallyDrop::take(&mut self.obj));
        }
    }
}