1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
use pool_crate;
use std::ops;
use std::sync::atomic::{AtomicUsize, Ordering};
// Global count of pool buffers currently checked out. Incremented in
// `Pool::checkout` and decremented in `Checkout::drop`, so the
// "buffer.count" gauge can track outstanding checkouts process-wide.
static BUFFER_COUNT: AtomicUsize = AtomicUsize::new(0);
// Re-export of the pool crate's `Reset` trait so users of this module do
// not need to depend on `pool_crate` directly.
// NOTE(review): `type Reset = pool_crate::Reset;` aliases the trait as a
// (bare) trait-object *type*; if the intent is to re-export the trait
// itself for use in bounds, `pub use pool_crate::Reset;` would be the
// usual form — confirm intent.
pub type Reset = pool_crate::Reset;
/// Wrapper around `pool_crate::Pool` that keeps the global `BUFFER_COUNT`
/// counter (and the "buffer.count" gauge) in sync with checkouts.
pub struct Pool<T:pool_crate::Reset> {
// Underlying pool; also reachable through the `Deref`/`DerefMut` impls below.
pub inner: pool_crate::Pool<T>,
}
impl<T: pool_crate::Reset> Pool<T> {
    /// Builds a pool holding `count` preallocated items with room for
    /// `extra` more, each element created by `init`. Thin wrapper over
    /// `pool_crate::Pool::with_capacity`.
    pub fn with_capacity<F>(count: usize, extra: usize, init: F) -> Pool<T>
    where
        F: Fn() -> T,
    {
        let inner = pool_crate::Pool::with_capacity(count, extra, init);
        Pool { inner: inner }
    }

    /// Checks a buffer out of the pool.
    ///
    /// On success, bumps the global `BUFFER_COUNT` and reports the new
    /// value on the "buffer.count" gauge. Returns `None` when the
    /// underlying pool has nothing available.
    pub fn checkout(&mut self) -> Option<Checkout<T>> {
        let checked_out = self.inner.checkout()?;
        // `fetch_add` returns the value *before* the increment.
        let previous = BUFFER_COUNT.fetch_add(1, Ordering::SeqCst);
        gauge!("buffer.count", previous + 1);
        Some(Checkout { inner: checked_out })
    }
}
impl<T: pool_crate::Reset> ops::Deref for Pool<T> {
    type Target = pool_crate::Pool<T>;

    /// Immutable access to the wrapped `pool_crate::Pool`.
    fn deref(&self) -> &pool_crate::Pool<T> {
        &self.inner
    }
}
impl<T: pool_crate::Reset> ops::DerefMut for Pool<T> {
    /// Mutable access to the wrapped `pool_crate::Pool`.
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
/// RAII handle for a buffer checked out of a `Pool`; its `Drop` impl
/// decrements the global `BUFFER_COUNT` counter.
pub struct Checkout<T> {
// Underlying checkout handle; also reachable through `Deref`/`DerefMut`.
// NOTE(review): because this field is `pub`, a `Checkout` can be built
// without going through `Pool::checkout` (i.e. without the matching
// counter increment) — confirm whether it needs to be public.
pub inner: pool_crate::Checkout<T>,
}
impl<T> ops::Deref for Checkout<T> {
    type Target = pool_crate::Checkout<T>;

    /// Immutable access to the wrapped `pool_crate::Checkout`.
    fn deref(&self) -> &pool_crate::Checkout<T> {
        &self.inner
    }
}
impl<T> ops::DerefMut for Checkout<T> {
fn deref_mut(&mut self) -> &mut pool_crate::Checkout<T> {
&mut self.inner
}
}
impl<T> Drop for Checkout<T> {
    /// Decrements the global checkout counter and reports the new value
    /// on the "buffer.count" gauge.
    fn drop(&mut self) {
        // `fetch_sub` returns the value *before* the decrement.
        let previous = BUFFER_COUNT.fetch_sub(1, Ordering::SeqCst);
        // `previous` is normally >= 1 (every `Checkout` produced by
        // `Pool::checkout` did a matching `fetch_add`), but `inner` is
        // `pub`, so a `Checkout` can be constructed without incrementing
        // the counter. In that case `previous` would be 0 and the former
        // `previous - 1` panicked in debug builds (and wrapped to
        // usize::MAX in release). `saturating_sub` keeps the gauge
        // report well-defined either way.
        gauge!("buffer.count", previous.saturating_sub(1));
    }
}
// SAFETY(review): these impls assert thread-safety for `Checkout<T>`
// based on `T` alone, overriding whatever the compiler would infer for
// the wrapped `pool_crate::Checkout<T>`. They are sound only if
// `pool_crate::Checkout<T>` is actually safe to move (Send) / share
// (Sync) across threads whenever `T` is — TODO confirm against the pool
// crate's internals (if it holds a raw pointer into the pool, these
// impls are load-bearing and need that justification recorded here).
unsafe impl<T: Send> Send for Checkout<T> { }
unsafe impl<T: Sync> Sync for Checkout<T> { }