// skiplist_rust/arena.rs

1use std::sync::atomic::{AtomicUsize, Ordering};
2use std::ptr;
3
const BLOCK_SIZE: usize = 4096;

/// Alignment used for every block requested from the global allocator.
/// At least as strict as what `allocate_aligned` promises, so the start of
/// any fresh block is already suitably aligned.
const BLOCK_ALIGN: usize = if std::mem::size_of::<*mut ()>() > 8 {
    std::mem::size_of::<*mut ()>()
} else {
    8
};

/// A bump allocator in the style of LevelDB's `Arena`.
///
/// Memory is carved out of blocks obtained from the global allocator.
/// Individual allocations are never freed; everything is released at once
/// when the `Arena` is dropped, so returned pointers stay valid for the
/// lifetime of the `Arena`.
pub struct Arena {
    /// Start of the unused region of the current block.
    alloc_ptr: *mut u8,
    /// Bytes still available at `alloc_ptr`.
    alloc_bytes_remaining: usize,
    /// Every block allocated so far, paired with the exact `Layout` used to
    /// obtain it. `Drop` must hand the very same layout back to `dealloc`;
    /// deallocating with a different layout is undefined behavior.
    blocks: Vec<(*mut u8, std::alloc::Layout)>,
    /// Approximate total reserved bytes (payload + per-block bookkeeping).
    /// Atomic so it can be read through a shared reference.
    memory_usage: AtomicUsize,
}

impl Default for Arena {
    fn default() -> Self {
        Self::new()
    }
}

impl Arena {
    /// Creates an empty arena. No memory is reserved until the first
    /// allocation.
    pub fn new() -> Self {
        Arena {
            alloc_ptr: ptr::null_mut(),
            alloc_bytes_remaining: 0,
            blocks: Vec::new(),
            memory_usage: AtomicUsize::new(0),
        }
    }

    /// Returns a pointer to `bytes` bytes of uninitialized memory.
    /// No alignment guarantee beyond 1 byte.
    ///
    /// # Panics
    /// Panics if `bytes == 0`.
    pub fn allocate(&mut self, bytes: usize) -> *mut u8 {
        assert!(bytes > 0);
        if bytes <= self.alloc_bytes_remaining {
            // Fast path: bump the pointer inside the current block.
            unsafe {
                let result = self.alloc_ptr;
                self.alloc_ptr = self.alloc_ptr.add(bytes);
                self.alloc_bytes_remaining -= bytes;
                result
            }
        } else {
            self.allocate_fallback(bytes)
        }
    }

    /// Like [`Arena::allocate`], but the returned pointer is aligned to
    /// `BLOCK_ALIGN` (8, or the pointer width if that is larger).
    ///
    /// # Panics
    /// Panics if `bytes == 0` or if `bytes` plus the alignment padding
    /// overflows `usize`.
    pub fn allocate_aligned(&mut self, bytes: usize) -> *mut u8 {
        assert!(bytes > 0);
        assert!(BLOCK_ALIGN.is_power_of_two());

        // Padding needed to round alloc_ptr up to a multiple of BLOCK_ALIGN.
        let current_mod = (self.alloc_ptr as usize) & (BLOCK_ALIGN - 1);
        let slop = if current_mod == 0 {
            0
        } else {
            BLOCK_ALIGN - current_mod
        };
        let needed = bytes.checked_add(slop).expect("allocation size overflow");

        if needed <= self.alloc_bytes_remaining {
            unsafe {
                let result = self.alloc_ptr.add(slop);
                self.alloc_ptr = self.alloc_ptr.add(needed);
                self.alloc_bytes_remaining -= needed;
                result
            }
        } else {
            // Fallback blocks come straight from the global allocator with
            // BLOCK_ALIGN alignment, so their start is already aligned.
            self.allocate_fallback(bytes)
        }
    }

    /// Approximate total memory reserved by the arena: block payloads plus
    /// per-block bookkeeping.
    pub fn memory_usage(&self) -> usize {
        self.memory_usage.load(Ordering::Relaxed)
    }

    /// Slow path: the current block cannot satisfy `bytes`.
    fn allocate_fallback(&mut self, bytes: usize) -> *mut u8 {
        if bytes > BLOCK_SIZE / 4 {
            // Large request: give it its own block so that, as in LevelDB,
            // at most 25% of the current block is ever abandoned.
            return self.allocate_new_block(bytes);
        }

        // Start a fresh standard-size block, discarding whatever was left in
        // the old one (bounded waste by the check above).
        self.alloc_ptr = self.allocate_new_block(BLOCK_SIZE);
        self.alloc_bytes_remaining = BLOCK_SIZE;

        let result = self.alloc_ptr;
        unsafe {
            self.alloc_ptr = self.alloc_ptr.add(bytes);
        }
        self.alloc_bytes_remaining -= bytes;
        result
    }

    /// Allocates a new block of `block_bytes` bytes from the global
    /// allocator and records it, together with its layout, so `Drop` can
    /// deallocate it correctly.
    fn allocate_new_block(&mut self, block_bytes: usize) -> *mut u8 {
        let layout = std::alloc::Layout::from_size_align(block_bytes, BLOCK_ALIGN)
            .expect("invalid arena block layout");
        // SAFETY: `layout` has non-zero size — every caller passes bytes > 0.
        let result = unsafe { std::alloc::alloc(layout) };
        if result.is_null() {
            // Propagate OOM the same way the standard collections do instead
            // of handing a null pointer to the caller.
            std::alloc::handle_alloc_error(layout);
        }
        self.blocks.push((result, layout));
        self.memory_usage.fetch_add(
            block_bytes + std::mem::size_of::<(*mut u8, std::alloc::Layout)>(),
            Ordering::Relaxed,
        );
        result
    }
}

impl Drop for Arena {
    fn drop(&mut self) {
        for &(block, layout) in &self.blocks {
            // SAFETY: `block` was returned by `std::alloc::alloc(layout)`
            // with exactly this layout and has not been deallocated yet.
            unsafe { std::alloc::dealloc(block, layout) };
        }
    }
}
108
#[cfg(test)]
mod test {
    use std::cmp;
    use rand::prelude::StdRng;
    use rand::{Rng, SeedableRng};
    use crate::arena::Arena;

    // Smoke test: an arena that never allocates must construct and drop
    // without touching the global allocator.
    #[test]
    fn test_arena_empty() {
        let _arena = Arena::new();
    }

    // Port of LevelDB's ArenaTest.Simple: perform many allocations with a
    // skewed size distribution, stamp each allocation with a per-index byte
    // pattern, then verify every allocation still holds its pattern — i.e.
    // allocations never overlap and all blocks stay alive until drop.
    #[test]
    fn test_arena_simple() {
        let mut allocated = Vec::new();
        let mut arena = Arena::new();
        let n = 100000;
        // Total payload bytes requested so far (lower bound on memory_usage).
        let mut bytes = 0;
        // Fixed seed keeps the allocation sequence deterministic across runs.
        let mut rng = StdRng::seed_from_u64(301);

        for i in 0..n {
            // Size distribution (mirrors LevelDB's test): every n/10-th
            // allocation is huge (size i), a rare few are up to 6000 bytes
            // (larger than a block), ~10% are medium, the rest are tiny.
            let s = if i % (n / 10) == 0 {
                i
            } else if rng.gen_bool(1.0 / 4000.0) {
                rng.gen_range(0..6000)
            } else if rng.gen_bool(0.1) {
                rng.gen_range(0..100)
            } else {
                rng.gen_range(0..20)
            };

            let s = cmp::max(s, 1); // Our arena disallows size 0 allocations.

            // ~10% of allocations exercise the aligned path.
            let r = if rng.gen_bool(0.1) {
                arena.allocate_aligned(s)
            } else {
                arena.allocate(s)
            };

            unsafe {
                for b in 0..s {
                    // Fill the "i"th allocation with a known bit pattern
                    *r.add(b) = (i % 256) as u8;
                }
            }

            bytes += s;
            allocated.push((s, r));
            // The arena can only over-reserve, never under-report.
            assert!(arena.memory_usage() >= bytes);
            if i > n / 10 {
                // Once warmed up, block waste + bookkeeping stays under 10%.
                assert!(arena.memory_usage() <= (bytes as f64 * 1.10) as usize);
            }
        }

        for (i, &(num_bytes, p)) in allocated.iter().enumerate() {
            unsafe {
                for b in 0..num_bytes {
                    // Check the "i"th allocation for the known bit pattern
                    assert_eq!((*p.add(b) as i32) & 0xff, i as i32 % 256);
                }
            }
        }
    }
}
172}