//! rustpython_vm/datastack.rs — a chunked bump-allocated data stack.
use alloc::alloc::{alloc, dealloc};
use core::alloc::Layout;
use core::ptr;

/// Size in bytes of the first (root) chunk; later chunks are never smaller.
const MIN_CHUNK_SIZE: usize = 16 * 1024;

/// Extra headroom added when sizing an overflow chunk, so that a run of small
/// pushes right after one large push does not immediately force another
/// allocation (see `push_slow`).
const MINIMUM_OVERHEAD: usize = 1000 * core::mem::size_of::<usize>();

/// Alignment, in bytes, of every pointer handed out by `push`. Must be a
/// power of two (the `& !(ALIGN - 1)` rounding relies on it).
const ALIGN: usize = 16;
20
/// Header placed at the base of every chunk allocation; the payload bytes
/// follow the header (rounded up to `ALIGN`) within the same allocation.
#[repr(C)]
struct DataStackChunk {
    /// Next-older chunk in the chain, or null for the root chunk.
    previous: *mut DataStackChunk,
    /// Total size in bytes of this allocation, header included.
    size: usize,
    /// Byte offset of `top` from the chunk base, recorded when a newer chunk
    /// is pushed on top of this one (`push_slow`).
    /// NOTE(review): written but never read back in this file — `pop_slow`
    /// recovers `top` from the caller's base pointer instead. Confirm whether
    /// anything external depends on it before removing.
    saved_top: usize,
}
33
34impl DataStackChunk {
35 #[inline(always)]
37 fn data_start(&self) -> *mut u8 {
38 let header_end = (self as *const Self as usize) + core::mem::size_of::<Self>();
39 let aligned = (header_end + ALIGN - 1) & !(ALIGN - 1);
40 aligned as *mut u8
41 }
42
43 #[inline(always)]
45 fn data_limit(&self) -> *mut u8 {
46 unsafe { (self as *const Self as *mut u8).add(self.size) }
47 }
48}
49
/// A bump-allocating stack of raw byte regions, grown in linked chunks.
pub struct DataStack {
    /// Newest (current) chunk; never null after construction.
    chunk: *mut DataStackChunk,
    /// First free byte in the current chunk (invariant: `top <= limit`).
    top: *mut u8,
    /// One past the last usable byte of the current chunk.
    limit: *mut u8,
}
59
60impl DataStack {
61 pub fn new() -> Self {
63 let chunk = Self::alloc_chunk(MIN_CHUNK_SIZE, ptr::null_mut());
64 let top = unsafe { (*chunk).data_start() };
65 let limit = unsafe { (*chunk).data_limit() };
66 let top = unsafe { top.add(ALIGN) };
69 Self { chunk, top, limit }
70 }
71
72 #[inline(always)]
74 pub fn has_space(&self, size: usize) -> bool {
75 let aligned_size = (size + ALIGN - 1) & !(ALIGN - 1);
76 (self.limit as usize).saturating_sub(self.top as usize) >= aligned_size
77 }
78
79 #[inline(always)]
85 pub fn push(&mut self, size: usize) -> *mut u8 {
86 let aligned_size = (size + ALIGN - 1) & !(ALIGN - 1);
87 unsafe {
88 if self.top.add(aligned_size) <= self.limit {
89 let ptr = self.top;
90 self.top = self.top.add(aligned_size);
91 ptr
92 } else {
93 self.push_slow(aligned_size)
94 }
95 }
96 }
97
98 #[cold]
100 #[inline(never)]
101 fn push_slow(&mut self, aligned_size: usize) -> *mut u8 {
102 let mut chunk_size = MIN_CHUNK_SIZE;
103 let needed = aligned_size
104 .checked_add(MINIMUM_OVERHEAD)
105 .and_then(|v| v.checked_add(core::mem::size_of::<DataStackChunk>()))
106 .and_then(|v| v.checked_add(ALIGN))
107 .expect("DataStack chunk size overflow");
108 while chunk_size < needed {
109 chunk_size = chunk_size
110 .checked_mul(2)
111 .expect("DataStack chunk size overflow");
112 }
113 unsafe {
115 (*self.chunk).saved_top = self.top as usize - self.chunk as usize;
116 }
117 let new_chunk = Self::alloc_chunk(chunk_size, self.chunk);
118 self.chunk = new_chunk;
119 let start = unsafe { (*new_chunk).data_start() };
120 self.limit = unsafe { (*new_chunk).data_limit() };
121 self.top = unsafe { start.add(aligned_size) };
122 start
123 }
124
125 #[inline(always)]
132 pub unsafe fn pop(&mut self, base: *mut u8) {
133 debug_assert!(!base.is_null());
134 if self.is_in_current_chunk(base) {
135 self.top = base;
137 } else {
138 unsafe { self.pop_slow(base) };
140 }
141 }
142
143 #[inline(always)]
147 fn is_in_current_chunk(&self, ptr: *mut u8) -> bool {
148 let chunk_start = unsafe { (*self.chunk).data_start() };
149 ptr >= chunk_start && ptr <= self.limit
150 }
151
152 #[cold]
154 #[inline(never)]
155 unsafe fn pop_slow(&mut self, base: *mut u8) {
156 loop {
157 let old_chunk = self.chunk;
158 let prev = unsafe { (*old_chunk).previous };
159 debug_assert!(!prev.is_null(), "tried to pop past the root chunk");
160 unsafe { Self::free_chunk(old_chunk) };
161 self.chunk = prev;
162 self.limit = unsafe { (*prev).data_limit() };
163 if self.is_in_current_chunk(base) {
164 self.top = base;
165 return;
166 }
167 }
168 }
169
170 fn alloc_chunk(size: usize, previous: *mut DataStackChunk) -> *mut DataStackChunk {
172 let layout = Layout::from_size_align(size, ALIGN).expect("invalid chunk layout");
173 let ptr = unsafe { alloc(layout) };
174 if ptr.is_null() {
175 alloc::alloc::handle_alloc_error(layout);
176 }
177 let chunk = ptr as *mut DataStackChunk;
178 unsafe {
179 (*chunk).previous = previous;
180 (*chunk).size = size;
181 (*chunk).saved_top = 0;
182 }
183 chunk
184 }
185
186 unsafe fn free_chunk(chunk: *mut DataStackChunk) {
188 let size = unsafe { (*chunk).size };
189 let layout = Layout::from_size_align(size, ALIGN).expect("invalid chunk layout");
190 unsafe { dealloc(chunk as *mut u8, layout) };
191 }
192}
193
// SAFETY(review): DataStack owns its chunk chain exclusively through raw
// pointers, so moving the stack to another thread moves sole ownership with
// it; presumably soundness also relies on the global allocator being
// thread-safe — confirm.
unsafe impl Send for DataStack {}
197
198impl Default for DataStack {
199 fn default() -> Self {
200 Self::new()
201 }
202}
203
204impl Drop for DataStack {
205 fn drop(&mut self) {
206 let mut chunk = self.chunk;
207 while !chunk.is_null() {
208 let prev = unsafe { (*chunk).previous };
209 unsafe { Self::free_chunk(chunk) };
210 chunk = prev;
211 }
212 }
213}
214
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn basic_push_pop() {
        let mut ds = DataStack::new();
        let first = ds.push(64);
        let second = ds.push(128);
        assert!(!first.is_null());
        assert!(!second.is_null());
        // Bump allocation hands out strictly increasing addresses in a chunk.
        assert!(second > first);
        // Release in LIFO order.
        unsafe {
            ds.pop(second);
            ds.pop(first);
        }
    }

    #[test]
    fn cross_chunk_push_pop() {
        let mut ds = DataStack::new();
        // 100 KiB of 1 KiB pushes overflows the 16 KiB root chunk repeatedly.
        let regions: Vec<*mut u8> = (0..100).map(|_| ds.push(1024)).collect();
        // Popping in reverse must unwind across chunk boundaries.
        for region in regions.into_iter().rev() {
            unsafe { ds.pop(region) };
        }
    }

    #[test]
    fn alignment() {
        let mut ds = DataStack::new();
        let sizes = [1, 7, 15, 16, 17, 31, 32, 33, 64, 100];
        for size in sizes {
            let p = ds.push(size);
            assert_eq!(p as usize % ALIGN, 0, "alignment violated for size {size}");
            unsafe { ds.pop(p) };
        }
    }
}