second_stack/lib.rs
mod allocation;
use allocation::Allocation;

use std::{
    self,
    cell::UnsafeCell,
    mem::{size_of, MaybeUninit},
    ptr, slice,
};

thread_local!(
    static THREAD_LOCAL: Stack = Stack::new()
);

/// A Stack that is managed separately from the threadlocal one.
/// Using the threadlocal APIs is typically encouraged, because they enable
/// sharing across libraries, where each re-use lowers the amortized cost of
/// maintaining allocations. But if full control is necessary, this API may
/// be used instead.
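///
/// A minimal usage sketch, assuming the crate is built under the name
/// `second_stack`:
///
/// ```
/// use std::mem::MaybeUninit;
///
/// let stack = second_stack::Stack::new();
/// // Scratch space is borrowed from `stack` only for the duration of the closure.
/// let len = stack.uninit_slice(128, |scratch: &mut [MaybeUninit<u8>]| {
///     for slot in scratch.iter_mut() {
///         slot.write(0);
///     }
///     scratch.len()
/// });
/// assert_eq!(len, 128);
/// ```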
pub struct Stack(UnsafeCell<Allocation>);

impl Drop for Stack {
    fn drop(&mut self) {
        let stack = self.0.get_mut();
        // It's ok to use force_dealloc here instead of try_dealloc
        // because we know the allocation cannot be in-use. Eliding
        // the check allows the allocation to be freed even when there
        // was a panic.
        unsafe {
            stack.force_dealloc();
        }
    }
}

impl Stack {
    pub fn new() -> Self {
        Self(UnsafeCell::new(Allocation::null()))
    }

    /// Place a potentially very large value on this stack.
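    ///
    /// A minimal sketch of how this might be used, assuming the crate name
    /// `second_stack`:
    ///
    /// ```
    /// use std::mem::MaybeUninit;
    ///
    /// let stack = second_stack::Stack::new();
    /// let first = stack.uninit(|slot: &mut MaybeUninit<[u8; 4096]>| {
    ///     // `write` initializes the slot and hands back a `&mut [u8; 4096]`.
    ///     let buf = slot.write([7u8; 4096]);
    ///     buf[0]
    /// });
    /// assert_eq!(first, 7);
    /// ```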
    pub fn uninit<T, R, F>(&self, f: F) -> R
    where
        F: FnOnce(&mut MaybeUninit<T>) -> R,
    {
        // Delegate implementation to uninit_slice just to get this working.
        // Performance could be slightly improved with a bespoke implementation
        // of this method.
        self.uninit_slice(1, |slice| f(&mut slice[0]))
    }

    /// Allocates an uninit slice from this stack.
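    ///
    /// For example (a minimal sketch, assuming the crate name `second_stack`),
    /// the slice can be filled and then read back before the closure returns:
    ///
    /// ```
    /// use std::mem::MaybeUninit;
    ///
    /// let stack = second_stack::Stack::new();
    /// let sum = stack.uninit_slice(4, |slice: &mut [MaybeUninit<u32>]| {
    ///     for (i, slot) in slice.iter_mut().enumerate() {
    ///         slot.write(i as u32);
    ///     }
    ///     // Sound because every element was just initialized above.
    ///     slice.iter().map(|slot| unsafe { *slot.assume_init_ref() }).sum::<u32>()
    /// });
    /// assert_eq!(sum, 6);
    /// ```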
    pub fn uninit_slice<T, F, R>(&self, len: usize, f: F) -> R
    where
        F: FnOnce(&mut [MaybeUninit<T>]) -> R,
    {
        // Special case for ZSTs that disregards the rest of the code,
        // so that none of that code needs to account for ZSTs.
        // The reason this is convenient is that a ZST may use
        // the stack without bumping the pointer, which would
        // lead other code to free that memory while it is still in use.
        // See also: 2ec61cda-e074-4b26-a9a5-a01b70706585
        // There may be other issues also.
        if std::mem::size_of::<T>() == 0 {
            let mut tmp = Vec::<T>::with_capacity(len);
            // We do need to take a slice here, because surprisingly
            // tmp.capacity() returns 18446744073709551615 for a ZST Vec
            let slice = &mut tmp.spare_capacity_mut()[..len];
            return f(slice);
        }

        // Required for correctness
        // See also: 26936c11-5b7c-472e-8f63-7922e63a5425
        if len == 0 {
            return f(&mut []);
        }

        // Get the new slice, and the old allocation to
        // restore once the function is finished running.
        let (_restore, (ptr, len)) = unsafe {
            let stack = &mut *self.0.get();
            stack.get_slice(&self.0, len)
        };

        let slice = unsafe { slice::from_raw_parts_mut(ptr as *mut MaybeUninit<T>, len) };

        f(slice)
    }

    /// Buffers an iterator to a slice on this stack and gives temporary access to that slice.
    /// Do not use with an unbounded iterator, because this will eventually run out of memory and panic.
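    ///
    /// A minimal sketch, assuming the crate name `second_stack`:
    ///
    /// ```
    /// let stack = second_stack::Stack::new();
    /// let total = stack.buffer((0..5).map(|x| x * 2), |slice| {
    ///     assert_eq!(slice, &[0, 2, 4, 6, 8]);
    ///     slice.iter().sum::<i32>()
    /// });
    /// assert_eq!(total, 20);
    /// ```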
    pub fn buffer<T, F, R, I>(&self, i: I, f: F) -> R
    where
        I: Iterator<Item = T>,
        F: FnOnce(&mut [T]) -> R,
    {
        // Special case for ZST
        if size_of::<T>() == 0 {
            let mut v: Vec<_> = i.collect();
            return f(&mut v);
        }

        // Data goes in a struct so that it is still cleaned up if user code panics.
        // User code includes Iterator::next, the FnOnce, and Drop::drop
        struct Writer<'a, T> {
            restore: Option<DropStack<'a>>,
            base: *mut T,
            len: usize,
            capacity: usize,
        }

        impl<T> Writer<'_, T> {
            unsafe fn write(&mut self, item: T) {
                self.base.add(self.len).write(item);
                self.len += 1;
            }

            fn try_reuse(&mut self, stack: &mut Allocation) -> bool {
                if let Some(prev) = &self.restore {
                    if prev.restore.ref_eq(stack) {
                        // If we are already using this stack, we know the
                        // end ptr is already aligned. To double in size,
                        // we need exactly as many bytes as are currently
                        // in use, and we do not need to re-align.
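                        // For example, with capacity 4 and size_of::<T>() == 8,
                        // growing to capacity 8 requires another 4 * 8 = 32 bytes.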
                        let required_bytes = size_of::<T>() * self.capacity;

                        if stack.remaining_bytes() >= required_bytes {
                            stack.len += required_bytes;
                            self.capacity *= 2;
                            return true;
                        }
                    }
                }
                false
            }
        }

        impl<T> Drop for Writer<'_, T> {
            fn drop(&mut self) {
                unsafe {
                    for i in 0..self.len {
                        self.base.add(i).drop_in_place()
                    }
                }
            }
        }

        unsafe {
            let mut writer = Writer {
                restore: None,
                base: ptr::null_mut(),
                capacity: 0,
                len: 0,
            };

            for next in i {
                if writer.capacity == writer.len {
                    let stack = &mut *self.0.get();

                    // First try to grow in place on the same stack, and if that
                    // fails copy over to the upsized allocation
                    if !writer.try_reuse(stack) {
                        // This will always be a different allocation, otherwise
                        // try_reuse would have succeeded
                        let (restore, (base, capacity)) =
                            stack.get_slice(&self.0, (writer.len * 2).max(1));

                        // The check for 0 avoids copying from a null ptr (a miri violation)
                        if writer.len != 0 {
                            ptr::copy_nonoverlapping(writer.base, base, writer.len);
                        }

                        // This attempts to restore the old allocation when
                        // writer.restore is Some, but we know that there
                        // is a new allocation at this point, so the only
                        // thing it can do is free memory
                        writer.restore = Some(restore);

                        writer.capacity = capacity;
                        writer.base = base;
                    }
                }
                writer.write(next);
            }

            // TODO: (Performance?) Drop reserve of unused stack, if any. We have over-allocated.
            // TODO: (Performance?) Consider using size_hint

            let buffer = slice::from_raw_parts_mut(writer.base, writer.len);
            f(buffer)
        }
    }
}

/// Allocates an uninit slice from the threadlocal stack.
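///
/// A minimal sketch, assuming the crate name `second_stack`:
///
/// ```
/// use std::mem::MaybeUninit;
///
/// let checksum = second_stack::uninit_slice(3, |slice: &mut [MaybeUninit<u64>]| {
///     for (i, slot) in slice.iter_mut().enumerate() {
///         slot.write(i as u64 + 1);
///     }
///     // Sound because every element was just initialized above.
///     slice.iter().map(|slot| unsafe { *slot.assume_init_ref() }).sum::<u64>()
/// });
/// assert_eq!(checksum, 6);
/// ```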
pub fn uninit_slice<T, F, R>(len: usize, f: F) -> R
where
    F: FnOnce(&mut [MaybeUninit<T>]) -> R,
{
    THREAD_LOCAL.with(|stack| stack.uninit_slice(len, f))
}

/// Place a potentially very large value on the threadlocal second stack.
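///
/// A minimal sketch, assuming the crate name `second_stack`:
///
/// ```
/// use std::mem::MaybeUninit;
///
/// let head = second_stack::uninit(|slot: &mut MaybeUninit<[u64; 512]>| {
///     // `write` initializes the slot and returns a `&mut [u64; 512]`.
///     let big = slot.write([3u64; 512]);
///     big[0]
/// });
/// assert_eq!(head, 3);
/// ```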
pub fn uninit<T, F, R>(f: F) -> R
where
    F: FnOnce(&mut MaybeUninit<T>) -> R,
{
    THREAD_LOCAL.with(|stack| stack.uninit(f))
}

/// Buffers an iterator to a slice on the threadlocal stack and gives temporary access to that slice.
/// Do not use with an unbounded iterator, because this will eventually run out of memory and panic.
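///
/// A minimal sketch, assuming the crate name `second_stack`:
///
/// ```
/// let words = ["a", "bcd", "ef"];
/// let longest = second_stack::buffer(words.iter().copied(), |slice| {
///     slice.sort_by_key(|s| s.len());
///     slice.last().copied()
/// });
/// assert_eq!(longest, Some("bcd"));
/// ```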
pub fn buffer<T, F, R, I>(i: I, f: F) -> R
where
    I: Iterator<Item = T>,
    F: FnOnce(&mut [T]) -> R,
{
    THREAD_LOCAL.with(|stack| stack.buffer(i, f))
}

// The logic to restore or drop our Allocation goes into a Drop impl so that
// if there is a panic the cleanup logic still runs and we don't leak any memory.
pub(crate) struct DropStack<'a> {
    pub restore: Allocation,
    pub location: &'a UnsafeCell<Allocation>,
}

impl Drop for DropStack<'_> {
    fn drop(&mut self) {
        unsafe {
            let current = &mut *self.location.get();
            if current.ref_eq(&self.restore) {
                current.len = self.restore.len;
            } else {
                self.restore.try_dealloc();
            }
        }
    }
}