// stack_arena/simple_obstack.rs

use std::ptr::NonNull;

use crate::Allocator;
5#[derive(Debug)]
6pub struct SimpleObstack {
7 stack: Vec<Box<[u8]>>,
8 current: Vec<u8>,
9}
10
11impl SimpleObstack {
12 pub fn new() -> Self {
13 Self {
14 stack: Vec::new(),
15 current: Vec::with_capacity(1024),
16 }
17 }
18
19 pub fn len(&self) -> usize {
20 self.stack.len()
21 }
22
23 pub fn is_empty(&self) -> bool {
24 self.stack.is_empty()
25 }
26
27 pub fn push<P: AsRef<[u8]>>(&mut self, s: P) {
28 let object = s.as_ref().to_vec().into_boxed_slice();
29 self.stack.push(object);
30 }
31
32 pub fn pop(&mut self) -> Option<Box<[u8]>> {
33 self.stack.pop()
34 }
35
36 pub fn extend<P: AsRef<[u8]>>(&mut self, s: P) {
37 self.current.extend_from_slice(s.as_ref());
38 }
39
40 pub fn finish(&mut self) -> NonNull<[u8]> {
41 let object =
42 std::mem::replace(&mut self.current, Vec::with_capacity(1024)).into_boxed_slice();
43 let ptr = NonNull::slice_from_raw_parts(
44 unsafe { NonNull::new_unchecked(object.as_ptr() as *mut u8) },
45 object.len(),
46 );
47 self.stack.push(object);
48 ptr
49 }
50
51 pub fn free(&mut self, s: &[u8]) {
52 while let Some(o) = self.stack.pop() {
53 if std::ptr::addr_eq(o.as_ref(), s) {
54 break;
55 }
56 }
57 }
58}
59
60impl std::fmt::Write for SimpleObstack {
61 fn write_str(&mut self, s: &str) -> std::fmt::Result {
62 self.extend(s.as_bytes());
63 Ok(())
64 }
65}
66
67impl Allocator for SimpleObstack {
68 unsafe fn allocate(
69 &mut self,
70 layout: std::alloc::Layout,
71 ) -> Result<std::ptr::NonNull<[u8]>, crate::AllocError> {
72 let ptr = unsafe { std::alloc::alloc(layout) };
73 if ptr.is_null() {
74 std::alloc::handle_alloc_error(layout);
75 }
76 unsafe {
77 let slice = std::slice::from_raw_parts_mut(ptr, layout.size());
78 let object = Box::from_raw(slice);
79 self.stack.push(object);
80 Ok(NonNull::slice_from_raw_parts(
81 NonNull::new_unchecked(ptr),
82 layout.size(),
83 ))
84 }
85 }
86
87 unsafe fn deallocate(&mut self, ptr: std::ptr::NonNull<u8>, layout: std::alloc::Layout) {
88 while let Some(object) = self.stack.pop() {
89 if object.as_ptr() == ptr.as_ptr() {
90 debug_assert_eq!(object.len(), layout.size());
91 break;
92 }
93 }
94 }
95
96 unsafe fn grow(
97 &mut self,
98 ptr: NonNull<u8>,
99 old_layout: std::alloc::Layout,
100 new_layout: std::alloc::Layout,
101 ) -> Result<NonNull<[u8]>, crate::AllocError> {
102 match old_layout.size().cmp(&new_layout.size()) {
103 std::cmp::Ordering::Less => {
104 let mut new_ptr = self.allocate(new_layout)?;
105 new_ptr
106 .cast::<u8>()
107 .copy_from_nonoverlapping(ptr, old_layout.size());
108 let object = Box::from_raw(new_ptr.as_mut());
109 self.stack.push(object);
110 Ok(new_ptr)
111 }
112 std::cmp::Ordering::Equal => Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size())),
113 std::cmp::Ordering::Greater => panic!("use shrink instead"),
114 }
115 }
116
117 unsafe fn shrink(
118 &mut self,
119 ptr: NonNull<u8>,
120 old_layout: std::alloc::Layout,
121 new_layout: std::alloc::Layout,
122 ) -> Result<NonNull<[u8]>, crate::AllocError> {
123 debug_assert!(old_layout.size() >= new_layout.size());
124 let object = NonNull::slice_from_raw_parts(ptr, new_layout.size());
125 let chunk = object.as_ref()[..new_layout.size()]
126 .to_vec()
127 .into_boxed_slice();
128 self.stack.push(chunk);
129 Ok(object)
130 }
131}
132
#[cfg(test)]
mod tests {
    use std::fmt::Write;

    use super::*;

    #[test]
    fn test_lifecycle() {
        // Build one object from three pieces: two bytes through the
        // `fmt::Write` impl, one through `extend`, then seal and inspect.
        let mut stack = SimpleObstack::new();
        write!(&mut stack, "ab").expect("write");
        stack.extend("c");
        let finished = stack.finish();
        // SAFETY: the object was just finished and has not been popped or
        // freed, so the pointer returned by `finish` is still live.
        assert_eq!(unsafe { finished.as_ref() }, b"abc");
    }
}
148}