// nolock/allocator/lrmalloc/util/list.rs

use std::{alloc::GlobalAlloc, sync::atomic};
/// A singly-linked list node: the stored value plus an atomic pointer to its
/// successor. A null `next` pointer marks the end of the list.
struct Node<T> {
    data: T,
    next: atomic::AtomicPtr<Self>,
}
7
8impl<T> Node<T> {
9 pub const fn new(data: T) -> Self {
10 Self {
11 data,
12 next: atomic::AtomicPtr::new(std::ptr::null_mut()),
13 }
14 }
15
16 pub fn alloc<G>(self, allocator: &G) -> *mut Self
17 where
18 G: GlobalAlloc,
19 {
20 let layout = std::alloc::Layout::new::<Self>();
21 let raw_ptr = unsafe { allocator.alloc(layout) };
22 let ptr: *mut Self = raw_ptr as *mut Self;
23
24 unsafe { ptr.write(self) };
25
26 ptr
27 }
28
29 pub fn dealloc<G>(ptr: *mut Self, allocator: &G)
30 where
31 G: GlobalAlloc,
32 {
33 let layout = std::alloc::Layout::new::<Self>();
34 unsafe { allocator.dealloc(ptr as *mut u8, layout) };
35 }
36}
37
/// Raw iterator over the nodes of a list, following the atomic `next`
/// pointers until null. Yields raw node pointers, not references.
struct NodeIter<T> {
    current: *mut Node<T>,
}
41
42impl<T> Iterator for NodeIter<T> {
43 type Item = *mut Node<T>;
44
45 fn next(&mut self) -> Option<Self::Item> {
46 let current_ptr = self.current;
47 if current_ptr.is_null() {
48 return None;
49 }
50
51 let current = unsafe { &*current_ptr };
52 let new_ptr = current.next.load(atomic::Ordering::Acquire);
53
54 self.current = new_ptr;
55
56 Some(current_ptr)
57 }
58}
59
/// A lock-free, append-only singly-linked list.
///
/// The first entry lives inline in the struct itself; every appended entry
/// is stored in a node allocated from the system allocator.
pub struct List<T> {
    head: Node<T>,
}
63
64impl<T> List<T> {
65 pub const fn new(initial_entry: T) -> Self {
66 let initial_node = Node::new(initial_entry);
67
68 Self { head: initial_node }
69 }
70
71 unsafe fn head_ptr(&self) -> *mut Node<T> {
75 let field_ref = &self.head;
76 field_ref as *const Node<T> as *mut Node<T>
77 }
78
79 fn node_iter(&self) -> NodeIter<T> {
80 let head_ptr = unsafe { self.head_ptr() };
81 NodeIter { current: head_ptr }
82 }
83
84 pub fn append(&self, data: T) {
85 let new_node = Node::new(data);
86 let new_node_ptr = new_node.alloc(&std::alloc::System);
87
88 let mut iter = self.node_iter().peekable();
89
90 let mut latest = &self.head;
91 while iter.peek().is_some() {
92 let ptr = iter
93 .next()
94 .expect("We just peeked on the Iterator and found an element so this must succeed");
95 latest = unsafe { &*ptr };
96 }
97
98 loop {
99 match latest.next.compare_exchange(
100 std::ptr::null_mut(),
101 new_node_ptr,
102 atomic::Ordering::AcqRel,
103 atomic::Ordering::Acquire,
104 ) {
105 Ok(_) => return,
106 Err(ptr) => {
107 latest = unsafe { &*ptr };
108 }
109 };
110 }
111 }
112
113 pub fn iter(&self) -> ListIter<'_, T> {
114 ListIter {
115 node_iter: self.node_iter(),
116 _marker: std::marker::PhantomData {},
117 }
118 }
119}
120
121unsafe impl<T> Sync for List<T> {}
122
123impl<T> Drop for List<T> {
124 fn drop(&mut self) {
125 let mut iter = self.node_iter();
126 let _ = iter.next();
129
130 for node_ptr in iter {
131 Node::dealloc(node_ptr, &std::alloc::System);
132 }
133 }
134}
135
/// Borrowing iterator over the entries of a [`List`], created by `List::iter`.
///
/// The phantom lifetime ties the yielded `&T` references to the borrow of
/// the list the iterator was created from.
pub struct ListIter<'iter, T> {
    node_iter: NodeIter<T>,
    _marker: std::marker::PhantomData<&'iter ()>,
}
140
141impl<'iter, T> Iterator for ListIter<'iter, T>
142where
143 T: 'iter,
144{
145 type Item = &'iter T;
146
147 fn next(&mut self) -> Option<Self::Item> {
148 let node_ptr = self.node_iter.next()?;
149 let node = unsafe { &*node_ptr };
150 Some(&node.data)
151 }
152}
153
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn new() {
        // Construction alone must not panic.
        List::<u8>::new(0);
    }

    #[test]
    fn append_iter() {
        let list = List::<u8>::new(0);

        list.append(123);
        list.append(234);

        // Entries come back in insertion order, initial entry first.
        let mut entries = list.iter();

        assert_eq!(Some(&0), entries.next());
        assert_eq!(Some(&123), entries.next());
        assert_eq!(Some(&234), entries.next());
        assert_eq!(None, entries.next());
    }

    #[test]
    fn iter_append_middle() {
        let list = List::<usize>::new(0);

        list.append(123);
        list.append(234);

        let mut entries = list.iter();

        assert_eq!(Some(&0), entries.next());
        assert_eq!(Some(&123), entries.next());

        // Appending while an iterator is live: since links are only ever
        // added at the tail, the live iterator must observe the new entry.
        list.append(345);

        assert_eq!(Some(&234), entries.next());
        assert_eq!(Some(&345), entries.next());
        assert_eq!(None, entries.next());
    }
}