// comet/internal/block_list.rs
use crate::block::Block;
use crossbeam_utils::atomic::AtomicCell;
use std::{
    ptr::null_mut,
    // `self` is required: the bodies below refer to `atomic::Ordering::…`,
    // which needs the `atomic` module itself in scope, not just `AtomicUsize`.
    sync::atomic::{self, AtomicUsize},
};
4#[derive(Clone)]
5pub struct AllBlockList {
6 head: *mut Block,
7}
8
9impl AllBlockList {
10 pub fn new() -> Self {
11 Self { head: null_mut() }
12 }
13
14 pub fn push(&mut self, block: *mut Block) {
15 unsafe {
16 (*block).all_next = self.head;
17 self.head = block;
18 }
19 }
20
21 pub fn pop(&mut self) -> *mut Block {
22 unsafe {
23 if self.head.is_null() {
24 return null_mut();
25 }
26 let head = self.head;
27 self.head = (*head).all_next;
28 head
29 }
30 }
31
32 pub fn is_empty(&self) -> bool {
33 self.head.is_null()
34 }
35}
36
37#[derive(Clone)]
38pub struct BlockList {
39 head: *mut Block,
40}
41
42impl BlockList {
43 pub fn new() -> Self {
44 Self { head: null_mut() }
45 }
46 pub fn for_each(&self, mut visitor: impl FnMut(*mut Block)) {
47 unsafe {
48 let mut head = self.head;
49 while !head.is_null() {
50 visitor(head);
51 head = (*head).next;
52 }
53 }
54 }
55 pub fn push(&mut self, block: *mut Block) {
56 unsafe {
57 (*block).next = self.head;
58 self.head = block;
59 }
60 }
61
62 pub fn pop(&mut self) -> *mut Block {
63 unsafe {
64 if self.head.is_null() {
65 return null_mut();
66 }
67 let head = self.head;
68 self.head = (*head).next;
69 head
70 }
71 }
72
73 pub fn is_empty(&self) -> bool {
74 self.head.is_null()
75 }
76}
77
/// Lock-free intrusive list of free blocks, threaded through `Block::next`
/// and shared between threads.
pub struct AtomicBlockList {
    // Head of the list; null when empty. Updated via compare-exchange.
    next: AtomicCell<*mut Block>,
    // Number of blocks in the list. Incremented/decremented after the CAS
    // succeeds, so concurrent readers may observe a slightly stale count.
    count: AtomicUsize,
}
83impl Clone for AtomicBlockList {
84 fn clone(&self) -> Self {
85 Self {
86 next: AtomicCell::new(null_mut()),
87 count: AtomicUsize::new(0),
88 }
89 }
90}
impl AtomicBlockList {
    /// Creates an empty list.
    pub fn new() -> Self {
        Self {
            count: AtomicUsize::new(0),
            next: AtomicCell::new(null_mut()),
        }
    }
    /// Returns a snapshot of the head pointer (null when empty). Under
    /// concurrent pushes/pops the value may be stale by the time it is used.
    pub fn head(&self) -> *mut Block {
        self.next.load()
    }
    /// Pushes `free` onto the front of the list (lock-free CAS loop).
    ///
    /// # Safety
    /// `free` must point to a valid, live `Block` that is not already linked
    /// into this list; its `next` field is overwritten here.
    pub unsafe fn add_free(&self, free: *mut Block) {
        let new_slot = free;
        let mut next = self.next.load();
        loop {
            // Pushing the current head onto itself would create a cycle.
            debug_assert_ne!(new_slot, next);
            // Link the candidate node to the observed head, then try to
            // publish it; on failure, retry against the freshly observed head.
            (*new_slot).next = next;
            match self.next.compare_exchange(next, new_slot) {
                Ok(_) => {
                    // Count is bumped only after the node is published, so it
                    // can momentarily lag behind the real list length.
                    self.count.fetch_add(1, atomic::Ordering::AcqRel);
                    return;
                }
                Err(actual_next) => {
                    next = actual_next;
                }
            }
        }
    }
    /// Pops the head of the list, returning null when the list is empty
    /// (lock-free CAS loop).
    ///
    /// NOTE(review): a plain pointer compare-exchange pop is subject to the
    /// classic ABA hazard if a popped block can be freed and re-pushed while
    /// another thread is mid-pop — verify the allocator's usage rules out
    /// that interleaving.
    #[inline]
    pub fn take_free(&self) -> *mut Block {
        loop {
            unsafe {
                let next_free = match self.next.load() {
                    x if x.is_null() => return null_mut(),
                    x => x,
                };
                // A self-referential head would spin forever.
                debug_assert_ne!(next_free, (*next_free).next);
                // Try to swing the head to the second node; on contention,
                // re-read the head and retry.
                if self
                    .next
                    .compare_exchange(next_free, (*next_free).next)
                    .is_err()
                {
                    continue;
                }
                self.count.fetch_sub(1, atomic::Ordering::AcqRel);
                return next_free;
            }
        }
    }

    /// Returns the number of blocks in the list; approximate while other
    /// threads are pushing or popping (see `add_free`/`take_free`).
    #[inline]
    pub fn count(&self) -> usize {
        self.count.load(atomic::Ordering::Acquire)
    }
}