1use std::mem;
7use std::sync::atomic::{AtomicU64, Ordering};
8
9use crate::address::Address;
10
11#[cfg(feature = "f2")]
12use bytemuck::Pod;
13
/// Atomic 64-bit record header.
///
/// The single control word packs, from low to high bits (see the bit
/// constants on the impl): a 48-bit previous address (whose top bit, 47,
/// doubles as the read-cache marker), a 13-bit checkpoint version at
/// bits 48-60, and the invalid / tombstone / final flags at bits 61-63.
/// All reads and writes go through atomic operations on this one word.
#[repr(C)]
pub struct RecordInfo {
    // Packed control word; layout is defined by the masks/shifts in the impl.
    control: AtomicU64,
}
26
27impl RecordInfo {
28 const PREV_ADDR_MASK: u64 = (1 << 48) - 1;
30
31 const VERSION_SHIFT: u32 = 48;
33 const VERSION_MASK: u64 = (1 << 13) - 1;
35
36 const INVALID_BIT: u64 = 1 << 61;
38 const TOMBSTONE_BIT: u64 = 1 << 62;
40 const FINAL_BIT: u64 = 1 << 63;
42
43 const READ_CACHE_BIT: u64 = 1 << 47;
45
46 pub fn new(
48 previous_address: Address,
49 checkpoint_version: u16,
50 invalid: bool,
51 tombstone: bool,
52 final_bit: bool,
53 ) -> Self {
54 let mut control = previous_address.control() & Self::PREV_ADDR_MASK;
55 control |= ((checkpoint_version as u64) & Self::VERSION_MASK) << Self::VERSION_SHIFT;
56 if invalid {
57 control |= Self::INVALID_BIT;
58 }
59 if tombstone {
60 control |= Self::TOMBSTONE_BIT;
61 }
62 if final_bit {
63 control |= Self::FINAL_BIT;
64 }
65 Self {
66 control: AtomicU64::new(control),
67 }
68 }
69
70 pub fn from_control(control: u64) -> Self {
72 Self {
73 control: AtomicU64::new(control),
74 }
75 }
76
77 #[inline]
79 pub fn is_null(&self) -> bool {
80 self.control.load(Ordering::Acquire) == 0
81 }
82
83 #[inline]
85 pub fn previous_address(&self) -> Address {
86 Address::from_control(self.control.load(Ordering::Acquire) & Self::PREV_ADDR_MASK)
87 }
88
89 #[inline]
91 pub fn set_previous_address(&self, addr: Address) {
92 let mut current = self.control.load(Ordering::Acquire);
93 loop {
94 let new_val =
95 (current & !Self::PREV_ADDR_MASK) | (addr.control() & Self::PREV_ADDR_MASK);
96 match self.control.compare_exchange_weak(
97 current,
98 new_val,
99 Ordering::AcqRel,
100 Ordering::Acquire,
101 ) {
102 Ok(_) => break,
103 Err(actual) => current = actual,
104 }
105 }
106 }
107
108 #[inline]
110 pub fn checkpoint_version(&self) -> u16 {
111 ((self.control.load(Ordering::Acquire) >> Self::VERSION_SHIFT) & Self::VERSION_MASK) as u16
112 }
113
114 #[inline]
116 pub fn is_invalid(&self) -> bool {
117 (self.control.load(Ordering::Acquire) & Self::INVALID_BIT) != 0
118 }
119
120 #[inline]
122 pub fn set_invalid(&self, invalid: bool) {
123 if invalid {
124 self.control.fetch_or(Self::INVALID_BIT, Ordering::AcqRel);
125 } else {
126 self.control.fetch_and(!Self::INVALID_BIT, Ordering::AcqRel);
127 }
128 }
129
130 #[inline]
132 pub fn is_tombstone(&self) -> bool {
133 (self.control.load(Ordering::Acquire) & Self::TOMBSTONE_BIT) != 0
134 }
135
136 #[inline]
138 pub fn set_tombstone(&self, tombstone: bool) {
139 if tombstone {
140 self.control.fetch_or(Self::TOMBSTONE_BIT, Ordering::AcqRel);
141 } else {
142 self.control
143 .fetch_and(!Self::TOMBSTONE_BIT, Ordering::AcqRel);
144 }
145 }
146
147 #[inline]
149 pub fn is_final(&self) -> bool {
150 (self.control.load(Ordering::Acquire) & Self::FINAL_BIT) != 0
151 }
152
153 #[inline]
155 pub fn in_read_cache(&self) -> bool {
156 (self.control.load(Ordering::Acquire) & Self::READ_CACHE_BIT) != 0
157 }
158
159 #[inline]
161 pub fn control(&self) -> u64 {
162 self.control.load(Ordering::Acquire)
163 }
164
165 #[inline]
167 pub fn load(&self, ordering: Ordering) -> u64 {
168 self.control.load(ordering)
169 }
170
171 #[inline]
173 pub fn store(&self, value: u64, ordering: Ordering) {
174 self.control.store(value, ordering);
175 }
176}
177
178impl Clone for RecordInfo {
179 fn clone(&self) -> Self {
180 Self {
181 control: AtomicU64::new(self.control.load(Ordering::Acquire)),
182 }
183 }
184}
185
186impl Default for RecordInfo {
187 fn default() -> Self {
188 Self {
189 control: AtomicU64::new(0),
190 }
191 }
192}
193
194impl std::fmt::Debug for RecordInfo {
195 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
196 f.debug_struct("RecordInfo")
197 .field("previous_address", &self.previous_address())
198 .field("checkpoint_version", &self.checkpoint_version())
199 .field("invalid", &self.is_invalid())
200 .field("tombstone", &self.is_tombstone())
201 .field("final", &self.is_final())
202 .finish()
203 }
204}
205
/// Typed view over an on-log record: a `RecordInfo` header followed by a
/// key and a value, laid out at the offsets computed in the impl below.
/// The key/value bytes themselves are accessed through raw pointers, not
/// through fields, so only the header and a marker are declared here.
#[cfg(feature = "f2")]
#[repr(C)]
pub(crate) struct Record<K: Pod, V: Pod> {
    pub(crate) header: RecordInfo,
    // Ties the K/V type parameters to the struct without storing them.
    _marker: std::marker::PhantomData<(K, V)>,
}
212
#[cfg(feature = "f2")]
impl<K: Pod, V: Pod> Record<K, V> {
    /// Records are padded out to 8-byte boundaries on the log.
    const ALIGN: usize = 8;

    /// Total on-log footprint: header + key + value, rounded up to `ALIGN`.
    #[inline]
    pub(crate) const fn size() -> usize {
        let unpadded = mem::size_of::<RecordInfo>() + mem::size_of::<K>() + mem::size_of::<V>();
        Self::align_up(unpadded, Self::ALIGN)
    }

    /// Byte offset of the key: immediately after the header.
    #[inline]
    pub(crate) const fn key_offset() -> usize {
        mem::size_of::<RecordInfo>()
    }

    /// Byte offset of the value: immediately after the key.
    #[inline]
    pub(crate) const fn value_offset() -> usize {
        Self::key_offset() + mem::size_of::<K>()
    }

    /// Rounds `n` up to the next multiple of `align` (must be a power of two).
    #[inline]
    const fn align_up(n: usize, align: usize) -> usize {
        debug_assert!(align.is_power_of_two());
        let mask = align - 1;
        (n + mask) & !mask
    }

    /// Reads the key out of a raw record image.
    ///
    /// # Safety
    /// `base` must point to a readable record image of at least `size()` bytes.
    #[inline]
    pub(crate) unsafe fn read_key(base: *const u8) -> K {
        // SAFETY: caller guarantees the image spans key_offset() + size_of::<K>();
        // the read is unaligned because the layout carries no padding for K.
        unsafe { std::ptr::read_unaligned(base.add(Self::key_offset()).cast::<K>()) }
    }

    /// Reads the value out of a raw record image.
    ///
    /// # Safety
    /// `base` must point to a readable record image of at least `size()` bytes.
    #[inline]
    pub(crate) unsafe fn read_value(base: *const u8) -> V {
        // SAFETY: caller guarantees the image spans value_offset() + size_of::<V>().
        unsafe { std::ptr::read_unaligned(base.add(Self::value_offset()).cast::<V>()) }
    }

    /// Writes the key into a raw record image.
    ///
    /// # Safety
    /// `base` must point to a writable record image of at least `size()` bytes.
    #[inline]
    pub(crate) unsafe fn write_key(base: *mut u8, key: K) {
        // SAFETY: caller guarantees the image spans key_offset() + size_of::<K>().
        unsafe { std::ptr::write_unaligned(base.add(Self::key_offset()).cast::<K>(), key) }
    }

    /// Writes the value into a raw record image.
    ///
    /// # Safety
    /// `base` must point to a writable record image of at least `size()` bytes.
    #[inline]
    pub(crate) unsafe fn write_value(base: *mut u8, value: V) {
        // SAFETY: caller guarantees the image spans value_offset() + size_of::<V>().
        unsafe { std::ptr::write_unaligned(base.add(Self::value_offset()).cast::<V>(), value) }
    }
}
265
// Compile-time guarantee that the header is exactly one 64-bit word;
// the record byte offsets above are computed from size_of::<RecordInfo>().
const _: () = assert!(mem::size_of::<RecordInfo>() == 8);