1use super::util::range_move;
5use crate::static_assert_size;
6use ahash::RandomState;
7use core::fmt;
8use core::ops::{AddAssign, Div, Range, SubAssign};
9use derive_more::{Add, Sub};
10use spacetimedb_data_structures::map::ValidAsIdentityHash;
11use spacetimedb_sats::layout::Size;
12use spacetimedb_sats::memory_usage::MemoryUsage;
13use spacetimedb_sats::{impl_deserialize, impl_serialize};
14
/// A single byte of page storage.
pub type Byte = u8;

/// A slice of bytes within a page.
pub type Bytes = [Byte];

/// Total size of a page in bytes: 64 KiB (`u16::MAX + 1`),
/// so every byte within a page is addressable by a `u16` offset.
pub(crate) const PAGE_SIZE: usize = u16::MAX as usize + 1;

/// Number of bytes reserved for the page header.
/// NOTE(review): assumed to match the size of the actual header struct
/// defined elsewhere — confirm against that definition.
pub(crate) const PAGE_HEADER_SIZE: usize = 64;

/// Number of bytes in a page usable for row data: the total size minus the header.
pub const PAGE_DATA_SIZE: usize = PAGE_SIZE - PAGE_HEADER_SIZE;
41
/// The hash of a row, stored as a plain `u64`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(any(test, feature = "proptest"), derive(proptest_derive::Arbitrary))]
pub struct RowHash(pub u64);

impl MemoryUsage for RowHash {}

static_assert_size!(RowHash, 8);

// The wrapped value is already a hash, so maps may use it directly as the key's hash.
impl ValidAsIdentityHash for RowHash {}

impl RowHash {
    /// Returns the hasher builder used to produce `RowHash`es.
    pub fn hasher_builder() -> RandomState {
        // Fixed seed: hashing is deterministic across runs and processes.
        RandomState::with_seed(0x42)
    }
}
72
/// The offset of a byte within a page's row data, in `0..=PAGE_DATA_SIZE`.
///
/// Fits in a `u16` because `PAGE_DATA_SIZE < PAGE_SIZE = u16::MAX + 1`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Add, Sub, bytemuck::NoUninit)]
#[repr(transparent)]
#[cfg_attr(any(test, feature = "proptest"), derive(proptest_derive::Arbitrary))]
pub struct PageOffset(
    // The `Arbitrary` impl only generates offsets strictly below `PAGE_END`.
    #[cfg_attr(any(test, feature = "proptest"), proptest(strategy = "0..PageOffset::PAGE_END.0"))] pub u16,
);

impl MemoryUsage for PageOffset {}

static_assert_size!(PageOffset, 2);

// (De)serialized as the bare `u16` it wraps.
impl_serialize!([] PageOffset, (self, ser) => self.0.serialize(ser));
impl_deserialize!([] PageOffset, de => u16::deserialize(de).map(PageOffset));
88
impl PageOffset {
    /// Returns this offset as a `usize` index.
    #[inline]
    pub const fn idx(self) -> usize {
        self.0 as usize
    }

    /// Returns whether this offset is the var-len NULL sentinel ([`Self::VAR_LEN_NULL`]).
    #[inline]
    pub const fn is_var_len_null(self) -> bool {
        self.0 == Self::VAR_LEN_NULL.0
    }

    /// Sentinel offset (0) representing NULL for var-len references.
    /// NOTE(review): relies on offset 0 never being a valid var-len
    /// object location — confirm against the page layout.
    pub const VAR_LEN_NULL: Self = Self(0);

    /// Returns whether this offset is one-past-the-end of the page's row data.
    #[inline]
    pub const fn is_at_end(self) -> bool {
        self.0 == Self::PAGE_END.0
    }

    /// One past the last addressable data byte; `PAGE_DATA_SIZE` fits in a `u16`.
    pub const PAGE_END: Self = Self(PAGE_DATA_SIZE as u16);

    /// Returns the byte range starting at this offset and spanning `size` bytes,
    /// i.e. `self.idx()..self.idx() + size.len()`.
    #[inline]
    pub const fn range(self, size: Size) -> Range<usize> {
        range_move(0..size.len(), self.idx())
    }
}
142
143impl PartialEq<Size> for PageOffset {
144 #[inline]
145 fn eq(&self, other: &Size) -> bool {
146 self.0 == other.0
147 }
148}
149
150impl AddAssign<Size> for PageOffset {
151 #[inline]
152 fn add_assign(&mut self, rhs: Size) {
153 self.0 += rhs.0;
154 }
155}
156
157impl SubAssign<Size> for PageOffset {
158 #[inline]
159 fn sub_assign(&mut self, rhs: Size) {
160 self.0 -= rhs.0;
161 }
162}
163
164impl Div<Size> for PageOffset {
165 type Output = usize;
166
167 #[inline]
168 fn div(self, size: Size) -> Self::Output {
169 self.idx() / size.len()
170 }
171}
172
173impl fmt::LowerHex for PageOffset {
174 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
175 fmt::LowerHex::fmt(&self.0, f)
176 }
177}
178
179#[inline]
181pub fn max_rows_in_page(fixed_row_size: Size) -> usize {
182 PageOffset::PAGE_END.idx().div_ceil(fixed_row_size.len())
183}
184
/// The index of a page within a collection of pages.
///
/// Only the low 39 bits are significant (`MASK_PI`), matching the
/// page-index field width inside a packed `RowPointer`.
#[cfg_attr(any(test, feature = "proptest"), derive(proptest_derive::Arbitrary))]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct PageIndex(#[cfg_attr(any(test, feature = "proptest"), proptest(strategy = "0..MASK_PI"))] pub u64);

impl MemoryUsage for PageIndex {}

static_assert_size!(PageIndex, 8);

impl PageIndex {
    /// The largest index representable in a `RowPointer`'s page-index field.
    pub const MAX: Self = Self(MASK_PI);

    /// Returns this index as a `usize`.
    #[inline]
    pub const fn idx(self) -> usize {
        self.0 as usize
    }
}
207
/// A single-byte tag distinguishing which state a row lives in:
/// the transaction state (`TX_STATE` = 0) or the committed state
/// (`COMMITTED_STATE` = 1).
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(any(test, feature = "proptest"), derive(proptest_derive::Arbitrary))]
pub struct SquashedOffset(pub u8);

impl MemoryUsage for SquashedOffset {}

static_assert_size!(SquashedOffset, 1);

impl SquashedOffset {
    /// Returns whether this refers to the transaction state.
    #[inline]
    pub const fn is_tx_state(self) -> bool {
        self.0 == Self::TX_STATE.0
    }

    /// Tag for the transaction (uncommitted) state.
    pub const TX_STATE: Self = Self(0);

    /// Returns whether this refers to the committed state.
    #[inline]
    pub const fn is_committed_state(self) -> bool {
        self.0 == Self::COMMITTED_STATE.0
    }

    /// Tag for the committed state.
    pub const COMMITTED_STATE: Self = Self(1);
}
244
/// A pointer to a row, packed into a single `u64`.
///
/// From least to most significant bit (see the `OFFSET_*`/`BITS_*` constants):
/// 1 reserved bit, 39 bits of page index, 16 bits of page offset,
/// and 8 bits of squashed offset.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]
pub struct RowPointer(pub u64);

impl MemoryUsage for RowPointer {}

static_assert_size!(RowPointer, 8);
255
// Bit layout of a `RowPointer`'s `u64`, least to most significant:
// [reserved: 1][page index: 39][page offset: 16][squashed offset: 8] = 64 bits.
// `OFFSET_*` is each field's shift; `BITS_*` is its width.
const OFFSET_RB: u64 = 0;
const BITS_RB: u64 = 1;
const OFFSET_PI: u64 = OFFSET_RB + BITS_RB;
const BITS_PI: u64 = 39;
const OFFSET_PO: u64 = OFFSET_PI + BITS_PI;
const BITS_PO: u64 = 16;
const OFFSET_SQ: u64 = OFFSET_PO + BITS_PO;
const BITS_SQ: u64 = 8;

// `MASK_*` selects a field's bits after shifting right by its offset.
const MASK_RB: u64 = (1 << BITS_RB) - 1;
const MASK_PI: u64 = (1 << BITS_PI) - 1;
const MASK_PO: u64 = (1 << BITS_PO) - 1;
const MASK_SQ: u64 = (1 << BITS_SQ) - 1;

// `MASK_ZERO_*` clears a field's bits in place, leaving the others intact.
const MASK_ZERO_RB: u64 = !(MASK_RB << OFFSET_RB);
const MASK_ZERO_PI: u64 = !(MASK_PI << OFFSET_PI);
const MASK_ZERO_PO: u64 = !(MASK_PO << OFFSET_PO);
const MASK_ZERO_SQ: u64 = !(MASK_SQ << OFFSET_SQ);
278
impl RowPointer {
    /// Packs the given parts into a pointer.
    ///
    /// The order of `with_*` calls is irrelevant: each touches a disjoint bit field.
    #[inline(always)]
    pub const fn new(
        reserved_bit: bool,
        page_index: PageIndex,
        page_offset: PageOffset,
        squashed_offset: SquashedOffset,
    ) -> Self {
        Self(0)
            .with_reserved_bit(reserved_bit)
            .with_squashed_offset(squashed_offset)
            .with_page_index(page_index)
            .with_page_offset(page_offset)
    }

    /// Extracts the reserved bit.
    #[inline(always)]
    pub const fn reserved_bit(self) -> bool {
        ((self.0 >> OFFSET_RB) & MASK_RB) != 0
    }

    /// Extracts the page index (39 bits).
    #[inline(always)]
    pub const fn page_index(self) -> PageIndex {
        PageIndex((self.0 >> OFFSET_PI) & MASK_PI)
    }

    /// Extracts the page offset (16 bits).
    #[inline(always)]
    pub const fn page_offset(self) -> PageOffset {
        PageOffset(((self.0 >> OFFSET_PO) & MASK_PO) as u16)
    }

    /// Extracts the squashed offset (8 bits).
    #[inline(always)]
    pub const fn squashed_offset(self) -> SquashedOffset {
        SquashedOffset(((self.0 >> OFFSET_SQ) & MASK_SQ) as u8)
    }

    /// Returns a copy with the reserved bit replaced; other fields untouched.
    #[inline(always)]
    pub const fn with_reserved_bit(self, reserved_bit: bool) -> Self {
        Self::with(self, reserved_bit as u64, MASK_RB, OFFSET_RB, MASK_ZERO_RB)
    }

    /// Returns a copy with the page index replaced; other fields untouched.
    #[inline(always)]
    pub const fn with_page_index(self, page_index: PageIndex) -> Self {
        Self::with(self, page_index.0, MASK_PI, OFFSET_PI, MASK_ZERO_PI)
    }

    /// Returns a copy with the page offset replaced; other fields untouched.
    #[inline(always)]
    pub const fn with_page_offset(self, page_offset: PageOffset) -> Self {
        Self::with(self, page_offset.0 as u64, MASK_PO, OFFSET_PO, MASK_ZERO_PO)
    }

    /// Returns a copy with the squashed offset replaced; other fields untouched.
    #[inline(always)]
    pub const fn with_squashed_offset(self, squashed_offset: SquashedOffset) -> Self {
        Self::with(self, squashed_offset.0 as u64, MASK_SQ, OFFSET_SQ, MASK_ZERO_SQ)
    }

    /// Returns a copy with one bit field replaced:
    /// clears the field via `zero`, then ORs in `v` masked to `mask` and
    /// shifted into position by `offset`.
    #[inline(always)]
    const fn with(self, v: u64, mask: u64, offset: u64, zero: u64) -> Self {
        let vmoved = (v & mask) << offset;
        Self((self.0 & zero) | vmoved)
    }
}
355
356impl fmt::Debug for RowPointer {
357 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
358 write!(
359 f,
360 "RowPointer(r: {:?}, pi: {:?}, po: {:?}, so: {:?})",
361 self.reserved_bit() as u8,
362 self.page_index().idx(),
363 self.page_offset().idx(),
364 self.squashed_offset().0,
365 )
366 }
367}
368
#[cfg(test)]
mod test {
    use super::*;
    use proptest::prelude::*;

    proptest! {
        // Round-trip: `new` followed by each getter recovers the parts,
        // and each `with_*` setter replaces only its own field.
        #[test]
        fn row_pointer_ops_work(
            ((rb1, pi1, po1, so1), (rb2, pi2, po2, so2)) in (
                (any::<bool>(), any::<PageIndex>(), any::<PageOffset>(), any::<SquashedOffset>()),
                (any::<bool>(), any::<PageIndex>(), any::<PageOffset>(), any::<SquashedOffset>()),
            )) {
            // Asserts that `ptr` unpacks to exactly the given parts.
            let check = |rb, pi, po, so, ptr: RowPointer| {
                prop_assert_eq!(rb, ptr.reserved_bit());
                prop_assert_eq!(pi, ptr.page_index());
                prop_assert_eq!(po, ptr.page_offset());
                prop_assert_eq!(so, ptr.squashed_offset());
                Ok(())
            };
            let ptr = RowPointer::new(rb1, pi1, po1, so1);
            check(rb1, pi1, po1, so1, ptr)?;
            check(rb2, pi1, po1, so1, ptr.with_reserved_bit(rb2))?;
            check(rb1, pi2, po1, so1, ptr.with_page_index(pi2))?;
            check(rb1, pi1, po2, so1, ptr.with_page_offset(po2))?;
            check(rb1, pi1, po1, so2, ptr.with_squashed_offset(so2))?;
        }
    }
}