1use super::util::range_move;
5use crate::{static_assert_size, MemoryUsage};
6use ahash::RandomState;
7use core::fmt;
8use core::ops::{AddAssign, Div, Mul, Range, SubAssign};
9use derive_more::{Add, Sub};
10use spacetimedb_data_structures::map::ValidAsIdentityHash;
11use spacetimedb_sats::{impl_deserialize, impl_serialize};
12
/// A single byte.
pub type Byte = u8;

/// A slice of bytes.
pub type Bytes = [Byte];
22
/// The total size of a page in bytes: 64 KiB (`u16::MAX + 1`),
/// so that any in-page position fits in a `u16`.
pub(crate) const PAGE_SIZE: usize = u16::MAX as usize + 1;

/// The number of bytes at the start of a page reserved for its header.
// NOTE(review): assumed to match the actual header layout defined elsewhere — confirm.
pub(crate) const PAGE_HEADER_SIZE: usize = 64;

/// The size in bytes of a page's data section: the whole page minus the header.
pub const PAGE_DATA_SIZE: usize = PAGE_SIZE - PAGE_HEADER_SIZE;
39
/// The hash of a row, stored as a `u64`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(any(test, feature = "proptest"), derive(proptest_derive::Arbitrary))]
pub struct RowHash(pub u64);
55
impl MemoryUsage for RowHash {}

static_assert_size!(RowHash, 8);

// A `RowHash` is already a hash value,
// so it is valid to use the identity function as its hasher.
impl ValidAsIdentityHash for RowHash {}
62
63impl RowHash {
64 pub fn hasher_builder() -> RandomState {
66 RandomState::with_seed(0x42)
68 }
69}
70
/// The size, in bytes, of some entity within a page.
/// Fits in a `u16` since offsets within a page do (see `PAGE_SIZE`).
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash, Add, Sub)]
pub struct Size(pub u16);

impl MemoryUsage for Size {}

// Serialize/deserialize as the underlying `u16`.
impl_serialize!([] Size, (self, ser) => self.0.serialize(ser));
impl_deserialize!([] Size, de => u16::deserialize(de).map(Size));
80
81impl Size {
82 #[inline]
84 #[allow(clippy::len_without_is_empty)]
85 pub const fn len(self) -> usize {
86 self.0 as usize
87 }
88}
89
90impl Mul<usize> for Size {
91 type Output = Size;
92
93 #[inline]
94 fn mul(self, rhs: usize) -> Self::Output {
95 Size((self.len() * rhs) as u16)
96 }
97}
98
/// An offset into the data section of a page, in bytes.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Add, Sub, bytemuck::NoUninit)]
#[repr(transparent)]
#[cfg_attr(any(test, feature = "proptest"), derive(proptest_derive::Arbitrary))]
pub struct PageOffset(
    // Arbitrary offsets are constrained to `0..PAGE_END` so they stay within the page.
    #[cfg_attr(any(test, feature = "proptest"), proptest(strategy = "0..PageOffset::PAGE_END.0"))] pub u16,
);

impl MemoryUsage for PageOffset {}

static_assert_size!(PageOffset, 2);

// Serialize/deserialize as the underlying `u16`.
impl_serialize!([] PageOffset, (self, ser) => self.0.serialize(ser));
impl_deserialize!([] PageOffset, de => u16::deserialize(de).map(PageOffset));
114
115impl PageOffset {
116 #[inline]
118 pub const fn idx(self) -> usize {
119 self.0 as usize
120 }
121
122 #[inline]
133 pub const fn is_var_len_null(self) -> bool {
134 self.0 == Self::VAR_LEN_NULL.0
135 }
136
137 pub const VAR_LEN_NULL: Self = Self(0);
148
149 #[inline]
151 pub const fn is_at_end(self) -> bool {
152 self.0 == Self::PAGE_END.0
153 }
154
155 pub const PAGE_END: Self = Self(PAGE_DATA_SIZE as u16);
161
162 #[inline]
164 pub const fn range(self, size: Size) -> Range<usize> {
165 range_move(0..size.len(), self.idx())
166 }
167}
168
169impl PartialEq<Size> for PageOffset {
170 #[inline]
171 fn eq(&self, other: &Size) -> bool {
172 self.0 == other.0
173 }
174}
175
176impl AddAssign<Size> for PageOffset {
177 #[inline]
178 fn add_assign(&mut self, rhs: Size) {
179 self.0 += rhs.0;
180 }
181}
182
183impl SubAssign<Size> for PageOffset {
184 #[inline]
185 fn sub_assign(&mut self, rhs: Size) {
186 self.0 -= rhs.0;
187 }
188}
189
190impl Div<Size> for PageOffset {
191 type Output = usize;
192
193 #[inline]
194 fn div(self, size: Size) -> Self::Output {
195 self.idx() / size.len()
196 }
197}
198
199impl fmt::LowerHex for PageOffset {
200 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
201 fmt::LowerHex::fmt(&self.0, f)
202 }
203}
204
205#[inline]
207pub fn max_rows_in_page(fixed_row_size: Size) -> usize {
208 PageOffset::PAGE_END.idx().div_ceil(fixed_row_size.len())
209}
210
/// The index of a page.
///
/// Stored as 39 bits (see `MASK_PI`) so that it fits in the
/// page-index field of a `RowPointer`.
#[cfg_attr(any(test, feature = "proptest"), derive(proptest_derive::Arbitrary))]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct PageIndex(#[cfg_attr(any(test, feature = "proptest"), proptest(strategy = "0..MASK_PI"))] pub u64);

impl MemoryUsage for PageIndex {}

static_assert_size!(PageIndex, 8);
219
220impl PageIndex {
221 pub const MAX: Self = Self(MASK_PI);
226
227 #[inline]
229 pub const fn idx(self) -> usize {
230 self.0 as usize
231 }
232}
233
/// A tag distinguishing which state a row belongs to:
/// the transaction state (`SquashedOffset::TX_STATE`)
/// or the committed state (`SquashedOffset::COMMITTED_STATE`).
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(any(test, feature = "proptest"), derive(proptest_derive::Arbitrary))]
pub struct SquashedOffset(pub u8);

impl MemoryUsage for SquashedOffset {}

static_assert_size!(SquashedOffset, 1);
250
251impl SquashedOffset {
252 #[inline]
254 pub const fn is_tx_state(self) -> bool {
255 self.0 == Self::TX_STATE.0
256 }
257
258 pub const TX_STATE: Self = Self(0);
260
261 #[inline]
263 pub const fn is_committed_state(self) -> bool {
264 self.0 == Self::COMMITTED_STATE.0
265 }
266
267 pub const COMMITTED_STATE: Self = Self(1);
269}
270
/// A pointer to a row, packed into a single `u64`.
///
/// From the least significant bit upward, it packs a 1-bit reserved flag,
/// a 39-bit `PageIndex`, a 16-bit `PageOffset`, and an 8-bit `SquashedOffset`
/// (see the `OFFSET_*` / `BITS_*` constants below).
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]
pub struct RowPointer(pub u64);

impl MemoryUsage for RowPointer {}

static_assert_size!(RowPointer, 8);
281
// Bit layout of a `RowPointer`, from the least significant bit upward:
//
//   bit  0       reserved bit    (RB,  1 bit)
//   bits 1..40   page index      (PI, 39 bits)
//   bits 40..56  page offset     (PO, 16 bits)
//   bits 56..64  squashed offset (SQ,  8 bits)
//
// Offsets and widths are defined cumulatively so the fields are
// guaranteed disjoint and cover exactly 64 bits.

/// Bit position of the reserved-bit field.
const OFFSET_RB: u64 = 0;
/// Width in bits of the reserved-bit field.
const BITS_RB: u64 = 1;
/// Bit position of the page-index field.
const OFFSET_PI: u64 = OFFSET_RB + BITS_RB;
/// Width in bits of the page-index field.
const BITS_PI: u64 = 39;
/// Bit position of the page-offset field.
const OFFSET_PO: u64 = OFFSET_PI + BITS_PI;
/// Width in bits of the page-offset field.
const BITS_PO: u64 = 16;
/// Bit position of the squashed-offset field.
const OFFSET_SQ: u64 = OFFSET_PO + BITS_PO;
/// Width in bits of the squashed-offset field.
const BITS_SQ: u64 = 8;

// Unshifted masks covering each field's width (applied after shifting right).
const MASK_RB: u64 = (1 << BITS_RB) - 1;
const MASK_PI: u64 = (1 << BITS_PI) - 1;
const MASK_PO: u64 = (1 << BITS_PO) - 1;
const MASK_SQ: u64 = (1 << BITS_SQ) - 1;

// Masks that zero out one field in place, leaving the other fields intact.
const MASK_ZERO_RB: u64 = !(MASK_RB << OFFSET_RB);
const MASK_ZERO_PI: u64 = !(MASK_PI << OFFSET_PI);
const MASK_ZERO_PO: u64 = !(MASK_PO << OFFSET_PO);
const MASK_ZERO_SQ: u64 = !(MASK_SQ << OFFSET_SQ);
304
impl RowPointer {
    /// Returns a row pointer that packs together the given
    /// `reserved_bit`, `page_index`, `page_offset`, and `squashed_offset`.
    #[inline(always)]
    pub const fn new(
        reserved_bit: bool,
        page_index: PageIndex,
        page_offset: PageOffset,
        squashed_offset: SquashedOffset,
    ) -> Self {
        // Start from all-zero bits and install each field in turn;
        // the fields are disjoint, so the order of the `with_*` calls doesn't matter.
        Self(0)
            .with_reserved_bit(reserved_bit)
            .with_squashed_offset(squashed_offset)
            .with_page_index(page_index)
            .with_page_offset(page_offset)
    }

    /// Returns the reserved bit of this pointer.
    // NOTE(review): the meaning of the reserved bit is not defined in this file — see callers.
    #[inline(always)]
    pub const fn reserved_bit(self) -> bool {
        ((self.0 >> OFFSET_RB) & MASK_RB) != 0
    }

    /// Returns the page index of this pointer.
    #[inline(always)]
    pub const fn page_index(self) -> PageIndex {
        PageIndex((self.0 >> OFFSET_PI) & MASK_PI)
    }

    /// Returns the page offset of this pointer.
    #[inline(always)]
    pub const fn page_offset(self) -> PageOffset {
        PageOffset(((self.0 >> OFFSET_PO) & MASK_PO) as u16)
    }

    /// Returns the squashed offset of this pointer.
    #[inline(always)]
    pub const fn squashed_offset(self) -> SquashedOffset {
        SquashedOffset(((self.0 >> OFFSET_SQ) & MASK_SQ) as u8)
    }

    /// Returns a copy of this pointer with the reserved bit replaced by `reserved_bit`.
    #[inline(always)]
    pub const fn with_reserved_bit(self, reserved_bit: bool) -> Self {
        Self::with(self, reserved_bit as u64, MASK_RB, OFFSET_RB, MASK_ZERO_RB)
    }

    /// Returns a copy of this pointer with the page index replaced by `page_index`.
    #[inline(always)]
    pub const fn with_page_index(self, page_index: PageIndex) -> Self {
        Self::with(self, page_index.0, MASK_PI, OFFSET_PI, MASK_ZERO_PI)
    }

    /// Returns a copy of this pointer with the page offset replaced by `page_offset`.
    #[inline(always)]
    pub const fn with_page_offset(self, page_offset: PageOffset) -> Self {
        Self::with(self, page_offset.0 as u64, MASK_PO, OFFSET_PO, MASK_ZERO_PO)
    }

    /// Returns a copy of this pointer with the squashed offset replaced by `squashed_offset`.
    #[inline(always)]
    pub const fn with_squashed_offset(self, squashed_offset: SquashedOffset) -> Self {
        Self::with(self, squashed_offset.0 as u64, MASK_SQ, OFFSET_SQ, MASK_ZERO_SQ)
    }

    /// Returns a copy of this pointer where the field selected by `mask` (its width)
    /// and `offset` (its bit position) is replaced by `v`.
    /// `zero` must be the complement of the shifted mask, i.e. it clears that field.
    #[inline(always)]
    const fn with(self, v: u64, mask: u64, offset: u64, zero: u64) -> Self {
        // Truncate `v` to the field width and move it into position,
        // then clear the field's old bits and OR in the new value.
        let vmoved = (v & mask) << offset;
        Self((self.0 & zero) | vmoved)
    }
}
381
382impl fmt::Debug for RowPointer {
383 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
384 write!(
385 f,
386 "RowPointer(r: {:?}, pi: {:?}, po: {:?}, so: {:?})",
387 self.reserved_bit() as u8,
388 self.page_index().idx(),
389 self.page_offset().idx(),
390 self.squashed_offset().0,
391 )
392 }
393}
394
#[cfg(test)]
mod test {
    use super::*;
    use proptest::prelude::*;

    proptest! {
        // Round-trip property: constructing a `RowPointer` from four fields and
        // reading them back yields the originals, and each `with_*` setter
        // replaces only its own field while leaving the other three untouched.
        #[test]
        fn row_pointer_ops_work(
            ((rb1, pi1, po1, so1), (rb2, pi2, po2, so2)) in (
                (any::<bool>(), any::<PageIndex>(), any::<PageOffset>(), any::<SquashedOffset>()),
                (any::<bool>(), any::<PageIndex>(), any::<PageOffset>(), any::<SquashedOffset>()),
            )) {
            // Asserts that `ptr` unpacks to exactly the given four fields.
            let check = |rb, pi, po, so, ptr: RowPointer| {
                prop_assert_eq!(rb, ptr.reserved_bit());
                prop_assert_eq!(pi, ptr.page_index());
                prop_assert_eq!(po, ptr.page_offset());
                prop_assert_eq!(so, ptr.squashed_offset());
                Ok(())
            };
            let ptr = RowPointer::new(rb1, pi1, po1, so1);
            check(rb1, pi1, po1, so1, ptr)?;
            check(rb2, pi1, po1, so1, ptr.with_reserved_bit(rb2))?;
            check(rb1, pi2, po1, so1, ptr.with_page_index(pi2))?;
            check(rb1, pi1, po2, so1, ptr.with_page_offset(po2))?;
            check(rb1, pi1, po1, so2, ptr.with_squashed_offset(so2))?;
        }
    }
}