1use std::fmt;
8use std::num::NonZeroU64;
9
10use crate::encoding::{
11 append_u16_le, append_u32_le, append_u64_le, read_u16_le, read_u32_le, read_u64_le,
12};
13use crate::{ObjectId, PageData, PageNumber};
14
/// Transaction identifier: nonzero and capped at 62 bits (see
/// [`TxnId::MAX_RAW`]). `NonZeroU64` gives `Option<TxnId>` a free niche,
/// so it is the same size as a plain `u64`.
#[derive(
    Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, serde::Serialize, serde::Deserialize,
)]
#[repr(transparent)]
pub struct TxnId(NonZeroU64);
26
27impl TxnId {
28 pub const MAX_RAW: u64 = (1_u64 << 62) - 1;
30
31 #[inline]
33 pub const fn new(raw: u64) -> Option<Self> {
34 if raw > Self::MAX_RAW {
35 return None;
36 }
37 match NonZeroU64::new(raw) {
38 Some(nz) => Some(Self(nz)),
39 None => None,
40 }
41 }
42
43 #[inline]
45 pub const fn get(self) -> u64 {
46 self.0.get()
47 }
48
49 #[inline]
51 pub const fn checked_next(self) -> Option<Self> {
52 Self::new(self.get().wrapping_add(1))
53 }
54}
55
56impl fmt::Display for TxnId {
57 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
58 write!(f, "txn#{}", self.get())
59 }
60}
61
62impl TryFrom<u64> for TxnId {
63 type Error = InvalidTxnId;
64
65 fn try_from(value: u64) -> Result<Self, Self::Error> {
66 Self::new(value).ok_or(InvalidTxnId { raw: value })
67 }
68}
69
/// Error for a raw value that is not a legal [`TxnId`] (zero or above the
/// 62-bit cap).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct InvalidTxnId {
    // The rejected raw value, echoed in the Display message.
    raw: u64,
}
75
76impl fmt::Display for InvalidTxnId {
77 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
78 write!(
79 f,
80 "invalid TxnId {} (must satisfy 1 <= id <= {})",
81 self.raw,
82 TxnId::MAX_RAW
83 )
84 }
85}
86
87impl std::error::Error for InvalidTxnId {}
88
/// Commit sequence counter newtype over `u64`; [`CommitSeq::ZERO`] marks
/// the state before any commit.
#[derive(
    Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, serde::Serialize, serde::Deserialize,
)]
#[repr(transparent)]
pub struct CommitSeq(u64);
95
96impl CommitSeq {
97 pub const ZERO: Self = Self(0);
98
99 #[inline]
100 pub const fn new(raw: u64) -> Self {
101 Self(raw)
102 }
103
104 #[inline]
105 pub const fn get(self) -> u64 {
106 self.0
107 }
108
109 #[inline]
110 #[must_use]
111 pub const fn next(self) -> Self {
112 Self(self.0.wrapping_add(1))
113 }
114}
115
116impl fmt::Display for CommitSeq {
117 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
118 write!(f, "cs#{}", self.get())
119 }
120}
121
/// 32-bit epoch counter paired with a [`TxnId`] inside a [`TxnToken`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[repr(transparent)]
pub struct TxnEpoch(u32);
126
127impl TxnEpoch {
128 #[inline]
129 pub const fn new(raw: u32) -> Self {
130 Self(raw)
131 }
132
133 #[inline]
134 pub const fn get(self) -> u32 {
135 self.0
136 }
137}
138
/// A [`TxnId`] together with its [`TxnEpoch`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct TxnToken {
    /// The transaction id.
    pub id: TxnId,
    /// The epoch associated with the id.
    pub epoch: TxnEpoch,
}
145
146impl TxnToken {
147 #[inline]
148 pub const fn new(id: TxnId, epoch: TxnEpoch) -> Self {
149 Self { id, epoch }
150 }
151}
152
/// 64-bit schema-epoch counter; [`SchemaEpoch::ZERO`] is the initial epoch.
#[derive(
    Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, serde::Serialize, serde::Deserialize,
)]
#[repr(transparent)]
pub struct SchemaEpoch(u64);
159
160impl SchemaEpoch {
161 pub const ZERO: Self = Self(0);
162
163 #[inline]
164 pub const fn new(raw: u64) -> Self {
165 Self(raw)
166 }
167
168 #[inline]
169 pub const fn get(self) -> u64 {
170 self.0
171 }
172}
173
/// A read snapshot: a commit-sequence upper bound plus the schema epoch
/// it was taken under.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct Snapshot {
    // NOTE(review): presumably the highest commit visible to this
    // snapshot — confirm against the visibility rules.
    pub high: CommitSeq,
    /// Schema epoch in force when the snapshot was taken.
    pub schema_epoch: SchemaEpoch,
}
182
183impl Snapshot {
184 #[inline]
185 pub const fn new(high: CommitSeq, schema_epoch: SchemaEpoch) -> Self {
186 Self { high, schema_epoch }
187 }
188}
189
/// Opaque 64-bit reference to a page version (see [`PageVersion::prev`]).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[repr(transparent)]
pub struct VersionPointer(u64);
197
198impl VersionPointer {
199 #[inline]
200 pub const fn new(raw: u64) -> Self {
201 Self(raw)
202 }
203
204 #[inline]
205 pub const fn get(self) -> u64 {
206 self.0
207 }
208}
209
/// One version of a page in a version chain.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PageVersion {
    /// Page this version belongs to.
    pub pgno: PageNumber,
    /// Commit that produced this version.
    pub commit_seq: CommitSeq,
    /// Transaction token of the writer.
    pub created_by: TxnToken,
    /// The page contents for this version.
    pub data: PageData,
    /// Link to the prior version, if any.
    pub prev: Option<VersionPointer>,
}
219
/// Operating mode selected via pragma (see [`OperatingMode::from_pragma`]);
/// defaults to `Compatibility`.
#[derive(
    Debug, Clone, Copy, PartialEq, Eq, Hash, Default, serde::Serialize, serde::Deserialize,
)]
pub enum OperatingMode {
    /// Default mode; legacy readers are allowed (`legacy_readers_allowed`).
    #[default]
    Compatibility,
    /// Native mode (`is_native`).
    Native,
}
237
238impl fmt::Display for OperatingMode {
239 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
240 match self {
241 Self::Compatibility => f.write_str("compatibility"),
242 Self::Native => f.write_str("native"),
243 }
244 }
245}
246
247impl OperatingMode {
248 #[must_use]
250 pub fn from_pragma(s: &str) -> Option<Self> {
251 let lower = s.trim().to_ascii_lowercase();
252 match lower.as_str() {
253 "compatibility" | "compat" => Some(Self::Compatibility),
254 "native" => Some(Self::Native),
255 _ => None,
256 }
257 }
258
259 #[must_use]
261 pub const fn is_native(self) -> bool {
262 matches!(self, Self::Native)
263 }
264
265 #[must_use]
267 pub const fn legacy_readers_allowed(self) -> bool {
268 matches!(self, Self::Compatibility)
269 }
270}
271
/// Payload describing one transaction's proposed commit.
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct CommitCapsule {
    /// Object id of this capsule.
    pub object_id: ObjectId,
    /// Commit sequence the transaction's snapshot was based on.
    pub snapshot_basis: CommitSeq,
    /// Logical operations making up the transaction.
    pub intent_log: Vec<IntentOp>,
    /// Per-page byte deltas, keyed by page number.
    pub page_deltas: Vec<(PageNumber, Vec<u8>)>,
    /// 32-byte digest over the read set.
    pub read_set_digest: [u8; 32],
    /// 32-byte digest over the write set.
    pub write_set_digest: [u8; 32],
    /// References to read-witness objects.
    pub read_witness_refs: Vec<ObjectId>,
    /// References to write-witness objects.
    pub write_witness_refs: Vec<ObjectId>,
    /// References to dependency-edge objects.
    pub dependency_edge_refs: Vec<ObjectId>,
    /// References to merge-witness objects.
    pub merge_witness_refs: Vec<ObjectId>,
}
301
/// Durable record that a commit happened, chained to its predecessor via
/// `prev_marker` and sealed with an integrity hash.
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct CommitMarker {
    /// Sequence number assigned to the commit.
    pub commit_seq: CommitSeq,
    /// Commit wall-clock time in Unix nanoseconds.
    pub commit_time_unix_ns: u64,
    /// Object id of the committed capsule.
    pub capsule_object_id: ObjectId,
    /// Object id of the associated proof.
    pub proof_object_id: ObjectId,
    /// Previous marker in the chain, if any.
    pub prev_marker: Option<ObjectId>,
    /// xxh3-128 hash over the other fields (see `compute_integrity_hash`).
    pub integrity_hash: [u8; 16],
}
320
/// Byte size of the fixed v1 commit-marker record (83 payload bytes plus
/// 5 bytes of zero padding; see `CommitMarker::to_record_bytes`).
pub const COMMIT_MARKER_RECORD_V1_SIZE: usize = 88;

/// Version tag stored at offset 0 of every commit-marker record.
const COMMIT_MARKER_RECORD_VERSION: u8 = 1;
330
331impl CommitMarker {
332 #[must_use]
334 pub fn to_record_bytes(&self) -> [u8; COMMIT_MARKER_RECORD_V1_SIZE] {
335 let mut buf = [0u8; COMMIT_MARKER_RECORD_V1_SIZE];
336 buf[0] = COMMIT_MARKER_RECORD_VERSION;
337 buf[1] = 0; buf[2..10].copy_from_slice(&self.commit_seq.get().to_le_bytes());
341 buf[10..18].copy_from_slice(&self.commit_time_unix_ns.to_le_bytes());
343 buf[18..34].copy_from_slice(self.capsule_object_id.as_bytes());
345 buf[34..50].copy_from_slice(self.proof_object_id.as_bytes());
347 if let Some(prev) = self.prev_marker {
349 buf[50..66].copy_from_slice(prev.as_bytes());
350 }
351 buf[66] = u8::from(self.prev_marker.is_some());
353 buf[67..83].copy_from_slice(&self.integrity_hash);
355 buf
357 }
358
359 #[must_use]
361 pub fn from_record_bytes(data: &[u8; COMMIT_MARKER_RECORD_V1_SIZE]) -> Option<Self> {
362 if data[0] != COMMIT_MARKER_RECORD_VERSION {
363 return None;
364 }
365
366 let commit_seq = CommitSeq::new(u64::from_le_bytes(data[2..10].try_into().ok()?));
367 let commit_time_unix_ns = u64::from_le_bytes(data[10..18].try_into().ok()?);
368 let capsule_object_id = ObjectId::from_bytes(data[18..34].try_into().ok()?);
369 let proof_object_id = ObjectId::from_bytes(data[34..50].try_into().ok()?);
370 let has_prev = data[66] != 0;
371 let prev_marker = if has_prev {
372 Some(ObjectId::from_bytes(data[50..66].try_into().ok()?))
373 } else {
374 None
375 };
376 let mut integrity_hash = [0u8; 16];
377 integrity_hash.copy_from_slice(&data[67..83]);
378
379 Some(Self {
380 commit_seq,
381 commit_time_unix_ns,
382 capsule_object_id,
383 proof_object_id,
384 prev_marker,
385 integrity_hash,
386 })
387 }
388
389 #[must_use]
392 pub fn compute_integrity_hash(&self) -> [u8; 16] {
393 let mut buf = Vec::with_capacity(74);
394 append_u64_le(&mut buf, self.commit_seq.get());
395 append_u64_le(&mut buf, self.commit_time_unix_ns);
396 buf.extend_from_slice(self.capsule_object_id.as_bytes());
397 buf.extend_from_slice(self.proof_object_id.as_bytes());
398 if let Some(prev) = self.prev_marker {
399 buf.push(1);
400 buf.extend_from_slice(prev.as_bytes());
401 } else {
402 buf.push(0);
403 buf.extend_from_slice(&[0u8; 16]);
404 }
405 let hash128 = xxhash_rust::xxh3::xxh3_128(&buf);
406 hash128.to_le_bytes()
407 }
408
409 #[must_use]
411 pub fn new(
412 commit_seq: CommitSeq,
413 commit_time_unix_ns: u64,
414 capsule_object_id: ObjectId,
415 proof_object_id: ObjectId,
416 prev_marker: Option<ObjectId>,
417 ) -> Self {
418 let mut marker = Self {
419 commit_seq,
420 commit_time_unix_ns,
421 capsule_object_id,
422 proof_object_id,
423 prev_marker,
424 integrity_hash: [0u8; 16],
425 };
426 marker.integrity_hash = marker.compute_integrity_hash();
427 marker
428 }
429
430 #[must_use]
432 pub fn verify_integrity(&self) -> bool {
433 self.integrity_hash == self.compute_integrity_hash()
434 }
435}
436
/// Object Transmission Information parameters.
///
/// NOTE(review): the single-letter field names (`f`, `al`, `t`, `z`, `n`)
/// presumably follow the RaptorQ OTI of RFC 6330 (F, Al, T, Z, N) —
/// confirm against the codec that consumes these.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct Oti {
    /// 64-bit `f` parameter.
    pub f: u64,
    /// 16-bit `al` parameter.
    pub al: u16,
    /// 32-bit `t` parameter.
    pub t: u32,
    /// 32-bit `z` parameter.
    pub z: u32,
    /// 32-bit `n` parameter.
    pub n: u32,
}

/// Encoded size of an [`Oti`] on the wire: 8 + 2 + 4 + 4 + 4 = 22 bytes.
pub const OTI_WIRE_SIZE: usize = 22;
461
462impl Oti {
463 #[must_use]
465 pub fn to_bytes(self) -> [u8; OTI_WIRE_SIZE] {
466 let mut as_vec = Vec::with_capacity(OTI_WIRE_SIZE);
467 append_u64_le(&mut as_vec, self.f);
468 append_u16_le(&mut as_vec, self.al);
469 append_u32_le(&mut as_vec, self.t);
470 append_u32_le(&mut as_vec, self.z);
471 append_u32_le(&mut as_vec, self.n);
472
473 let mut buf = [0u8; OTI_WIRE_SIZE];
474 buf.copy_from_slice(&as_vec);
475 buf
476 }
477
478 #[must_use]
482 pub fn from_bytes(data: &[u8]) -> Option<Self> {
483 if data.len() < OTI_WIRE_SIZE {
484 return None;
485 }
486 Some(Self {
487 f: read_u64_le(&data[0..8])?,
488 al: read_u16_le(&data[8..10])?,
489 t: read_u32_le(&data[10..14])?,
490 z: read_u32_le(&data[14..18])?,
491 n: read_u32_le(&data[18..22])?,
492 })
493 }
494}
495
/// Pairs an object id with the OTI used to decode it.
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct DecodeProof {
    /// The decoded object.
    pub object_id: ObjectId,
    /// Transmission parameters for the decode.
    pub oti: Oti,
}
502
503pub use crate::cx::{Budget, Cx};
507
/// Task outcome classification. The derived ordering follows declaration
/// order: `Ok < Err < Cancelled < Panicked`.
#[derive(
    Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, serde::Serialize, serde::Deserialize,
)]
pub enum Outcome {
    Ok,
    Err,
    Cancelled,
    Panicked,
}
518
/// 64-bit epoch counter with an overflow-checked successor
/// ([`EpochId::next`]).
#[derive(
    Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, serde::Serialize, serde::Deserialize,
)]
#[repr(transparent)]
pub struct EpochId(u64);
525
526impl EpochId {
527 pub const ZERO: Self = Self(0);
529
530 #[inline]
531 pub const fn new(raw: u64) -> Self {
532 Self(raw)
533 }
534
535 #[inline]
536 pub const fn get(self) -> u64 {
537 self.0
538 }
539
540 #[must_use]
544 pub const fn next(self) -> Option<Self> {
545 match self.0.checked_add(1) {
546 Some(val) => Some(Self(val)),
547 None => None,
548 }
549 }
550}
551
/// Inclusive epoch range `[from_epoch, to_epoch]` (see `contains`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct SymbolValidityWindow {
    /// First epoch in the window (inclusive).
    pub from_epoch: EpochId,
    /// Last epoch in the window (inclusive).
    pub to_epoch: EpochId,
}
558
559impl SymbolValidityWindow {
560 #[must_use]
561 pub const fn new(from_epoch: EpochId, to_epoch: EpochId) -> Self {
562 Self {
563 from_epoch,
564 to_epoch,
565 }
566 }
567
568 #[must_use]
570 pub const fn default_window(current_epoch: EpochId) -> Self {
571 Self {
572 from_epoch: EpochId::ZERO,
573 to_epoch: current_epoch,
574 }
575 }
576
577 #[must_use]
582 pub const fn contains(&self, epoch: EpochId) -> bool {
583 epoch.0 >= self.from_epoch.0 && epoch.0 <= self.to_epoch.0
584 }
585}
586
/// Opaque 16-byte capability token.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[repr(transparent)]
pub struct RemoteCap([u8; 16]);
591
592impl RemoteCap {
593 #[must_use]
594 pub const fn from_bytes(bytes: [u8; 16]) -> Self {
595 Self(bytes)
596 }
597
598 #[must_use]
599 pub const fn as_bytes(&self) -> &[u8; 16] {
600 &self.0
601 }
602}
603
/// Opaque 32-byte master-key capability.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[repr(transparent)]
pub struct SymbolAuthMasterKeyCap([u8; 32]);
608
609impl SymbolAuthMasterKeyCap {
610 #[must_use]
611 pub const fn from_bytes(bytes: [u8; 32]) -> Self {
612 Self(bytes)
613 }
614
615 #[must_use]
616 pub const fn as_bytes(&self) -> &[u8; 32] {
617 &self.0
618 }
619}
620
/// Opaque 16-byte idempotency key (see `IdempotencyKey::derive` for the
/// deterministic construction).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[repr(transparent)]
pub struct IdempotencyKey([u8; 16]);
625
626impl IdempotencyKey {
627 #[must_use]
628 pub const fn from_bytes(bytes: [u8; 16]) -> Self {
629 Self(bytes)
630 }
631
632 #[must_use]
633 pub const fn as_bytes(&self) -> &[u8; 16] {
634 &self.0
635 }
636}
637
/// Handle carrying a saga's [`IdempotencyKey`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct Saga {
    /// The saga's idempotency key.
    pub key: IdempotencyKey,
}
643
impl IdempotencyKey {
    /// Deterministically derives a key: the first 16 bytes of
    /// `blake3("fsqlite:idempotency:v1" || ecs_epoch (LE) || request_bytes)`.
    ///
    /// Same epoch + same request bytes always yields the same key; a
    /// different epoch yields a different key.
    #[must_use]
    pub fn derive(ecs_epoch: u64, request_bytes: &[u8]) -> Self {
        let mut hasher = blake3::Hasher::new();
        hasher.update(b"fsqlite:idempotency:v1");
        hasher.update(&ecs_epoch.to_le_bytes());
        hasher.update(request_bytes);
        let digest = hasher.finalize();
        let mut out = [0_u8; 16];
        out.copy_from_slice(&digest.as_bytes()[..16]);
        Self(out)
    }
}
661
662impl Saga {
663 #[must_use]
665 pub const fn new(key: IdempotencyKey) -> Self {
666 Self { key }
667 }
668
669 #[must_use]
671 pub const fn key(self) -> IdempotencyKey {
672 self.key
673 }
674}
675
/// 32-bit region index newtype.
#[derive(
    Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, serde::Serialize, serde::Deserialize,
)]
#[repr(transparent)]
pub struct Region(u32);
682
683impl Region {
684 #[inline]
685 pub const fn new(raw: u32) -> Self {
686 Self(raw)
687 }
688
689 #[inline]
690 pub const fn get(self) -> u32 {
691 self.0
692 }
693}
694
/// Key identifying what a transaction observed or mutated, at varying
/// granularities.
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub enum WitnessKey {
    /// A whole page.
    Page(PageNumber),
    /// A single cell, identified by its b-tree root and a 64-bit tag
    /// (see `cell_tag`).
    Cell { btree_root: PageNumber, tag: u64 },
    /// A byte range within one page.
    ByteRange {
        page: PageNumber,
        start: u32,
        len: u32,
    },
    /// A key range within one b-tree.
    KeyRange {
        btree_root: PageNumber,
        lo: Vec<u8>,
        hi: Vec<u8>,
    },
    /// Free-form key scoped by a numeric namespace.
    Custom { namespace: u32, bytes: Vec<u8> },
}
723
impl WitnessKey {
    /// Compact 64-bit cell tag: xxh3-64 over a domain-separation prefix,
    /// the b-tree root page number (little-endian), and the canonical
    /// key bytes.
    #[must_use]
    pub fn cell_tag(btree_root: PageNumber, canonical_key_bytes: &[u8]) -> u64 {
        use xxhash_rust::xxh3::xxh3_64;
        // NOTE(review): the `+ 4` assumes the page number encodes to 4
        // bytes — confirm PageNumber's raw width (capacity hint only;
        // correctness is unaffected).
        let mut buf =
            Vec::with_capacity(b"fsqlite:witness:cell:v1".len() + 4 + canonical_key_bytes.len());
        buf.extend_from_slice(b"fsqlite:witness:cell:v1");
        buf.extend_from_slice(&btree_root.get().to_le_bytes());
        buf.extend_from_slice(canonical_key_bytes);
        xxh3_64(&buf)
    }

    /// Cell-granularity witness for a point read of `canonical_key_bytes`.
    #[must_use]
    pub fn for_cell_read(btree_root: PageNumber, canonical_key_bytes: &[u8]) -> Self {
        Self::Cell {
            btree_root,
            tag: Self::cell_tag(btree_root, canonical_key_bytes),
        }
    }

    /// Page-granularity witnesses, one per leaf page touched by a scan.
    #[must_use]
    pub fn for_range_scan(leaf_pages: &[PageNumber]) -> Vec<Self> {
        leaf_pages.iter().copied().map(Self::Page).collect()
    }

    /// Witness pair for a point write: the cell key plus the leaf page
    /// that holds it.
    #[must_use]
    pub fn for_point_write(
        btree_root: PageNumber,
        canonical_key_bytes: &[u8],
        leaf_pgno: PageNumber,
    ) -> (Self, Self) {
        let cell = Self::Cell {
            btree_root,
            tag: Self::cell_tag(btree_root, canonical_key_bytes),
        };
        let page = Self::Page(leaf_pgno);
        (cell, page)
    }

    /// `true` for the `Page` variant.
    #[must_use]
    pub fn is_page(&self) -> bool {
        matches!(self, Self::Page(_))
    }

    /// `true` for the `Cell` variant.
    #[must_use]
    pub fn is_cell(&self) -> bool {
        matches!(self, Self::Cell { .. })
    }
}
788
/// Compact range descriptor: a level byte plus a 32-bit hash prefix.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct RangeKey {
    /// Level of the range.
    pub level: u8,
    /// Hash prefix identifying the range at that level.
    pub hash_prefix: u32,
}
795
/// Record that transaction `txn` read the item identified by `key`.
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct ReadWitness {
    pub txn: TxnId,
    pub key: WitnessKey,
}
802
/// Record that transaction `txn` wrote the item identified by `key`.
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct WriteWitness {
    pub txn: TxnId,
    pub key: WitnessKey,
}
809
/// One epoch's worth of read and write witnesses.
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub struct WitnessIndexSegment {
    /// Epoch this segment belongs to.
    pub epoch: EpochId,
    pub reads: Vec<ReadWitness>,
    pub writes: Vec<WriteWitness>,
}
817
/// Directed dependency between two transactions on a witness key, along
/// with the transaction that observed the conflict.
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct DependencyEdge {
    pub from: TxnId,
    pub to: TxnId,
    /// The key the dependency is based on.
    pub key_basis: WitnessKey,
    /// Transaction that recorded the edge.
    pub observed_by: TxnId,
}
826
/// Evidence bundle for a commit: its sequence number, the dependency
/// edges considered, and references to supporting evidence objects.
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub struct CommitProof {
    /// Commit being proven.
    pub commit_seq: CommitSeq,
    /// Dependency edges examined for this commit.
    pub edges: Vec<DependencyEdge>,
    /// References to evidence objects backing the proof.
    pub evidence_refs: Vec<ObjectId>,
}
841
/// 32-bit table identifier newtype.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[repr(transparent)]
pub struct TableId(u32);
846
847impl TableId {
848 #[inline]
849 pub const fn new(raw: u32) -> Self {
850 Self(raw)
851 }
852
853 #[inline]
854 pub const fn get(self) -> u32 {
855 self.0
856 }
857}
858
/// 32-bit index identifier newtype.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[repr(transparent)]
pub struct IndexId(u32);
863
864impl IndexId {
865 #[inline]
866 pub const fn new(raw: u32) -> Self {
867 Self(raw)
868 }
869
870 #[inline]
871 pub const fn get(self) -> u32 {
872 self.0
873 }
874}
875
/// Signed 64-bit rowid newtype (SQLite rowids are `i64`).
#[derive(
    Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, serde::Serialize, serde::Deserialize,
)]
#[repr(transparent)]
pub struct RowId(i64);
882
883impl RowId {
884 pub const MAX: Self = Self(i64::MAX);
886
887 #[inline]
888 pub const fn new(raw: i64) -> Self {
889 Self(raw)
890 }
891
892 #[inline]
893 pub const fn get(self) -> i64 {
894 self.0
895 }
896}
897
/// Rowid allocation policy (see `RowIdAllocator::allocate`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum RowIdMode {
    /// Allocate `max + 1`; ids freed by deletes at the top may be reused.
    Normal,
    /// Also honor a persistent high-water mark so ids are never reused.
    AutoIncrement,
}
907
/// Stateful rowid allocator; in auto-increment mode it tracks the
/// highest id ever handed out.
#[derive(Debug, Clone)]
pub struct RowIdAllocator {
    // Allocation policy chosen at construction.
    mode: RowIdMode,
    // Highest id ever allocated in auto-increment mode (0 initially).
    sequence_high_water: i64,
}
920
921impl RowIdAllocator {
922 pub const fn new(mode: RowIdMode) -> Self {
924 Self {
925 mode,
926 sequence_high_water: 0,
927 }
928 }
929
930 pub fn allocate(&mut self, max_existing: Option<RowId>) -> Result<RowId, RowIdExhausted> {
936 let max_val = max_existing.map_or(0, RowId::get);
937
938 match self.mode {
939 RowIdMode::Normal => {
940 if max_val < i64::MAX {
941 Ok(RowId::new(max_val + 1))
942 } else {
943 Err(RowIdExhausted)
946 }
947 }
948 RowIdMode::AutoIncrement => {
949 let base = max_val.max(self.sequence_high_water);
950 if base == i64::MAX {
951 return Err(RowIdExhausted);
952 }
953 let next = base + 1;
954 self.sequence_high_water = next;
955 Ok(RowId::new(next))
956 }
957 }
958 }
959
960 pub const fn sequence_high_water(&self) -> i64 {
962 self.sequence_high_water
963 }
964
965 pub fn set_sequence_high_water(&mut self, val: i64) {
967 self.sequence_high_water = val;
968 }
969}
970
/// Error: no rowid above the current maximum can be allocated.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RowIdExhausted;
974
975impl std::fmt::Display for RowIdExhausted {
976 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
977 f.write_str("database or object is full (rowid exhausted)")
978 }
979}
980
/// 32-bit column index newtype.
#[derive(
    Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, serde::Serialize, serde::Deserialize,
)]
#[repr(transparent)]
pub struct ColumnIdx(u32);
987
988impl ColumnIdx {
989 #[inline]
990 pub const fn new(raw: u32) -> Self {
991 Self(raw)
992 }
993
994 #[inline]
995 pub const fn get(self) -> u32 {
996 self.0
997 }
998}
999
/// Reference to a b-tree: either a table's or an index's.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub enum BtreeRef {
    Table(TableId),
    Index(IndexId),
}
1010
/// Kind of semantic key: a table row or an index entry.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub enum SemanticKeyKind {
    TableRow,
    IndexEntry,
}
1017
/// Digest-based reference to a logical key within one b-tree
/// (see `compute_digest` for the hashing scheme).
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct SemanticKeyRef {
    /// B-tree the key lives in.
    pub btree: BtreeRef,
    /// Row vs index-entry discrimination.
    pub kind: SemanticKeyKind,
    /// 16-byte truncated blake3 digest of the canonical key.
    pub key_digest: [u8; 16],
}
1027
impl SemanticKeyRef {
    // Domain-separation prefix mixed into every key digest.
    const DOMAIN_SEP: &'static [u8] = b"fsqlite:btree:key:v1";

    /// 16-byte digest: truncated blake3 over the domain prefix, a kind
    /// tag (0 = TableRow, 1 = IndexEntry), a b-tree tag (0 = table,
    /// 1 = index) plus its id (little-endian), then the canonical key
    /// bytes.
    #[must_use]
    pub fn compute_digest(
        kind: SemanticKeyKind,
        btree: BtreeRef,
        canonical_key_bytes: &[u8],
    ) -> [u8; 16] {
        let mut hasher = blake3::Hasher::new();
        hasher.update(Self::DOMAIN_SEP);
        hasher.update(&[match kind {
            SemanticKeyKind::TableRow => 0,
            SemanticKeyKind::IndexEntry => 1,
        }]);
        match btree {
            BtreeRef::Table(id) => {
                hasher.update(&[0]);
                hasher.update(&id.get().to_le_bytes());
            }
            BtreeRef::Index(id) => {
                hasher.update(&[1]);
                hasher.update(&id.get().to_le_bytes());
            }
        }
        hasher.update(canonical_key_bytes);
        let hash = hasher.finalize();
        let bytes = hash.as_bytes();
        let mut digest = [0u8; 16];
        digest.copy_from_slice(&bytes[..16]);
        digest
    }

    /// Builds the reference, computing the digest from the canonical key.
    #[must_use]
    pub fn new(btree: BtreeRef, kind: SemanticKeyKind, canonical_key_bytes: &[u8]) -> Self {
        let key_digest = Self::compute_digest(kind, btree, canonical_key_bytes);
        Self {
            btree,
            kind,
            key_digest,
        }
    }
}
1074
bitflags::bitflags! {
    /// Bit set of physical b-tree side effects recorded for an operation.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    pub struct StructuralEffects: u32 {
        /// No structural change.
        const NONE = 0;
        /// Page split.
        const PAGE_SPLIT = 1;
        /// Page merge.
        const PAGE_MERGE = 2;
        /// Rebalance spanning multiple pages.
        const BALANCE_MULTI_PAGE = 4;
        /// Overflow-chain allocation.
        const OVERFLOW_ALLOC = 8;
        /// Overflow-chain mutation.
        const OVERFLOW_MUTATE = 16;
        /// Freelist mutation.
        const FREELIST_MUTATE = 32;
        /// Pointer-map mutation.
        const POINTER_MAP_MUTATE = 64;
        /// Cells moved during defragmentation.
        const DEFRAG_MOVE_CELLS = 128;
    }
}
1099
impl serde::Serialize for StructuralEffects {
    /// Serializes as the raw `u32` bit representation.
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        self.bits().serialize(serializer)
    }
}
1105
impl<'de> serde::Deserialize<'de> for StructuralEffects {
    /// Deserializes from raw bits, rejecting any bit outside the defined
    /// flags.
    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let bits = u32::deserialize(deserializer)?;
        Self::from_bits(bits).ok_or_else(|| {
            serde::de::Error::custom(format!("invalid StructuralEffects bits: {bits:#x}"))
        })
    }
}
1114
1115impl Default for StructuralEffects {
1116 fn default() -> Self {
1117 Self::NONE
1118 }
1119}
1120
/// Semantic read/write sets plus structural effects for one operation.
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub struct IntentFootprint {
    /// Keys read by the operation.
    pub reads: Vec<SemanticKeyRef>,
    /// Keys written by the operation.
    pub writes: Vec<SemanticKeyRef>,
    /// Physical side effects incurred.
    pub structural: StructuralEffects,
}
1128
1129impl IntentFootprint {
1130 #[must_use]
1132 pub fn empty() -> Self {
1133 Self {
1134 reads: Vec::new(),
1135 writes: Vec::new(),
1136 structural: StructuralEffects::NONE,
1137 }
1138 }
1139}
1140
1141impl Default for IntentFootprint {
1142 fn default() -> Self {
1143 Self::empty()
1144 }
1145}
1146
/// Serializable expression tree used to replay column updates
/// (see `IntentOpKind::UpdateExpression`).
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub enum RebaseExpr {
    /// Reference to a column of the current row.
    ColumnRef(ColumnIdx),
    /// Literal value.
    Literal(crate::SqliteValue),
    /// Unary operator application.
    UnaryOp {
        op: RebaseUnaryOp,
        operand: Box<Self>,
    },
    /// Binary operator application.
    BinaryOp {
        op: RebaseBinaryOp,
        left: Box<Self>,
        right: Box<Self>,
    },
    /// Named function call.
    FunctionCall { name: String, args: Vec<Self> },
    /// `CAST(expr AS type_name)`.
    Cast { expr: Box<Self>, type_name: String },
    /// `CASE [operand] WHEN … THEN … [ELSE …] END`.
    Case {
        operand: Option<Box<Self>>,
        when_clauses: Vec<(Self, Self)>,
        else_clause: Option<Box<Self>>,
    },
    /// `COALESCE(…)`.
    Coalesce(Vec<Self>),
    /// `NULLIF(left, right)`.
    NullIf { left: Box<Self>, right: Box<Self> },
    /// String concatenation (`left || right`).
    Concat { left: Box<Self>, right: Box<Self> },
}
1185
/// Unary operators usable in a [`RebaseExpr`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub enum RebaseUnaryOp {
    Negate,
    BitwiseNot,
    Not,
}
1193
/// Binary operators usable in a [`RebaseExpr`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub enum RebaseBinaryOp {
    Add,
    Subtract,
    Multiply,
    Divide,
    Remainder,
    BitwiseAnd,
    BitwiseOr,
    ShiftLeft,
    ShiftRight,
}
1207
/// The logical operation performed by an [`IntentOp`].
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub enum IntentOpKind {
    /// Insert a record at `key` in `table`.
    Insert {
        table: TableId,
        key: RowId,
        record: Vec<u8>,
    },
    /// Delete the record at `key` from `table`.
    Delete {
        table: TableId,
        key: RowId,
    },
    /// Replace the record at `key` in `table`.
    Update {
        table: TableId,
        key: RowId,
        new_record: Vec<u8>,
    },
    /// Add an index entry mapping `key` to `rowid`.
    IndexInsert {
        index: IndexId,
        key: Vec<u8>,
        rowid: RowId,
    },
    /// Remove the index entry mapping `key` to `rowid`.
    IndexDelete {
        index: IndexId,
        key: Vec<u8>,
        rowid: RowId,
    },
    /// Recompute selected columns of the record at `key` via expressions.
    UpdateExpression {
        table: TableId,
        key: RowId,
        column_updates: Vec<(ColumnIdx, RebaseExpr)>,
    },
}
1242
/// One logical operation plus its footprint and the schema epoch it was
/// recorded under.
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct IntentOp {
    // NOTE(review): raw u64 rather than the `SchemaEpoch` newtype used
    // elsewhere — consider unifying.
    pub schema_epoch: u64,
    /// Semantic read/write sets and structural effects.
    pub footprint: IntentFootprint,
    /// The operation itself.
    pub op: IntentOpKind,
}

/// Ordered sequence of intent operations.
pub type IntentLog = Vec<IntentOp>;
1253
/// All known versions of one page.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PageHistory {
    /// The page in question.
    pub pgno: PageNumber,
    /// Its versions.
    pub versions: Vec<PageVersion>,
}
1260
/// Zero-sized marker type. NOTE(review): presumably a placeholder for an
/// ARC page cache — confirm intended use before extending.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct ArcCache;
1267
/// Top-level manifest tying together the schema epoch, the root page, and
/// the ECS epoch.
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub struct RootManifest {
    pub schema_epoch: SchemaEpoch,
    pub root_page: PageNumber,
    pub ecs_epoch: EpochId,
}
1279
/// 32-bit transaction-slot index newtype.
#[derive(
    Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, serde::Serialize, serde::Deserialize,
)]
#[repr(transparent)]
pub struct TxnSlot(u32);
1286
1287impl TxnSlot {
1288 #[inline]
1289 pub const fn new(raw: u32) -> Self {
1290 Self(raw)
1291 }
1292
1293 #[inline]
1294 pub const fn get(self) -> u32 {
1295 self.0
1296 }
1297}
1298
1299#[cfg(test)]
1300mod tests {
1301 use std::collections::HashSet;
1302 use std::time::Duration;
1303
1304 use proptest::prelude::*;
1305
1306 use crate::PayloadHash;
1307
1308 use super::*;
1309
    /// `TxnId` rejects 0 and accepts the 1..=MAX_RAW range endpoints.
    #[test]
    fn test_txn_id_nonzero_enforced() {
        assert!(TxnId::new(0).is_none());
        assert!(TxnId::try_from(0_u64).is_err());
        assert!(TxnId::new(1).is_some());
        assert!(TxnId::new(TxnId::MAX_RAW).is_some());
    }
1317
    /// Values above the 62-bit cap are rejected by both constructors.
    #[test]
    fn test_txn_id_62_bit_max() {
        assert!(TxnId::new(TxnId::MAX_RAW + 1).is_none());
        assert!(TxnId::try_from(TxnId::MAX_RAW + 1).is_err());
    }
1323
    /// Derived ObjectIds have the fixed length `ObjectId::LEN`.
    #[test]
    fn test_object_id_16_bytes_blake3_truncation() {
        let header = b"hdr:v1";
        let payload = b"payload";
        let oid = ObjectId::derive(header, PayloadHash::blake3(payload));
        assert_eq!(oid.as_bytes().len(), ObjectId::LEN);
    }
1331
    /// Identical content derives identical ids; different content differs.
    #[test]
    fn test_object_id_content_addressed() {
        let header = b"hdr:v1";
        let payload = b"payload";
        let a = ObjectId::derive(header, PayloadHash::blake3(payload));
        let b = ObjectId::derive(header, PayloadHash::blake3(payload));
        assert_eq!(a, b);

        let c = ObjectId::derive(header, PayloadHash::blake3(b"payload2"));
        assert_ne!(a, c);
    }
1343
    /// 10k LCG-generated payloads produce no ObjectId collisions.
    #[test]
    fn prop_object_id_collision_resistance() {
        let header = b"hdr:v1";
        let mut ids = HashSet::<ObjectId>::with_capacity(10_000);

        // Linear congruential generator (Knuth constants) for cheap,
        // deterministic pseudo-random payloads.
        let mut state: u64 = 0xD6E8_FEB8_6659_FD93;
        for i in 0..10_000_u64 {
            state = state
                .wrapping_mul(6_364_136_223_846_793_005_u64)
                .wrapping_add(1_442_695_040_888_963_407_u64);

            let mut payload = [0_u8; 32];
            payload[..8].copy_from_slice(&i.to_le_bytes());
            payload[8..16].copy_from_slice(&state.to_le_bytes());
            payload[16..24].copy_from_slice(&state.rotate_left(17).to_le_bytes());
            payload[24..32].copy_from_slice(&state.rotate_left(41).to_le_bytes());

            let oid = ObjectId::derive(header, PayloadHash::blake3(&payload));
            assert!(ids.insert(oid), "ObjectId collision at i={i}");
        }
    }
1366
    /// `Snapshot::new` stores its two components verbatim.
    #[test]
    fn test_snapshot_fields() {
        let snap = Snapshot::new(CommitSeq::new(7), SchemaEpoch::new(9));
        assert_eq!(snap.high.get(), 7);
        assert_eq!(snap.schema_epoch.get(), 9);
    }
1373
    /// The 32-bit `t` field admits values beyond 16 bits.
    #[test]
    fn test_oti_field_widths_allow_large_symbol_size() {
        let oti = Oti {
            f: 1,
            al: 4,
            t: 65_536,
            z: 1,
            n: 1,
        };
        assert_eq!(oti.t, 65_536);
    }
1386
    /// `Budget::meet` takes the pointwise minimum of the limit fields and
    /// the maximum priority.
    #[test]
    fn test_budget_product_lattice_semantics() {
        let a = Budget {
            deadline: Some(Duration::from_millis(100)),
            poll_quota: 10,
            cost_quota: Some(500),
            priority: 1,
        };
        let b = Budget {
            deadline: Some(Duration::from_millis(50)),
            poll_quota: 20,
            cost_quota: Some(400),
            priority: 9,
        };
        let c = a.meet(b);
        assert_eq!(c.deadline, Some(Duration::from_millis(50)));
        assert_eq!(c.poll_quota, 10);
        assert_eq!(c.cost_quota, Some(400));
        assert_eq!(c.priority, 9);
    }
1407
    /// Outcome's derived ordering is Ok < Err < Cancelled < Panicked.
    #[test]
    fn test_outcome_ordering_lattice() {
        assert!(Outcome::Ok < Outcome::Err);
        assert!(Outcome::Err < Outcome::Cancelled);
        assert!(Outcome::Cancelled < Outcome::Panicked);
    }
1414
    /// Smoke-constructs the page/cell/byte-range witness variants.
    #[test]
    fn test_witness_key_variants_exhaustive() {
        let pn = PageNumber::new(1).unwrap();

        let a = WitnessKey::Page(pn);
        let b = WitnessKey::Cell {
            btree_root: pn,
            tag: 7,
        };
        let c = WitnessKey::ByteRange {
            page: pn,
            start: 0,
            len: 16,
        };

        assert!(matches!(a, WitnessKey::Page(_)));
        assert!(matches!(b, WitnessKey::Cell { .. }));
        assert!(matches!(c, WitnessKey::ByteRange { .. }));
    }
1434
    /// Compile-time check that every glossary type is Debug + Clone.
    #[test]
    fn test_all_glossary_types_derive_debug_clone() {
        fn assert_debug_clone<T: fmt::Debug + Clone>() {}

        assert_debug_clone::<TxnId>();
        assert_debug_clone::<CommitSeq>();
        assert_debug_clone::<TxnEpoch>();
        assert_debug_clone::<TxnToken>();
        assert_debug_clone::<SchemaEpoch>();
        assert_debug_clone::<Snapshot>();
        assert_debug_clone::<VersionPointer>();
        assert_debug_clone::<PageVersion>();
        assert_debug_clone::<ObjectId>();
        assert_debug_clone::<CommitCapsule>();
        assert_debug_clone::<CommitMarker>();
        assert_debug_clone::<Oti>();
        assert_debug_clone::<DecodeProof>();
        assert_debug_clone::<Cx<crate::cx::ComputeCaps>>();
        assert_debug_clone::<Budget>();
        assert_debug_clone::<Outcome>();
        assert_debug_clone::<EpochId>();
        assert_debug_clone::<SymbolValidityWindow>();
        assert_debug_clone::<RemoteCap>();
        assert_debug_clone::<SymbolAuthMasterKeyCap>();
        assert_debug_clone::<IdempotencyKey>();
        assert_debug_clone::<Saga>();
        assert_debug_clone::<Region>();
        assert_debug_clone::<WitnessKey>();
        assert_debug_clone::<RangeKey>();
        assert_debug_clone::<ReadWitness>();
        assert_debug_clone::<WriteWitness>();
        assert_debug_clone::<WitnessIndexSegment>();
        assert_debug_clone::<DependencyEdge>();
        assert_debug_clone::<CommitProof>();
        assert_debug_clone::<TableId>();
        assert_debug_clone::<IndexId>();
        assert_debug_clone::<RowId>();
        assert_debug_clone::<ColumnIdx>();
        assert_debug_clone::<BtreeRef>();
        assert_debug_clone::<SemanticKeyKind>();
        assert_debug_clone::<SemanticKeyRef>();
        assert_debug_clone::<StructuralEffects>();
        assert_debug_clone::<IntentFootprint>();
        assert_debug_clone::<RebaseExpr>();
        assert_debug_clone::<RebaseUnaryOp>();
        assert_debug_clone::<RebaseBinaryOp>();
        assert_debug_clone::<IntentOpKind>();
        assert_debug_clone::<IntentOp>();
        assert_debug_clone::<PageHistory>();
        assert_debug_clone::<ArcCache>();
        assert_debug_clone::<RootManifest>();
        assert_debug_clone::<TxnSlot>();
        assert_debug_clone::<OperatingMode>();
    }
1489
    /// NOTE(review): exact duplicate of `test_remote_cap_roundtrip` below —
    /// consider removing one of the two.
    #[test]
    fn test_remote_cap_from_bytes_roundtrip() {
        let raw = [0xAB_u8; 16];
        let cap = RemoteCap::from_bytes(raw);
        assert_eq!(cap.as_bytes(), &raw);
    }
1496
    /// Same epoch + request derives the same key; a different epoch differs.
    #[test]
    fn test_idempotency_key_derivation_is_deterministic() {
        let req = b"fetch:object=42";
        let a = IdempotencyKey::derive(7, req);
        let b = IdempotencyKey::derive(7, req);
        let c = IdempotencyKey::derive(8, req);
        assert_eq!(a, b);
        assert_ne!(a, c);
    }
1506
#[test]
fn test_remote_cap_roundtrip() {
    // This test previously duplicated test_remote_cap_from_bytes_roundtrip
    // byte-for-byte (same [0xAB; 16] input), adding no coverage. Use a
    // distinct, non-uniform pattern so any byte reordering inside the
    // roundtrip is detected, and confirm construction is a pure function
    // of the input bytes.
    let raw: [u8; 16] = core::array::from_fn(|i| (i as u8).wrapping_mul(17));
    let cap = RemoteCap::from_bytes(raw);
    assert_eq!(cap.as_bytes(), &raw);
    assert_eq!(cap.as_bytes(), RemoteCap::from_bytes(raw).as_bytes());
}
1513
#[test]
fn test_symbol_auth_master_key_cap_roundtrip() {
    // Position-dependent bytes so a reversed or shifted copy would fail;
    // the previous uniform [0xCD; 32] fill could not detect such bugs.
    let raw: [u8; 32] = core::array::from_fn(|i| 0xCD ^ i as u8);
    let cap = SymbolAuthMasterKeyCap::from_bytes(raw);
    assert_eq!(cap.as_bytes(), &raw);
}
1520
#[test]
fn test_idempotency_key_roundtrip() {
    // from_bytes and as_bytes must be exact inverses.
    let bytes = [0x11_u8; 16];
    let key = IdempotencyKey::from_bytes(bytes);
    assert_eq!(key.as_bytes(), &bytes);
}
1527
#[test]
fn test_saga_constructor() {
    // Saga::new must store the idempotency key it was given verbatim.
    let idem = IdempotencyKey::from_bytes([0x22_u8; 16]);
    assert_eq!(Saga::new(idem).key(), idem);
}
1534
1535 fn arb_budget() -> impl Strategy<Value = Budget> {
1536 (
1537 prop::option::of(any::<u64>()),
1538 any::<u32>(),
1539 prop::option::of(any::<u64>()),
1540 any::<u8>(),
1541 )
1542 .prop_map(|(deadline_ms, poll_quota, cost_quota, priority)| Budget {
1543 deadline: deadline_ms.map(Duration::from_millis),
1544 poll_quota,
1545 cost_quota,
1546 priority,
1547 })
1548 }
1549
1550 proptest! {
1551 #[test]
1552 fn prop_budget_combine_associative(a in arb_budget(), b in arb_budget(), c in arb_budget()) {
1553 prop_assert_eq!(a.meet(b).meet(c), a.meet(b.meet(c)));
1554 }
1555
1556 #[test]
1557 fn prop_budget_combine_commutative(a in arb_budget(), b in arb_budget()) {
1558 prop_assert_eq!(a.meet(b), b.meet(a));
1559 }
1560 }
1561
#[test]
fn test_rowid_reuse_without_autoincrement() {
    // In Normal mode the allocator follows only the table's current max
    // rowid, so ids vacated by deletes can effectively be handed out again.
    let mut alloc = RowIdAllocator::new(RowIdMode::Normal);
    assert_eq!(alloc.allocate(Some(RowId::new(5))).unwrap().get(), 6);
    // A lower current max (rows deleted) yields 4 even though 6 was issued.
    assert_eq!(alloc.allocate(Some(RowId::new(3))).unwrap().get(), 4);
}
1575
#[test]
fn test_autoincrement_no_reuse() {
    // AutoIncrement mode never reissues an id at or below its high-water
    // mark, even when the table's current max rowid drops.
    let mut alloc = RowIdAllocator::new(RowIdMode::AutoIncrement);
    assert_eq!(alloc.allocate(Some(RowId::new(5))).unwrap().get(), 6);
    // Max dropped to 3, but the next id must still be above 6.
    assert_eq!(alloc.allocate(Some(RowId::new(3))).unwrap().get(), 7);
}
1588
#[test]
fn test_sqlite_sequence_updates() {
    let mut alloc = RowIdAllocator::new(RowIdMode::AutoIncrement);
    assert_eq!(alloc.sequence_high_water(), 0, "fresh allocator starts at 0");

    // Allocating past rowid 10 must advance the high-water mark to 11.
    let _ = alloc.allocate(Some(RowId::new(10))).unwrap();
    assert_eq!(alloc.sequence_high_water(), 11);

    // An externally restored sequence value wins over a smaller table max,
    // and the high-water mark tracks the newly issued id.
    alloc.set_sequence_high_water(100);
    let issued = alloc.allocate(Some(RowId::new(50))).unwrap();
    assert_eq!(issued.get(), 101);
    assert_eq!(alloc.sequence_high_water(), 101);
}
1603
#[test]
fn test_max_rowid_exhausted_autoincrement() {
    // With the table already at RowId::MAX, AutoIncrement allocation
    // has no id left to hand out and must report an error.
    let mut alloc = RowIdAllocator::new(RowIdMode::AutoIncrement);
    assert!(alloc.allocate(Some(RowId::MAX)).is_err());
}
1611
#[test]
fn test_max_rowid_exhausted_normal() {
    // Normal mode must likewise fail once the table max is RowId::MAX.
    let mut alloc = RowIdAllocator::new(RowIdMode::Normal);
    assert!(alloc.allocate(Some(RowId::MAX)).is_err());
}
1620
#[test]
fn test_rowid_allocate_empty_table() {
    // With no existing rows (max = None) both modes start at rowid 1.
    for mode in [RowIdMode::Normal, RowIdMode::AutoIncrement] {
        let mut alloc = RowIdAllocator::new(mode);
        assert_eq!(alloc.allocate(None).unwrap().get(), 1);
    }
}
1631
#[test]
fn test_intent_op_all_variants_encode_decode_roundtrip() {
    use crate::SqliteValue;

    // One representative value per IntentOpKind variant — including a
    // nested RebaseExpr tree under UpdateExpression — so serde coverage
    // spans every enum arm, not just the simple ones.
    let variants: Vec<IntentOpKind> = vec![
        IntentOpKind::Insert {
            table: TableId::new(1),
            key: RowId::new(100),
            record: vec![0x01, 0x02, 0x03],
        },
        IntentOpKind::Delete {
            table: TableId::new(2),
            key: RowId::new(200),
        },
        IntentOpKind::Update {
            table: TableId::new(3),
            key: RowId::new(300),
            new_record: vec![0x04, 0x05],
        },
        IntentOpKind::IndexInsert {
            index: IndexId::new(10),
            key: vec![0xAA, 0xBB],
            rowid: RowId::new(400),
        },
        IntentOpKind::IndexDelete {
            index: IndexId::new(11),
            key: vec![0xCC],
            rowid: RowId::new(500),
        },
        // Deliberately exercises recursive expression nodes (BinaryOp with
        // boxed children) and a variable-length Coalesce list.
        IntentOpKind::UpdateExpression {
            table: TableId::new(4),
            key: RowId::new(600),
            column_updates: vec![
                (
                    ColumnIdx::new(0),
                    RebaseExpr::BinaryOp {
                        op: RebaseBinaryOp::Add,
                        left: Box::new(RebaseExpr::ColumnRef(ColumnIdx::new(0))),
                        right: Box::new(RebaseExpr::Literal(SqliteValue::Integer(1))),
                    },
                ),
                (
                    ColumnIdx::new(2),
                    RebaseExpr::Coalesce(vec![
                        RebaseExpr::ColumnRef(ColumnIdx::new(2)),
                        RebaseExpr::Literal(SqliteValue::Integer(0)),
                    ]),
                ),
            ],
        },
    ];

    // Wrap each variant in a full IntentOp and require a lossless JSON
    // roundtrip: decode(encode(op)) == op.
    for variant in &variants {
        let op = IntentOp {
            schema_epoch: 42,
            footprint: IntentFootprint::empty(),
            op: variant.clone(),
        };

        let json = serde_json::to_string(&op).expect("serialize must succeed");
        let decoded: IntentOp = serde_json::from_str(&json).expect("deserialize must succeed");

        assert_eq!(decoded, op, "roundtrip failed for variant: {variant:?}");
    }
}
1699
#[test]
fn test_semantic_key_ref_digest_stable() {
    let table = BtreeRef::Table(TableId::new(42));
    let key_bytes = b"canonical_key_data";

    // Identical (kind, btree, key) inputs must always produce the same digest.
    let d1 = SemanticKeyRef::compute_digest(SemanticKeyKind::TableRow, table, key_bytes);
    let d2 = SemanticKeyRef::compute_digest(SemanticKeyKind::TableRow, table, key_bytes);
    assert_eq!(d1, d2, "digest must be stable across calls");

    // The constructor must store the same digest compute_digest produces.
    let skr = SemanticKeyRef::new(table, SemanticKeyKind::TableRow, key_bytes);
    assert_eq!(skr.key_digest, d1);

    // Varying any single input — key bytes, key kind, or target btree
    // (even with the same numeric id, 42) — must change the digest.
    let d3 = SemanticKeyRef::compute_digest(SemanticKeyKind::TableRow, table, b"different_key");
    assert_ne!(d1, d3);

    let d4 = SemanticKeyRef::compute_digest(SemanticKeyKind::IndexEntry, table, key_bytes);
    assert_ne!(d1, d4);

    let index = BtreeRef::Index(IndexId::new(42));
    let d5 = SemanticKeyRef::compute_digest(SemanticKeyKind::TableRow, index, key_bytes);
    assert_ne!(d1, d5);

    // Digest width is fixed at 16 bytes (128 bits).
    assert_eq!(d1.len(), 16);
}
1730
#[test]
fn test_structural_effects_bitflags() {
    // NONE is the empty flag set: zero bits and is_empty() true.
    assert_eq!(StructuralEffects::NONE.bits(), 0);
    assert!(StructuralEffects::NONE.is_empty());

    // An empty set contains no individual flag.
    let leaf = StructuralEffects::NONE;
    assert!(!leaf.contains(StructuralEffects::PAGE_SPLIT));
    assert!(!leaf.contains(StructuralEffects::FREELIST_MUTATE));

    // The union of two flags contains exactly those two and nothing else.
    let split_overflow = StructuralEffects::PAGE_SPLIT | StructuralEffects::OVERFLOW_ALLOC;
    assert!(split_overflow.contains(StructuralEffects::PAGE_SPLIT));
    assert!(split_overflow.contains(StructuralEffects::OVERFLOW_ALLOC));
    assert!(!split_overflow.contains(StructuralEffects::PAGE_MERGE));

    // All eight declared flags can be OR-ed together into one set.
    let all = StructuralEffects::PAGE_SPLIT
        | StructuralEffects::PAGE_MERGE
        | StructuralEffects::BALANCE_MULTI_PAGE
        | StructuralEffects::OVERFLOW_ALLOC
        | StructuralEffects::OVERFLOW_MUTATE
        | StructuralEffects::FREELIST_MUTATE
        | StructuralEffects::POINTER_MAP_MUTATE
        | StructuralEffects::DEFRAG_MOVE_CELLS;
    assert!(all.contains(StructuralEffects::FREELIST_MUTATE));
    assert!(all.contains(StructuralEffects::DEFRAG_MOVE_CELLS));

    // Flag sets must survive a serde JSON roundtrip unchanged.
    let json = serde_json::to_string(&split_overflow).expect("serialize");
    let decoded: StructuralEffects = serde_json::from_str(&json).expect("deserialize");
    assert_eq!(decoded, split_overflow);
}
1765
#[test]
fn test_rowid_allocator_monotone_no_collision() {
    // Repeated allocation — always feeding back the last issued id as the
    // table's current max — must yield strictly increasing, collision-free
    // rowids. (A single 10-iteration loop replaces two byte-identical
    // back-to-back 5-iteration loops that did the same work.)
    let mut alloc = RowIdAllocator::new(RowIdMode::Normal);
    let mut ids: Vec<RowId> = Vec::new();

    for _ in 0..10 {
        let max_existing = ids.last().copied();
        ids.push(alloc.allocate(max_existing).unwrap());
    }

    // Strict monotonicity across consecutive allocations.
    let raw_ids: Vec<i64> = ids.iter().map(|r| r.get()).collect();
    for pair in raw_ids.windows(2) {
        assert!(
            pair[1] > pair[0],
            "RowIds must be strictly monotonically increasing: {} <= {}",
            pair[0],
            pair[1]
        );
    }

    // No duplicates (implied by monotonicity, asserted independently).
    let unique: HashSet<i64> = raw_ids.iter().copied().collect();
    assert_eq!(unique.len(), raw_ids.len(), "RowIds must be disjoint");
}
1802
#[test]
fn test_rowid_allocator_bump_on_explicit_rowid() {
    let mut alloc = RowIdAllocator::new(RowIdMode::AutoIncrement);

    // Empty table: first allocation is rowid 1.
    let r1 = alloc.allocate(None).unwrap();
    assert_eq!(r1.get(), 1);

    // Simulate a restored sqlite_sequence entry of 1000.
    alloc.set_sequence_high_water(1000);

    // The explicit table max (999) is below the high-water mark (1000);
    // allocation must still land strictly above the sequence, i.e. >= 1001.
    // (The old failure message wrongly called 1000 the "explicit rowid".)
    let r2 = alloc.allocate(Some(RowId::new(999))).unwrap();
    assert!(
        r2.get() >= 1001,
        "allocator must bump past sequence high water 1000 (explicit max was 999), got {}",
        r2.get()
    );

    // Subsequent allocations keep strictly increasing from the issued id.
    let r3 = alloc.allocate(Some(r2)).unwrap();
    assert!(r3.get() > r2.get());
}
1826}