// splinter_rs/codec/encoder.rs
use std::ops::RangeInclusive;
2
3use bitvec::{boxed::BitBox, order::Lsb0};
4use bytes::BufMut;
5use crc64fast_nvme::Digest;
6use zerocopy::{IntoBytes, transmute_ref};
7
8use crate::{
9 PartitionRead,
10 codec::{footer::Footer, runs_ref::EncodedRun, tree_ref::TreeIndexBuilder},
11 level::{Block, Level},
12 partition::Partition,
13 partition_kind::PartitionKind,
14 traits::TruncateFrom,
15};
16
/// Streaming encoder that serializes splinter partitions into a caller-supplied
/// buffer while tracking a running CRC-64 checksum and a total byte count.
pub struct Encoder<B: BufMut> {
    // Destination buffer; every encoded byte is appended here.
    buf: B,
    // Total bytes appended to `buf` so far (includes footer bytes once written).
    bytes_written: usize,
    // Running CRC-64 digest. NOTE(review): only bytes routed through
    // `put_slice` are folded in — `write_splinter` bypasses it, presumably
    // because a pre-encoded splinter already carries its own checksum/footer.
    checksum: Digest,
    // Set once a footer is present (either written by `write_footer` or
    // implied by a pre-encoded splinter); guards against emitting two footers.
    wrote_footer: bool,
}
23
impl<B: BufMut> Encoder<B> {
    /// Creates an encoder writing into `buf`, with zeroed counters, a fresh
    /// checksum digest, and no footer written yet.
    pub fn new(buf: B) -> Self {
        Self {
            buf,
            bytes_written: 0,
            checksum: Digest::new(),
            wrote_footer: false,
        }
    }

    /// Consumes the encoder and returns the underlying buffer.
    /// Note: does NOT write a footer — callers are responsible for calling
    /// `write_footer` first if one is required.
    pub fn into_inner(self) -> B {
        self.buf
    }

    /// Appends an already-encoded splinter verbatim.
    ///
    /// Deliberately bypasses `put_slice` (and therefore the running checksum):
    /// the pre-encoded bytes are assumed to already contain their own footer,
    /// which is why `wrote_footer` is set here — a later `write_footer` call
    /// would panic rather than produce a second footer.
    pub(crate) fn write_splinter(&mut self, splinter: &[u8]) {
        self.buf.put_slice(splinter);
        self.bytes_written += splinter.len();
        self.wrote_footer = true;
    }

    /// Finalizes the stream by appending a footer built from the CRC-64 of
    /// everything written through `put_slice` so far.
    ///
    /// # Panics
    /// Panics if a footer is already present (double-finalization, or a
    /// pre-encoded splinter was written via `write_splinter`).
    pub(crate) fn write_footer(&mut self) {
        assert!(
            !self.wrote_footer,
            "invalid encoder usage: footer already present"
        );
        self.wrote_footer = true;
        // Snapshot the checksum BEFORE writing: the footer bytes themselves
        // pass through `put_slice` and are folded into the (now unused) digest.
        let footer = Footer::from_checksum(self.checksum.sum64());
        self.put_slice(footer.as_bytes());
    }

    /// Total number of bytes emitted so far, footer included once written.
    pub(crate) fn bytes_written(&self) -> usize {
        self.bytes_written
    }

    /// Encodes a bitmap partition. Currently a thin wrapper over
    /// `put_bitmap_raw` (no extra header/length is written for bitmaps).
    pub(crate) fn put_bitmap_partition(&mut self, bitmap: &BitBox<u64, Lsb0>) {
        self.put_bitmap_raw(bitmap);
    }

    /// Encodes a vec partition: the values in order, followed by the
    /// (biased, see `put_length`) element count.
    pub(crate) fn put_vec_partition<L: Level>(&mut self, values: &[L::Value]) {
        self.put_iter::<L>(values.iter().copied());
        self.put_length::<L>(values.len());
    }

    /// Encodes a run partition: each inclusive range as an `EncodedRun`,
    /// followed by the (biased) run count.
    ///
    /// # Panics
    /// Panics (via `put_length`) if `runs` is empty.
    pub(crate) fn put_run_partition<L: Level>(
        &mut self,
        runs: impl Iterator<Item = RangeInclusive<L::Value>>,
    ) {
        let mut num_runs = 0;
        for run in runs {
            let run: EncodedRun<L> = run.into();
            self.put_slice(run.as_bytes());
            num_runs += 1;
        }
        self.put_length::<L>(num_runs);
    }

    /// Encodes a tree-index node. Wire order is load-bearing:
    /// child offsets, then child cardinalities, then the segment partition
    /// body, then the (biased) child count.
    ///
    /// # Panics
    /// Panics if the builder reports zero children or more than
    /// `Block::MAX_LEN`, or if the segment partition is a `Run`/`Tree`
    /// variant (which the builder is expected never to produce).
    pub(crate) fn put_tree_index<L: Level>(&mut self, tree_index_builder: TreeIndexBuilder<L>) {
        let (num_children, segments, offsets, cardinalities) = tree_index_builder.build();
        assert!(
            num_children > 0 && num_children <= Block::MAX_LEN,
            "num_children out of range"
        );

        self.put_iter::<L>(offsets);
        self.put_iter::<L>(cardinalities);

        match segments {
            // Full partitions need no body; the kind alone encodes them.
            Partition::Full => {}
            Partition::Bitmap(p) => self.put_bitmap_raw(p.as_bitbox()),
            Partition::Vec(p) => self.put_iter::<Block>(p.iter()),
            Partition::Run(_) | Partition::Tree(_) => unreachable!(),
        }

        self.put_length::<Block>(num_children);
    }

    /// Writes each value in `values` via `put_value`, preserving order.
    pub(crate) fn put_iter<L: Level>(&mut self, values: impl Iterator<Item = L::Value>) {
        for value in values {
            self.put_value::<L>(value);
        }
    }

    /// Writes a single-byte partition-kind tag (checksummed like all data).
    pub(crate) fn put_kind(&mut self, k: PartitionKind) {
        let d = [k as u8];
        self.put_slice(&d)
    }

    /// Writes a length field biased by one: `len - 1` is stored, which lets
    /// the full value range of `L::Value` represent lengths 1..=MAX+1.
    ///
    /// # Panics
    /// Panics if `len == 0` — zero-length is unrepresentable in this scheme.
    #[inline]
    fn put_length<L: Level>(&mut self, len: usize) {
        assert_ne!(len, 0, "Length must be greater than zero");
        self.put_value::<L>(L::Value::truncate_from(len - 1));
    }

    /// Writes one value in the level's unaligned on-wire representation.
    #[inline]
    fn put_value<L: Level>(&mut self, v: L::Value) {
        self.put_slice(L::ValueUnaligned::from(v).as_bytes());
    }

    /// Writes the raw `u64` backing words of a bitmap as little-endian bytes.
    ///
    /// The compile-time `assert_cfg!` pins the build to little-endian targets,
    /// making the `transmute_ref!` from native `u64` words to
    /// `U64<LittleEndian>` a no-op reinterpretation rather than a byte swap.
    fn put_bitmap_raw(&mut self, bitmap: &BitBox<u64, Lsb0>) {
        let raw = bitmap.as_raw_slice();
        static_assertions::assert_cfg!(target_endian = "little");
        let raw: &[zerocopy::U64<zerocopy::LittleEndian>] = transmute_ref!(raw);
        self.put_slice(raw.as_bytes());
    }

    /// Single choke point for all checksummed output: feeds the digest,
    /// appends to the buffer, and advances the byte counter. Everything
    /// except `write_splinter` must write through here.
    fn put_slice(&mut self, data: &[u8]) {
        self.checksum.write(data);
        self.buf.put_slice(data);
        self.bytes_written += data.len();
    }
}