// btree_ondisk/node_v1.rs

1use std::ptr;
2use std::fmt;
3use std::marker::PhantomPinned;
4use crate::ondisk::NodeHeader;
5
/// Header flag bit: marks a "large" node. Set by `init_root`; `Display`
/// renders such nodes as ROOT.
pub const BTREE_NODE_LARGE: u8 = 0x01;
/// Level value for data (leaf) entries.
pub const BTREE_NODE_LEVEL_DATA: usize = 0x00;
/// Lowest level a btree node can sit at (immediately above the data level).
pub const BTREE_NODE_LEVEL_MIN: usize = BTREE_NODE_LEVEL_DATA + 1;
/// Maximum supported node level (tree depth bound).
pub const BTREE_NODE_LEVEL_MAX: usize = 14;

/// Byte alignment used for heap buffers that back owned nodes.
const MIN_ALIGNED: usize = 8;
12
/// View of a btree node over a raw byte buffer laid out as
/// `NodeHeader | [K; capacity] | [V; capacity]`.
///
/// A node either borrows an external buffer (`from_slice`; `ptr` stays null
/// and `Drop` frees nothing) or owns a heap allocation (`new` /
/// `copy_from_slice`; freed on `Drop`).
#[derive(Debug)]
#[repr(C, align(8))]
pub struct BtreeNode<'a, K, V> {
    header: &'a mut NodeHeader,   // header at the start of the buffer
    keymap: &'a mut [K],          // key array directly after the header
    valmap: &'a mut [V],          // value array directly after the keys
    capacity: usize,    // kv capacity of this btree node
    ptr: *const u8,               // owning allocation pointer; null when borrowed
    size: usize,                  // total buffer size in bytes
    id: Option<V>,                // node id set by new()/copy_from_slice()/set_id()
    dirty: bool,                  // in-memory dirty flag (not stored in the header)
    _pin: PhantomPinned,          // opt out of Unpin — presumably because keymap/valmap
                                  // alias the owned buffer; TODO confirm intent
}
26
27impl<'a, K, V> BtreeNode<'a, K, V>
28    where
29        K: Copy + fmt::Display + std::cmp::PartialOrd,
30        V: Copy + fmt::Display
31{
32    pub fn from_slice(buf: &[u8]) -> Self {
33        let len = buf.len();
34        let hdr_size = std::mem::size_of::<NodeHeader>();
35        if len < hdr_size {
36            panic!("input buf size {} smaller than a valid btree node header size {}", len, hdr_size);
37        }
38
39        let ptr = buf.as_ptr() as *mut u8;
40        let header = unsafe {
41            ptr.cast::<NodeHeader>().as_mut().unwrap()
42        };
43
44        let key_size = std::mem::size_of::<K>();
45        let val_size = std::mem::size_of::<V>();
46        let capacity = (len - hdr_size) / (key_size + val_size);
47        assert!(capacity >= header.nchildren as usize,
48            "nchildren in header is large than it's capacity {} > {}", header.nchildren, capacity);
49
50        let keymap = unsafe {
51            std::slice::from_raw_parts_mut(ptr.add(hdr_size) as *mut K, capacity)
52        };
53
54        let valmap = unsafe {
55            std::slice::from_raw_parts_mut(ptr.add(hdr_size + capacity * key_size) as *mut V, capacity)
56        };
57
58        Self {
59            header: header,
60            keymap: keymap,
61            valmap: valmap,
62            capacity: capacity,
63            ptr: std::ptr::null(),
64            size: len,
65            id: None,
66            dirty: false,
67            _pin: PhantomPinned,
68        }
69    }
70
71    pub fn new(v: V, size: usize) -> Option<Self> {
72        if let Ok(aligned_layout) = std::alloc::Layout::from_size_align(size, MIN_ALIGNED) {
73            let ptr = unsafe { std::alloc::alloc_zeroed(aligned_layout) };
74            if ptr.is_null() {
75                return None;
76            }
77
78            let data = unsafe { std::slice::from_raw_parts(ptr, size) };
79            let mut node = Self::from_slice(data);
80            node.ptr = ptr;
81            node.id = Some(v);
82            return Some(node);
83        };
84        None
85    }
86
87    pub fn copy_from_slice(v: V, buf: &[u8]) -> Option<Self> {
88        let size = buf.len();
89        if let Ok(aligned_layout) = std::alloc::Layout::from_size_align(size, MIN_ALIGNED) {
90            let ptr = unsafe { std::alloc::alloc_zeroed(aligned_layout) };
91            if ptr.is_null() {
92                return None;
93            }
94
95            let data = unsafe { std::slice::from_raw_parts_mut(ptr, size) };
96            // copy data from buf to inner data
97            data.copy_from_slice(buf);
98            let mut node = Self::from_slice(data);
99            node.ptr = ptr;
100            node.id = Some(v);
101            return Some(node);
102        };
103        None
104    }
105
106    pub fn as_ref(&self) -> &[u8] {
107        unsafe {
108            std::slice::from_raw_parts(self.ptr as *const u8, self.size)
109        }
110    }
111
112    pub fn as_mut(&mut self) -> &mut [u8] {
113        unsafe {
114            std::slice::from_raw_parts_mut(self.ptr as *mut u8, self.size)
115        }
116    }
117
118    #[inline]
119    pub fn is_large(&self) -> bool {
120        if (self.header.flags & BTREE_NODE_LARGE) == BTREE_NODE_LARGE {
121            return true;
122        }
123        false
124    }
125
126    #[inline]
127    pub fn set_large(&mut self) {
128        self.header.flags |= BTREE_NODE_LARGE;
129    }
130
131    #[inline]
132    pub fn clear_large(&mut self) {
133        self.header.flags &= !BTREE_NODE_LARGE;
134    }
135
136    #[inline]
137    pub fn get_flags(&self) -> u8 {
138        self.header.flags
139    }
140
141    #[inline]
142    pub fn set_flags(&mut self, flags: usize) {
143        self.header.flags = flags as u8;
144    }
145
146    #[inline]
147    pub fn get_level(&self) -> usize {
148        self.header.level as usize
149    }
150
151    #[inline]
152    pub fn set_level(&mut self, level: usize) {
153        self.header.level = level as u8;
154    }
155
156    #[inline]
157    pub fn get_key(&self, index: usize) -> K {
158        self.keymap[index]
159    }
160
161    #[inline]
162    pub fn set_key(&mut self, index: usize, key: &K) {
163        self.keymap[index] = *key;
164    }
165
166    #[inline]
167    pub fn get_val(&self, index: usize) -> V {
168        self.valmap[index]
169    }
170
171    #[inline]
172    pub fn set_val(&mut self, index: usize, val: &V) {
173        self.valmap[index] = *val;
174    }
175
176    #[inline]
177    pub fn get_nchild(&self) -> usize {
178        self.header.nchildren as usize
179    }
180
181    #[inline]
182    pub fn set_nchild(&mut self, c: usize) {
183        self.header.nchildren = c as u16;
184    }
185
186    #[inline]
187    pub fn set_nchild_use_p(&self, c: usize) {
188        let nchild_ptr: *mut u16 = &self.header.nchildren as *const _ as *mut u16;
189        let nchild = c as u16;
190        unsafe {
191            std::ptr::copy::<u16>(ptr::addr_of!(nchild), nchild_ptr, 1);
192        }
193    }
194
195    #[inline]
196    pub fn get_capacity(&self) -> usize {
197        self.capacity
198    }
199
200    #[inline]
201    pub fn has_free_slots(&self) -> bool {
202        self.get_nchild() < self.capacity
203    }
204
205    #[inline]
206    pub fn get_nchild_min(&self) -> usize {
207        (self.capacity - 1) / 2 + 1
208    }
209
210    #[inline]
211    pub fn is_overflowing(&self) -> bool {
212        self.get_nchild() > self.get_nchild_min()
213    }
214
215    #[inline]
216    pub fn node_key(&self) -> &K {
217        &self.keymap[0]
218    }
219
220    #[inline]
221    pub fn id(&self) -> Option<&V> {
222        self.id.as_ref()
223    }
224
225    #[inline]
226    pub fn set_id(&mut self, v: V) {
227        self.id = Some(v);
228    }
229
230    #[inline]
231    pub fn init(&mut self, flags: usize, level: usize, nchild: usize) {
232        self.set_flags(flags);
233        self.set_level(level);
234        self.set_nchild(nchild);
235    }
236
237    #[inline]
238    pub fn init_root(&mut self, level: usize, is_large: bool) {
239        if is_large {
240            self.set_large();
241        }
242        self.set_level(level);
243    }
244
245    #[inline]
246    pub fn is_dirty(&self) -> bool {
247        self.dirty
248    }
249
250    #[inline]
251    pub fn mark_dirty(&mut self) {
252        self.dirty = true;
253    }
254
255    #[inline]
256    pub fn clear_dirty(&mut self) {
257        self.dirty = false;
258    }
259
260    // move n k,v pairs from head of right append to left
261    // and move rest of right to it's head
262    pub fn move_left(left: &BtreeNode<K, V>, right: &BtreeNode<K, V>, n: usize) {
263        let mut lnchild = left.get_nchild();
264        let mut rnchild = right.get_nchild();
265
266        let lkeymap_tail_ptr = &left.keymap[lnchild] as *const K as *mut K;
267        let lvalmap_tail_ptr = &left.valmap[lnchild] as *const V as *mut V;
268
269        let rkeymap_head_ptr = &right.keymap[0] as *const K as *mut K;
270        let rvalmap_head_ptr = &right.valmap[0] as *const V as *mut V;
271
272        let rkeymap_n_ptr = &right.keymap[n] as *const K as *mut K;
273        let rvalmap_n_ptr = &right.valmap[n] as *const V as *mut V;
274
275        unsafe {
276
277        // append right to left
278        ptr::copy::<K>(rkeymap_head_ptr, lkeymap_tail_ptr, n);
279        ptr::copy::<V>(rvalmap_head_ptr, lvalmap_tail_ptr, n);
280
281        // move rest of right to it's head
282        ptr::copy::<K>(rkeymap_n_ptr, rkeymap_head_ptr, rnchild - n);
283        ptr::copy::<V>(rvalmap_n_ptr, rvalmap_head_ptr, rnchild - n);
284
285        }
286
287        lnchild += n;
288        rnchild -= n;
289
290        left.set_nchild_use_p(lnchild);
291        right.set_nchild_use_p(rnchild);
292    }
293
294    // reserve space at head of right for n slot
295    // move n k,v pairs from tail of left to head of right
296    pub fn move_right(left: &BtreeNode<K, V>, right: &BtreeNode<K, V>, n: usize) {
297        let mut lnchild = left.get_nchild();
298        let mut rnchild = right.get_nchild();
299
300        let lkeymap_tailn_ptr = &left.keymap[lnchild - n] as *const K as *mut K;
301        let lvalmap_tailn_ptr = &left.valmap[lnchild - n] as *const V as *mut V;
302
303        let rkeymap_head_ptr = &right.keymap[0] as *const K as *mut K;
304        let rvalmap_head_ptr = &right.valmap[0] as *const V as *mut V;
305
306        let rkeymap_n_ptr = &right.keymap[n] as *const K as *mut K;
307        let rvalmap_n_ptr = &right.valmap[n] as *const V as *mut V;
308
309        unsafe {
310
311        // reserve n slot by move all child from head to n
312        std::ptr::copy::<K>(rkeymap_head_ptr, rkeymap_n_ptr, rnchild);
313        std::ptr::copy::<V>(rvalmap_head_ptr, rvalmap_n_ptr, rnchild);
314
315        // move n k,v pairs from tail of left to head of right
316        std::ptr::copy::<K>(lkeymap_tailn_ptr, rkeymap_head_ptr, n);
317        std::ptr::copy::<V>(lvalmap_tailn_ptr, rvalmap_head_ptr, n);
318
319        }
320
321        lnchild -= n;
322        rnchild += n;
323
324        left.set_nchild_use_p(lnchild);
325        right.set_nchild_use_p(rnchild);
326    }
327
328    // lookup key
329    // @return:
330    //   - (found, index)
331    //   - (notfound, index)
332    pub fn lookup(&self, key: &K) -> (bool, usize) {
333        let mut low: isize = 0;
334        let mut high: isize = (self.header.nchildren - 1) as isize;
335        let mut s = false;
336        let mut index = 0;
337
338        while low <= high {
339            index = (low + high) / 2;
340            let nkey = self.get_key(index as usize);
341            if &nkey == key {
342                return (true, index as usize);
343            } else if &nkey < key {
344                low = index + 1;
345                s = false;
346            } else {
347                high = index - 1;
348                s = true;
349            }
350        }
351
352        if self.get_level() > BTREE_NODE_LEVEL_MIN {
353            if s && index > 0 {
354                index -= 1;
355            }
356        } else if s == false {
357            index += 1;
358        }
359
360        return (false, index as usize);
361    }
362
363    // insert key val @ index
364    pub fn insert(&mut self, index: usize, key: &K, val: &V) {
365        let mut nchild = self.get_nchild();
366
367        if index < nchild {
368            unsafe {
369                let ksrc: *const K = &self.keymap[index] as *const K;
370                let vsrc: *const V = &self.valmap[index] as *const V;
371
372                let kdst: *mut K = &mut self.keymap[index + 1] as *mut K;
373                let vdst: *mut V = &mut self.valmap[index + 1] as *mut V;
374
375                let count = nchild - index;
376
377                std::ptr::copy::<K>(ksrc, kdst, count);
378                std::ptr::copy::<V>(vsrc, vdst, count);
379            }
380        }
381
382        self.set_key(index, key);
383        self.set_val(index, val);
384        nchild += 1;
385        self.set_nchild(nchild);
386    }
387
388    // delete key val @ index
389    pub fn delete(&mut self, index: usize, key: &mut K, val: &mut V) {
390        let mut nchild = self.get_nchild();
391
392        *key = self.get_key(index);
393        *val = self.get_val(index);
394
395        if index < nchild - 1 {
396            unsafe {
397                let ksrc: *const K = &self.keymap[index + 1] as *const K;
398                let vsrc: *const V = &self.valmap[index + 1] as *const V;
399
400                let kdst: *mut K = &mut self.keymap[index] as *mut K;
401                let vdst: *mut V = &mut self.valmap[index] as *mut V;
402
403                let count = nchild - index - 1;
404
405                std::ptr::copy::<K>(ksrc, kdst, count);
406                std::ptr::copy::<V>(vsrc, vdst, count);
407            }
408        }
409
410        nchild -= 1;
411        self.set_nchild(nchild);
412    }
413}
414
415impl<'a, K, V> Drop for BtreeNode<'a, K, V> {
416    fn drop(&mut self) {
417        if self.ptr.is_null() {
418            return;
419        }
420        if let Ok(layout) = std::alloc::Layout::from_size_align(self.size, MIN_ALIGNED) {
421            unsafe { std::alloc::dealloc(self.ptr as *mut u8, layout) };
422        }
423    }
424}
425
426impl<'a, K, V> PartialEq for BtreeNode<'a, K, V> {
427    fn eq(&self, other: &Self) -> bool {
428        std::ptr::addr_of!(self.header) == std::ptr::addr_of!(other.header)
429    }
430}
431
432impl<'a, K, V> fmt::Display for BtreeNode<'a, K, V>
433    where
434        K: Copy + fmt::Display + std::cmp::PartialOrd,
435        V: Copy + fmt::Display
436{
437    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
438        if self.is_large() {
439            write!(f, "===== dump btree node @{:?} ROOT ====\n", self.header as *const NodeHeader)?;
440        } else {
441            if self.id().is_some() {
442                write!(f, "===== dump btree node @{:?} id {} ====\n", self.header as *const NodeHeader, self.id().unwrap())?;
443            } else {
444                write!(f, "===== dump btree node @{:?} id None ====\n", self.header as *const NodeHeader)?;
445            }
446        }
447        write!(f, "  flags: {},  level: {}, nchildren: {}, capacity: {}\n",
448            self.header.flags, self.header.level, self.header.nchildren, self.capacity)?;
449        for idx in 0..self.header.nchildren.into() {
450            write!(f, "{:3}   {:20}   {:20}\n", idx, self.get_key(idx), self.get_val(idx))?;
451        }
452        write!(f, "")
453    }
454}
455
/// View of a "direct" node over a raw byte buffer laid out as
/// `NodeHeader | [V; capacity]` — values only, addressed by slot index
/// (no key array).
///
/// Like `BtreeNode`, it either borrows (`from_slice`, `ptr` null) or owns
/// (`new` / `copy_from_slice`, freed on `Drop`) its buffer.
#[derive(Debug)]
#[repr(C, align(8))]
pub struct DirectNode<'a, V> {
    header: &'a mut NodeHeader,   // header at the start of the buffer
    valmap: &'a mut [V],          // value array directly after the header
    capacity: usize,              // number of V slots in the buffer
    ptr: *const u8,               // owning allocation pointer; null when borrowed
    size: usize,                  // total buffer size in bytes
    dirty: bool,                  // declared but not used by methods in this file
    _pin: PhantomPinned,          // opt out of Unpin — presumably because valmap
                                  // aliases the owned buffer; TODO confirm intent
}
467
impl<'a, V> DirectNode<'a, V>
    where
        V: Copy + fmt::Display
{
    /// Interpret `buf` as a direct node: a `NodeHeader` followed by as many
    /// `V` slots as fit in the remaining bytes.  The returned node does NOT
    /// own the buffer (`ptr` stays null, so `Drop` frees nothing).
    ///
    /// # Panics
    /// Panics if `buf` is smaller than a `NodeHeader`, or if the header's
    /// `nchildren` exceeds the computed capacity.
    ///
    /// NOTE(review): turns a shared `&[u8]` into `&mut` views of the same
    /// memory; callers must guarantee exclusive access and 8-byte alignment
    /// of `buf` — TODO confirm at call sites.
    pub fn from_slice(buf: &[u8]) -> Self {
        let len = buf.len();
        let hdr_size = std::mem::size_of::<NodeHeader>();
        if len < hdr_size {
            panic!("input buf size {} smaller than a valid btree node header size {}", len, hdr_size);
        }

        let ptr = buf.as_ptr() as *mut u8;
        // SAFETY: len >= hdr_size checked above; ptr comes from a slice so
        // it is non-null.
        let header = unsafe {
            ptr.cast::<NodeHeader>().as_mut().unwrap()
        };

        let val_size = std::mem::size_of::<V>();
        // How many V slots fit after the header.
        let capacity = (len - hdr_size) / val_size;
        assert!(capacity >= header.nchildren as usize,
            "nchildren in header is large than it's capacity {} > {}", header.nchildren, capacity);

        // SAFETY: the value array spans [hdr_size, hdr_size + capacity *
        // val_size) which lies within buf by construction of `capacity`.
        let valmap = unsafe {
            std::slice::from_raw_parts_mut(ptr.add(hdr_size) as *mut V, capacity)
        };

        Self {
            header: header,
            valmap: valmap,
            capacity: capacity,
            ptr: std::ptr::null(),
            size: len,
            dirty: false,
            _pin: PhantomPinned,
        }
    }

    /// Allocate a zeroed `size`-byte buffer and build an OWNING node over
    /// it.  Returns `None` if the layout is invalid or allocation fails.
    pub fn new(size: usize) -> Option<Self> {
        if let Ok(aligned_layout) = std::alloc::Layout::from_size_align(size, MIN_ALIGNED) {
            let ptr = unsafe { std::alloc::alloc_zeroed(aligned_layout) };
            if ptr.is_null() {
                return None;
            }

            // SAFETY: ptr is a valid, zeroed allocation of `size` bytes.
            let data = unsafe { std::slice::from_raw_parts(ptr, size) };
            let mut node = Self::from_slice(data);
            // Record ownership so Drop releases the allocation.
            node.ptr = ptr;
            return Some(node);
        };
        None
    }

    /// Allocate an owning node of `buf.len()` bytes and copy `buf` into it.
    /// Note: `from_slice` runs on the zeroed buffer BEFORE the copy, so the
    /// nchildren-vs-capacity assert does not see the copied header.
    pub fn copy_from_slice(buf: &[u8]) -> Option<Self> {
        let size = buf.len();
        if let Some(mut n) = Self::new(size) {
            // copy data from buf to inner data
            let data = n.as_mut();
            data.copy_from_slice(buf);
            return Some(n);
        }
        None
    }

    /// Initialize header flags, level and child count.  Each value is
    /// truncated to the header field's width (u8/u8/u16).
    #[inline]
    pub fn init(&mut self, flags: usize, level: usize, nchild: usize) {
        self.header.flags = flags as u8;
        self.header.level = level as u8;
        self.header.nchildren = nchild as u16;
    }

    /// Value at slot `index`.  Panics if `index >= capacity`.
    #[inline]
    pub fn get_val(&self, index: usize) -> V {
        self.valmap[index]
    }

    /// Write value at slot `index`.  Panics if `index >= capacity`.
    #[inline]
    pub fn set_val(&mut self, index: usize, val: &V) {
        self.valmap[index] = *val;
    }

    /// Number of V slots in the buffer.
    #[inline]
    pub fn get_capacity(&self) -> usize {
        self.capacity
    }

    /// Raw bytes of the node buffer.
    ///
    /// NOTE(review): only valid for OWNING nodes — for a `from_slice` node
    /// `self.ptr` is null and this constructs a slice from a null pointer.
    pub fn as_ref(&self) -> &[u8] {
        unsafe {
            std::slice::from_raw_parts(self.ptr as *const u8, self.size)
        }
    }

    /// Mutable raw bytes of the node buffer (owning nodes only — see
    /// `as_ref`).
    pub fn as_mut(&mut self) -> &mut [u8] {
        unsafe {
            std::slice::from_raw_parts_mut(self.ptr as *mut u8, self.size)
        }
    }
}
564
565impl<'a, V> Drop for DirectNode<'a, V> {
566    fn drop(&mut self) {
567        if self.ptr.is_null() {
568            return;
569        }
570        if let Ok(layout) = std::alloc::Layout::from_size_align(self.size, MIN_ALIGNED) {
571            unsafe { std::alloc::dealloc(self.ptr as *mut u8, layout) };
572        }
573    }
574}
575
576impl<'a, V> fmt::Display for DirectNode<'a, V>
577    where
578        V: Copy + fmt::Display
579{
580    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
581        write!(f, "===== dump direct node @{:?} ====\n", self.header as *const NodeHeader)?;
582        write!(f, "  flags: {},  level: {}, nchildren: {}, capacity: {}\n",
583            self.header.flags, self.header.level, self.header.nchildren, self.capacity)?;
584        for idx in 0..self.capacity {
585            write!(f, "{:3}   {:20}   {:20}\n", idx, idx, self.get_val(idx))?;
586        }
587        write!(f, "")
588    }
589}
590
#[cfg(test)]
mod tests {
    use super::*;

    /// Exercise the basic node lifecycle: allocate, init, ordered insert,
    /// lookup hit/miss, and delete.  (The placeholder test was empty.)
    #[test]
    fn node() {
        let mut node: BtreeNode<u64, u64> =
            BtreeNode::new(1, 4096).expect("failed to allocate node");
        node.init(0, BTREE_NODE_LEVEL_MIN, 0);
        assert_eq!(node.get_level(), BTREE_NODE_LEVEL_MIN);
        assert_eq!(node.get_nchild(), 0);
        assert!(node.has_free_slots());

        // Insert out of order at explicit slots; keymap must end up sorted.
        node.insert(0, &10, &100);
        node.insert(1, &30, &300);
        node.insert(1, &20, &200); // shifts 30 right
        assert_eq!(node.get_nchild(), 3);
        assert_eq!(node.get_key(0), 10);
        assert_eq!(node.get_key(1), 20);
        assert_eq!(node.get_key(2), 30);
        assert_eq!(node.get_val(1), 200);

        // Exact-match lookup.
        let (found, idx) = node.lookup(&20);
        assert!(found);
        assert_eq!(idx, 1);

        // Miss lookup must not find anything.
        let (found, _) = node.lookup(&25);
        assert!(!found);

        // Delete returns the removed pair and compacts the arrays.
        let (mut k, mut v) = (0u64, 0u64);
        node.delete(1, &mut k, &mut v);
        assert_eq!((k, v), (20, 200));
        assert_eq!(node.get_nchild(), 2);
        assert_eq!(node.get_key(1), 30);
    }
}