// btree_ondisk/node.rs

1use std::ptr;
2use std::fmt;
3use std::marker::PhantomPinned;
4use std::marker::PhantomData;
5use crate::ondisk::NodeHeader;
6use crate::NodeValue;
7
// node header flag bits
pub const BTREE_NODE_FLAG_LARGE: u8 = 0b0000_0001;  // "large" root variant (see init_root)
pub const BTREE_NODE_FLAG_LEAF: u8 = 0b0000_0010;   // node stores values (V), not child ptrs (P)
// tree level numbering: data level is 0, leaves sit just above it
pub const BTREE_NODE_LEVEL_DATA: usize = 0x00;
pub const BTREE_NODE_LEVEL_MIN: usize = BTREE_NODE_LEVEL_DATA + 1;
pub const BTREE_NODE_LEVEL_MAX: usize = 14;         // maximum supported level index
pub const BTREE_NODE_LEVEL_LEAF: usize = BTREE_NODE_LEVEL_MIN;

// alignment used for all self-allocated node buffers
const MIN_ALIGNED: usize = 8;
16
/// btree node descriptor for memory pointer, normally a page
///
/// SAFETY:
///   node operations mutable by unsafe code,
///   this works because all ops for same node are expected to be ran in a single thread
///
#[derive(Debug)]
#[repr(C, align(8))]
pub struct BtreeNode<'a, K, V, P> {
    header: &'a mut NodeHeader, // header view at the start of the backing buffer
    keymap: &'a mut [K],        // key array located right after the header
    valptr: *mut u8,            // untyped value array (V for leaf, P for internal nodes)
    capacity: usize,    // kv capacity of this btree node
    ptr: *const u8,             // owning pointer when self-allocated; null when built via from_slice
    size: usize,                // total byte size of the backing buffer
    id: P,                      // external node id; P::invalid_value() when unassigned
    dirty: bool,                // in-memory modified flag, not part of the on-disk layout
    _pin: PhantomPinned,        // header/keymap point into our own buffer; keep the descriptor unmovable
    phantom: PhantomData<V>,    // V only appears behind valptr casts
}
37
#[cfg(feature = "arc")]
// SAFETY(review): raw pointer fields make this type !Send/!Sync by default; these
// opt-ins rely on the struct's documented single-thread-per-node usage — TODO
// confirm callers behind the "arc" feature uphold that contract.
unsafe impl<'a, K, V, P> Send for BtreeNode<'a, K, V, P> {}
#[cfg(feature = "arc")]
unsafe impl<'a, K, V, P> Sync for BtreeNode<'a, K, V, P> {}
42
43impl<'a, K, V, P> BtreeNode<'a, K, V, P>
44    where
45        K: Copy + fmt::Display + std::cmp::PartialOrd,
46        V: Copy + fmt::Display + NodeValue,
47        P: Copy + fmt::Display + NodeValue,
48{
    /// Builds a node view over an existing buffer without taking ownership
    /// (`ptr` stays null, so Drop will not free `buf`).
    ///
    /// # Panics
    /// - if `buf` is smaller than `NodeHeader`
    /// - if the header's `nchildren` exceeds the computed capacity
    pub fn from_slice(buf: &[u8]) -> Self {
        let len = buf.len();
        let hdr_size = std::mem::size_of::<NodeHeader>();
        if len < hdr_size {
            panic!("input buf size {} smaller than a valid btree node header size {}", len, hdr_size);
        }

        // NOTE(review): casts away constness; the buffer must actually be writable
        // and properly aligned for NodeHeader/K — TODO confirm at call sites
        let ptr = buf.as_ptr() as *mut u8;
        let header = unsafe {
            ptr.cast::<NodeHeader>().as_mut().unwrap()
        };

        let key_size = std::mem::size_of::<K>();
        // leaf nodes store V values, internal nodes store P child pointers
        let val_size = if header.flags & BTREE_NODE_FLAG_LEAF > 0 {
            std::mem::size_of::<V>()
        } else {
            std::mem::size_of::<P>()
        };
        let capacity = (len - hdr_size) / (key_size + val_size);
        assert!(capacity >= header.nchildren as usize,
            "nchildren in header is large than it's capacity {} > {}", header.nchildren, capacity);

        // keys live immediately after the header ...
        let keymap = unsafe {
            std::slice::from_raw_parts_mut(ptr.add(hdr_size) as *mut K, capacity)
        };

        // ... and values immediately after the full key area
        let valptr = unsafe {
            ptr.add(hdr_size + capacity * key_size)
        };

        Self {
            header: header,
            keymap: keymap,
            valptr: valptr,
            capacity: capacity,
            ptr: std::ptr::null(),  // not owned; Drop is a no-op for this node
            size: len,
            id: P::invalid_value(),
            dirty: false,
            _pin: PhantomPinned,
            phantom: PhantomData,
        }
    }
92
93    pub fn new(size: usize) -> Option<Self> {
94        if let Ok(aligned_layout) = std::alloc::Layout::from_size_align(size, MIN_ALIGNED) {
95            let ptr = unsafe { std::alloc::alloc_zeroed(aligned_layout) };
96            if ptr.is_null() {
97                return None;
98            }
99
100            let data = unsafe { std::slice::from_raw_parts(ptr, size) };
101            let mut node = Self::from_slice(data);
102            node.ptr = ptr;
103            node.id = P::invalid_value();
104            return Some(node);
105        };
106        None
107    }
108
109    pub fn new_with_id(size: usize, id: &P) -> Option<Self> {
110        if let Some(node) = Self::new(size) {
111            node.set_id(*id);
112            return Some(node);
113        }
114        None
115    }
116
    /// Allocates an owned buffer of `buf.len()` bytes, copies `buf` into it and
    /// wraps the copy as a node tagged with `id`.
    /// Returns `None` when the layout is invalid or the allocation fails.
    pub fn copy_from_slice(id: P, buf: &[u8]) -> Option<Self> {
        let size = buf.len();
        if let Ok(aligned_layout) = std::alloc::Layout::from_size_align(size, MIN_ALIGNED) {
            let ptr = unsafe { std::alloc::alloc_zeroed(aligned_layout) };
            if ptr.is_null() {
                return None;
            }

            let data = unsafe { std::slice::from_raw_parts_mut(ptr, size) };
            // copy data from buf to inner data
            data.copy_from_slice(buf);
            let mut node = Self::from_slice(data);
            node.ptr = ptr;  // take ownership so Drop frees the allocation
            node.id = id;
            return Some(node);
        };
        None
    }
135
    /// Raw byte view of the owned backing buffer.
    ///
    /// NOTE(review): `self.ptr` is null for nodes built via `from_slice`; calling
    /// this on such a node would be UB — presumably only self-allocated nodes are
    /// passed here, TODO confirm against callers.
    pub fn as_ref(&self) -> &[u8] {
        unsafe {
            std::slice::from_raw_parts(self.ptr as *const u8, self.size)
        }
    }

    /// Mutable raw byte view of the owned backing buffer.
    ///
    /// NOTE(review): hands out `&mut` from `&self`; relies on the single-thread
    /// usage contract documented on the struct. Same null-`ptr` caveat as `as_ref`.
    pub fn as_mut(&self) -> &mut [u8] {
        unsafe {
            std::slice::from_raw_parts_mut(self.ptr as *mut u8, self.size)
        }
    }
147
    /// True when the header leaf flag is set (node stores V values, not P children).
    #[inline]
    pub fn is_leaf(&self) -> bool {
        if (self.header.flags & BTREE_NODE_FLAG_LEAF) == BTREE_NODE_FLAG_LEAF {
            return true;
        }
        false
    }

    /// Sets the leaf flag.
    ///
    /// SAFETY: writes through `&self` via a volatile store; sound only under the
    /// struct's single-thread usage contract.
    #[inline]
    pub fn set_leaf(&self) {
        let ptr = ptr::addr_of!(self.header.flags) as *mut u8;
        unsafe {
            let mut flags = self.header.flags;
            flags |= BTREE_NODE_FLAG_LEAF;
            ptr::write_volatile(ptr, flags);
        }
    }

    /// Clears the leaf flag (same volatile read-modify-write pattern as `set_leaf`).
    #[inline]
    pub fn clear_leaf(&self) {
        let ptr = ptr::addr_of!(self.header.flags) as *mut u8;
        unsafe {
            let mut flags = self.header.flags;
            flags &= !BTREE_NODE_FLAG_LEAF;
            ptr::write_volatile(ptr, flags);
        }
    }

    /// True when the header large flag is set (see `init_root`).
    #[inline]
    pub fn is_large(&self) -> bool {
        if (self.header.flags & BTREE_NODE_FLAG_LARGE) == BTREE_NODE_FLAG_LARGE {
            return true;
        }
        false
    }

    /// Sets the large flag (volatile write through `&self`).
    #[inline]
    pub fn set_large(&self) {
        let ptr = ptr::addr_of!(self.header.flags) as *mut u8;
        unsafe {
            let mut flags = self.header.flags;
            flags |= BTREE_NODE_FLAG_LARGE;
            ptr::write_volatile(ptr, flags);
        }
    }

    /// Clears the large flag (volatile write through `&self`).
    #[inline]
    pub fn clear_large(&self) {
        let ptr = ptr::addr_of!(self.header.flags) as *mut u8;
        unsafe {
            let mut flags = self.header.flags;
            flags &= !BTREE_NODE_FLAG_LARGE;
            ptr::write_volatile(ptr, flags);
        }
    }

    /// Read the raw header flag byte.
    #[inline]
    pub fn get_flags(&self) -> u8 {
        self.header.flags
    }

    /// Overwrite the whole header flag byte (volatile write through `&self`).
    #[inline]
    pub fn set_flags(&self, flags: u8) {
        let ptr = ptr::addr_of!(self.header.flags) as *mut u8;
        unsafe {
            ptr::write_volatile(ptr, flags as u8);
        }
    }

    /// The node's tree level (0 = data, 1 = leaf, see level constants).
    #[inline]
    pub fn get_level(&self) -> usize {
        self.header.level as usize
    }

    /// Store the tree level; truncates to u8 (levels are bounded by
    /// BTREE_NODE_LEVEL_MAX so this is expected to fit).
    #[inline]
    pub fn set_level(&self, level: usize) {
        let ptr = ptr::addr_of!(self.header.level) as *mut u8;
        unsafe {
            ptr::write_volatile(ptr, level as u8);
        }
    }
229
    /// Borrow the key at `index` (bounded by capacity, not by nchildren).
    #[inline]
    pub fn get_key(&self, index: usize) -> &K {
        &self.keymap[index]
    }

    /// Overwrite the key slot at `index` through `&self` (single-thread contract).
    #[inline]
    pub fn set_key(&self, index: usize, key: &K) {
         unsafe {
            ptr::copy_nonoverlapping(
                ptr::addr_of!(*key),
                ptr::addr_of!(self.keymap[index]) as *mut K,
                1
            )
        }
    }

    /// Borrow the value at `index`, reinterpreting the value area as `[X]`.
    ///
    /// NOTE(review): `X` must be the node's actual value type (V for leaf,
    /// P for internal) — the cast is unchecked.
    #[inline]
    pub fn get_val<X>(&self, index: usize) -> &X {
        let slice = unsafe {
            std::slice::from_raw_parts(self.valptr as *const X, self.capacity)
        };
        &slice[index]
    }

    /// Overwrite the value slot at `index`; same `X` caveat as `get_val`.
    #[inline]
    pub fn set_val<X>(&self, index: usize, val: &X) {
        unsafe {
            let dst = (self.valptr as *mut X).add(index);
            ptr::copy_nonoverlapping(ptr::addr_of!(*val), dst, 1)
        }
    }
261
    /// Number of live entries/children recorded in the header.
    #[inline]
    pub fn get_nchild(&self) -> usize {
        self.header.nchildren as usize
    }

    /// Store the child count (truncated to u16) via a volatile write through `&self`.
    #[inline]
    pub fn set_nchild(&self, c: usize) {
        let ptr = ptr::addr_of!(self.header.nchildren) as *mut u16;
        unsafe {
            ptr::write_volatile(ptr, c as u16);
        }
    }

    /// Alternate child-count store using `ptr::copy` from a stack temporary.
    /// NOTE(review): unlike `set_nchild` this is a non-volatile write —
    /// presumably intentional for the bulk-move paths that call it, TODO confirm.
    #[inline]
    pub fn set_nchild_use_p(&self, c: usize) {
        let nchild_ptr: *mut u16 = &self.header.nchildren as *const _ as *mut u16;
        let nchild = c as u16;
        unsafe {
            std::ptr::copy::<u16>(ptr::addr_of!(nchild), nchild_ptr, 1);
        }
    }

    /// Read the opaque 32-bit user field from the header.
    #[inline]
    pub fn get_userdata(&self) -> u32 {
        self.header.userdata
    }

    /// Store the opaque 32-bit user field (volatile write through `&self`).
    #[inline]
    pub fn set_userdata(&self, data: u32) {
        let ptr = ptr::addr_of!(self.header.userdata) as *mut u32;
        unsafe {
            ptr::write_volatile(ptr, data);
        }
    }
296
    /// Current k/v capacity as computed at construction (depends on leaf/internal).
    #[inline]
    pub fn get_capacity(&self) -> usize {
        self.capacity
    }

    #[inline]
    // dynamic calc V capacity
    /// Capacity this buffer would have if its values were type `V`
    /// (always uses V, unlike `self.capacity` which honors the leaf flag).
    pub fn get_v_capacity(&self) -> usize {
        let hdr_size = std::mem::size_of::<NodeHeader>();
        let key_size = std::mem::size_of::<K>();
        let val_size = std::mem::size_of::<V>();
        (self.size - hdr_size) / (key_size + val_size)
    }

    /// True when at least one free slot remains.
    #[inline]
    pub fn has_free_slots(&self) -> bool {
        self.get_nchild() < self.capacity
    }

    /// Minimum child count for a balanced node: ceil(capacity / 2).
    #[inline]
    pub fn get_nchild_min(&self) -> usize {
        (self.capacity - 1) / 2 + 1
    }

    /// True when the node holds more than the balance minimum (can lend entries).
    #[inline]
    pub fn is_overflowing(&self) -> bool {
        self.get_nchild() > self.get_nchild_min()
    }

    /// The node's first (smallest) key.
    /// NOTE(review): reads slot 0 even when the node is empty — garbage in that case.
    #[inline]
    pub fn node_key(&self) -> &K {
        &self.keymap[0]
    }

    /// External id of this node (`P::invalid_value()` when unassigned).
    #[inline]
    pub fn id(&self) -> &P {
        &self.id
    }

    /// Assign the external id (volatile write through `&self`).
    #[inline]
    pub fn set_id(&self, id: P) {
        let ptr = ptr::addr_of!(self.id) as *mut P;
        unsafe {
            ptr::write_volatile(ptr, id);
        }
    }
343
    #[inline]
    // re-calc capacity and valptr by flags
    /// Recomputes `keymap`, `valptr` and `capacity` after the leaf flag changed,
    /// choosing the value size from the current level.
    ///
    /// NOTE(review): uses `&self.header.flags` as the buffer base pointer, which
    /// is only correct if `flags` is the first field of `NodeHeader` — TODO
    /// confirm against the NodeHeader layout.
    pub(crate) fn do_update(&mut self) {
        let len = self.size;
        let hdr_size = std::mem::size_of::<NodeHeader>();
        if len < hdr_size {
            panic!("input buf size {} smaller than a valid btree node header size {}", len, hdr_size);
        }

        let ptr = ptr::addr_of!(self.header.flags) as *mut u8;

        let key_size = std::mem::size_of::<K>();
        // level and leaf flag must agree before re-deriving the layout
        let val_size = if self.get_level() == BTREE_NODE_LEVEL_LEAF {
            assert!(self.get_flags() & BTREE_NODE_FLAG_LEAF == BTREE_NODE_FLAG_LEAF);
            std::mem::size_of::<V>()
        } else {
            assert!(self.get_flags() & BTREE_NODE_FLAG_LEAF != BTREE_NODE_FLAG_LEAF);
            std::mem::size_of::<P>()
        };
        let capacity = (len - hdr_size) / (key_size + val_size);

        self.keymap = unsafe {
            std::slice::from_raw_parts_mut(ptr.add(hdr_size) as *mut K, capacity)
        };
        self.valptr = unsafe {
            ptr.add(hdr_size + capacity * key_size)
        };
        self.capacity = capacity;
    }
373
    #[inline]
    // re-calc capacity and valptr based on X
    /// Like `do_update` but the value type is supplied explicitly as `X`
    /// instead of being derived from the level/flags.
    ///
    /// NOTE(review): same base-pointer assumption as `do_update` — `flags` must
    /// be the first field of `NodeHeader`, TODO confirm.
    pub(crate) fn do_reinit<X>(&mut self) {
        let len = self.size;
        let hdr_size = std::mem::size_of::<NodeHeader>();
        if len < hdr_size {
            panic!("input buf size {} smaller than a valid btree node header size {}", len, hdr_size);
        }

        let ptr = ptr::addr_of!(self.header.flags) as *mut u8;

        let key_size = std::mem::size_of::<K>();
        let val_size = std::mem::size_of::<X>();
        let capacity = (len - hdr_size) / (key_size + val_size);

        self.keymap = unsafe {
            std::slice::from_raw_parts_mut(ptr.add(hdr_size) as *mut K, capacity)
        };
        self.valptr = unsafe {
            ptr.add(hdr_size + capacity * key_size)
        };
        self.capacity = capacity;
    }
397
398    #[inline]
399    pub fn init(&self, level: usize, nchild: usize) {
400        if level == BTREE_NODE_LEVEL_LEAF {
401            self.set_leaf();
402        }
403        self.set_level(level);
404        self.set_nchild(nchild);
405    }
406
407    #[inline]
408    pub fn init_root(&self, level: usize, is_large: bool) {
409        if level == BTREE_NODE_LEVEL_LEAF {
410            self.set_leaf();
411        }
412        if is_large {
413            self.set_large();
414        }
415        self.set_level(level);
416    }
417
    /// Whether this node has unflushed in-memory modifications.
    #[inline]
    pub fn is_dirty(&self) -> bool {
        self.dirty
    }

    /// Flag the node as modified (volatile write through `&self`).
    #[inline]
    pub fn mark_dirty(&self) {
        let ptr = ptr::addr_of!(self.dirty) as *mut bool;
        unsafe {
            ptr::write_volatile(ptr, true);
        }
    }

    /// Clear the modified flag, e.g. after the node has been written back.
    #[inline]
    pub fn clear_dirty(&self) {
        let ptr = ptr::addr_of!(self.dirty) as *mut bool;
        unsafe {
            ptr::write_volatile(ptr, false);
        }
    }
438
    #[inline]
    // move n k,v pairs from head of right append to left
    // and move rest of right to it's head
    //
    // NOTE(review): assumes n <= right.nchild and left has n free slots;
    // violating either reads/writes out of range — callers must guarantee it.
    fn do_move_left<X>(left: &BtreeNode<K, V, P>, right: &BtreeNode<K, V, P>, n: usize) {

        // input param protection
        if n == 0 { return; }

        assert!(left.is_leaf() == right.is_leaf());

        let mut lnchild = left.get_nchild();
        let mut rnchild = right.get_nchild();

        unsafe {

        // one-past-the-end of left's live entries (the append position)
        let lkeymap_tail_ptr = &left.keymap[lnchild] as *const K as *mut K;
        let lvalmap_tail_ptr = (left.valptr as *mut X).add(lnchild);

        let rkeymap_head_ptr = &right.keymap[0] as *const K as *mut K;
        let rvalmap_head_ptr = right.valptr as *mut X;

        // first entry of right that stays behind
        let rkeymap_n_ptr = &right.keymap[n] as *const K as *mut K;
        let rvalmap_n_ptr = (right.valptr as *mut X).add(n);


        // append right to left
        ptr::copy::<K>(rkeymap_head_ptr, lkeymap_tail_ptr, n);
        ptr::copy::<X>(rvalmap_head_ptr, lvalmap_tail_ptr, n);

        // move rest of right to it's head
        ptr::copy::<K>(rkeymap_n_ptr, rkeymap_head_ptr, rnchild - n);
        ptr::copy::<X>(rvalmap_n_ptr, rvalmap_head_ptr, rnchild - n);

        }

        lnchild += n;
        rnchild -= n;

        left.set_nchild_use_p(lnchild);
        right.set_nchild_use_p(rnchild);
    }
480
481    pub fn move_left(left: &BtreeNode<K, V, P>, right: &BtreeNode<K, V, P>, n: usize) {
482        if left.is_leaf() && right.is_leaf() {
483            Self::do_move_left::<V>(left, right, n);
484        } else if !left.is_leaf() && !right.is_leaf() {
485            Self::do_move_left::<P>(left, right, n);
486        } else {
487            panic!("left node is leaf {}, right node is leaf {}, not consistent", left.is_leaf(), right.is_leaf());
488        }
489    }
490
491    #[inline]
492    // reserve space at head of right for n slot
493    // move n k,v pairs from tail of left to head of right
494    fn do_move_right<X>(left: &BtreeNode<K, V, P>, right: &BtreeNode<K, V, P>, n: usize) {
495
496        // input param protection
497        if n == 0 { return; }
498
499        let mut lnchild = left.get_nchild();
500        let mut rnchild = right.get_nchild();
501
502        unsafe {
503
504        let lkeymap_tailn_ptr = &left.keymap[lnchild - n] as *const K as *mut K;
505        let lvalmap_tailn_ptr = (left.valptr as *mut X).add(lnchild - n);
506
507        let rkeymap_head_ptr = &right.keymap[0] as *const K as *mut K;
508        let rvalmap_head_ptr = right.valptr as *mut X;
509
510        let rkeymap_n_ptr = &right.keymap[n] as *const K as *mut K;
511        let rvalmap_n_ptr = (right.valptr as *mut X).add(n);
512
513
514        // reserve n slot by move all child from head to n
515        std::ptr::copy::<K>(rkeymap_head_ptr, rkeymap_n_ptr, rnchild);
516        std::ptr::copy::<X>(rvalmap_head_ptr, rvalmap_n_ptr, rnchild);
517
518        // move n k,v pairs from tail of left to head of right
519        std::ptr::copy::<K>(lkeymap_tailn_ptr, rkeymap_head_ptr, n);
520        std::ptr::copy::<X>(lvalmap_tailn_ptr, rvalmap_head_ptr, n);
521
522        }
523
524        lnchild -= n;
525        rnchild += n;
526
527        left.set_nchild_use_p(lnchild);
528        right.set_nchild_use_p(rnchild);
529    }
530
531    pub fn move_right(left: &BtreeNode<K, V, P>, right: &BtreeNode<K, V, P>, n: usize) {
532        if left.is_leaf() && right.is_leaf() {
533            Self::do_move_right::<V>(left, right, n);
534        } else if !left.is_leaf() && !right.is_leaf() {
535            Self::do_move_right::<P>(left, right, n);
536        } else {
537            panic!("left node is leaf {}, right node is leaf {}, not consistent", left.is_leaf(), right.is_leaf());
538        }
539    }
540
541    // lookup key
542    // @return:
543    //   - (found, index)
544    //   - (notfound, index)
545    pub fn lookup(&self, key: &K) -> (bool, usize) {
546        let mut low: isize = 0;
547        let mut high: isize = (self.header.nchildren - 1) as isize;
548        let mut s = false;
549        let mut index = 0;
550
551        while low <= high {
552            index = (low + high) / 2;
553            let nkey = self.get_key(index as usize);
554            if nkey == key {
555                return (true, index as usize);
556            } else if nkey < key {
557                low = index + 1;
558                s = false;
559            } else {
560                high = index - 1;
561                s = true;
562            }
563        }
564
565        if self.get_level() > BTREE_NODE_LEVEL_MIN {
566            if s && index > 0 {
567                index -= 1;
568            }
569        } else if s == false {
570            index += 1;
571        }
572
573        return (false, index as usize);
574    }
575
576    // insert key val @ index
577    pub fn insert<X>(&self, index: usize, key: &K, val: &X) {
578        let mut nchild = self.get_nchild();
579
580        if index < nchild {
581            unsafe {
582                let ksrc: *const K = &self.keymap[index] as *const K;
583                let vsrc: *const X = (self.valptr as *const X).add(index);
584
585                let kdst: *mut K = ptr::addr_of!(self.keymap[index + 1]) as *mut K;
586                let vdst: *mut X = (self.valptr as *mut X).add(index + 1);
587
588                let count = nchild - index;
589
590                std::ptr::copy::<K>(ksrc, kdst, count);
591                std::ptr::copy::<X>(vsrc, vdst, count);
592            }
593        }
594
595        self.set_key(index, key);
596        self.set_val(index, val);
597        nchild += 1;
598        self.set_nchild(nchild);
599    }
600
601    // delete key val @ index
602    pub fn delete<X: Copy>(&self, index: usize, key: &mut K, val: &mut X) {
603        let mut nchild = self.get_nchild();
604
605        *key = *self.get_key(index);
606        *val = *self.get_val(index);
607
608        if index < nchild - 1 {
609            unsafe {
610                let ksrc: *const K = &self.keymap[index + 1] as *const K;
611                let vsrc: *const X = (self.valptr as *const X).add(index + 1);
612
613                let kdst: *mut K = ptr::addr_of!(self.keymap[index]) as *mut K;
614                let vdst: *mut X = (self.valptr as *mut X).add(index);
615
616                let count = nchild - index - 1;
617
618                std::ptr::copy::<K>(ksrc, kdst, count);
619                std::ptr::copy::<X>(vsrc, vdst, count);
620            }
621        }
622
623        nchild -= 1;
624        self.set_nchild(nchild);
625    }
626}
627
impl<'a, K, V, P> Drop for BtreeNode<'a, K, V, P> {
    fn drop(&mut self) {
        // nodes built via from_slice do not own their buffer (ptr is null)
        if self.ptr.is_null() {
            return;
        }
        // SAFETY: ptr was obtained from alloc_zeroed with this exact size/alignment
        if let Ok(layout) = std::alloc::Layout::from_size_align(self.size, MIN_ALIGNED) {
            unsafe { std::alloc::dealloc(self.ptr as *mut u8, layout) };
        }
    }
}
638
639impl<'a, K, V, P> PartialEq for BtreeNode<'a, K, V, P> {
640    fn eq(&self, other: &Self) -> bool {
641        std::ptr::addr_of!(self.header) == std::ptr::addr_of!(other.header)
642    }
643}
644
645impl<'a, K, V, P> fmt::Display for BtreeNode<'a, K, V, P>
646    where
647        K: Copy + fmt::Display + std::cmp::PartialOrd,
648        V: Copy + fmt::Display + NodeValue,
649        P: Copy + fmt::Display + NodeValue,
650{
651    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
652        if self.is_large() {
653            write!(f, "===== dump btree node @{:?} ROOT ====\n", self.header as *const NodeHeader)?;
654        } else {
655            write!(f, "===== dump btree node @{:?} id {} ====\n", self.header as *const NodeHeader, self.id())?;
656        }
657        write!(f, "  flags: {},  level: {}, nchildren: {}, capacity: {}, is leaf: {}\n",
658            self.header.flags, self.header.level, self.header.nchildren, self.capacity, self.is_leaf())?;
659        for idx in 0..self.header.nchildren.into() {
660            if self.is_leaf() {
661                write!(f, "{:3}   {:20}   {:20}\n", idx, self.get_key(idx), self.get_val::<K>(idx))?;
662            } else {
663                write!(f, "{:3}   {:20}   {:20}\n", idx, self.get_key(idx), self.get_val::<P>(idx))?;
664            }
665        }
666        write!(f, "")
667    }
668}
669
/// direct node descriptor for memory pointer, normally a tiny memory buffer
///
/// SAFETY:
///   node operations in immutable by unsafe code,
///   this works because all ops for same node are expected to be ran in a single thread
///
#[derive(Debug)]
#[repr(C, align(8))]
pub struct DirectNode<'a, V> {
    header: &'a mut NodeHeader, // header view at the start of the backing buffer
    valmap: &'a mut [V],        // value array directly after the header (no key area)
    capacity: usize,            // number of V slots in the buffer
    ptr: *const u8,             // owning pointer when self-allocated; null when borrowed
    size: usize,                // total byte size of the backing buffer
    dirty: bool,                // in-memory modified flag
    _pin: PhantomPinned,        // valmap points into our own buffer; keep the descriptor unmovable
}
687
#[cfg(feature = "arc")]
// SAFETY(review): raw pointer fields make this type !Send/!Sync by default; these
// opt-ins rely on the struct's documented single-thread-per-node usage — TODO
// confirm callers behind the "arc" feature uphold that contract.
unsafe impl<'a, V> Send for DirectNode<'a, V> {}
#[cfg(feature = "arc")]
unsafe impl<'a, V> Sync for DirectNode<'a, V> {}
692
693impl<'a, V> DirectNode<'a, V>
694    where
695        V: Copy + fmt::Display
696{
    /// Builds a direct-node view over an existing buffer without taking ownership
    /// (`ptr` stays null, so Drop will not free `buf`).
    ///
    /// # Panics
    /// - if `buf` is smaller than `NodeHeader`
    /// - if the header's `nchildren` exceeds the computed capacity
    pub fn from_slice(buf: &[u8]) -> Self {
        let len = buf.len();
        let hdr_size = std::mem::size_of::<NodeHeader>();
        if len < hdr_size {
            panic!("input buf size {} smaller than a valid btree node header size {}", len, hdr_size);
        }

        // NOTE(review): casts away constness; the buffer must actually be writable
        // and properly aligned for NodeHeader/V — TODO confirm at call sites
        let ptr = buf.as_ptr() as *mut u8;
        let header = unsafe {
            ptr.cast::<NodeHeader>().as_mut().unwrap()
        };

        let val_size = std::mem::size_of::<V>();
        let capacity = (len - hdr_size) / val_size;
        assert!(capacity >= header.nchildren as usize,
            "nchildren in header is large than it's capacity {} > {}", header.nchildren, capacity);

        // values start right after the header; direct nodes carry no key area
        let valmap = unsafe {
            std::slice::from_raw_parts_mut(ptr.add(hdr_size) as *mut V, capacity)
        };

        Self {
            header: header,
            valmap: valmap,
            capacity: capacity,
            ptr: std::ptr::null(),  // not owned; Drop is a no-op for this node
            size: len,
            dirty: false,
            _pin: PhantomPinned,
        }
    }
728
729    pub fn new(size: usize) -> Option<Self> {
730        if let Ok(aligned_layout) = std::alloc::Layout::from_size_align(size, MIN_ALIGNED) {
731            let ptr = unsafe { std::alloc::alloc_zeroed(aligned_layout) };
732            if ptr.is_null() {
733                return None;
734            }
735
736            let data = unsafe { std::slice::from_raw_parts(ptr, size) };
737            let mut node = Self::from_slice(data);
738            node.ptr = ptr;
739            return Some(node);
740        };
741        None
742    }
743
744    pub fn copy_from_slice(buf: &[u8]) -> Option<Self> {
745        let size = buf.len();
746        if let Some(n) = Self::new(size) {
747            // copy data from buf to inner data
748            let data = n.as_mut();
749            data.copy_from_slice(buf);
750            return Some(n);
751        }
752        None
753    }
754
    /// Writes flags, level and child count into the header in one shot
    /// (volatile stores through `&self`, per the single-thread contract;
    /// flags/level truncate to u8, nchild to u16).
    #[inline]
    pub fn init(&self, flags: usize, level: usize, nchild: usize) {
        unsafe {
            let ptr = ptr::addr_of!(self.header.flags) as *mut u8;
            ptr::write_volatile(ptr, flags as u8);
            let ptr = ptr::addr_of!(self.header.level) as *mut u8;
            ptr::write_volatile(ptr, level as u8);
            let ptr = ptr::addr_of!(self.header.nchildren) as *mut u16;
            ptr::write_volatile(ptr, nchild as u16);
        }
    }
766
    /// Borrow the value slot at `index` (bounded by capacity).
    #[inline]
    pub fn get_val(&self, index: usize) -> &V {
        &self.valmap[index]
    }

    /// Overwrite the value slot at `index` through `&self` (single-thread contract).
    #[inline]
    pub fn set_val(&self, index: usize, val: &V) {
        unsafe {
            ptr::copy_nonoverlapping(
                ptr::addr_of!(*val),
                ptr::addr_of!(self.valmap[index]) as *mut V,
                1
            )
        }
    }
782
    /// Read the opaque 32-bit user field from the header.
    #[inline]
    pub fn get_userdata(&self) -> u32 {
        self.header.userdata
    }

    /// Store the opaque 32-bit user field (volatile write through `&self`).
    #[inline]
    pub fn set_userdata(&self, data: u32) {
        let ptr = ptr::addr_of!(self.header.userdata) as *mut u32;
        unsafe {
            ptr::write_volatile(ptr, data);
        }
    }

    /// Number of V slots in this node.
    #[inline]
    pub fn get_capacity(&self) -> usize {
        self.capacity
    }
800
    /// Raw byte view of the owned backing buffer.
    ///
    /// NOTE(review): `self.ptr` is null for nodes built via `from_slice`; calling
    /// this on such a node would be UB — TODO confirm callers only use owned nodes.
    pub fn as_ref(&self) -> &[u8] {
        unsafe {
            std::slice::from_raw_parts(self.ptr as *const u8, self.size)
        }
    }

    /// Mutable raw byte view of the owned backing buffer.
    ///
    /// NOTE(review): hands out `&mut` from `&self`; relies on the single-thread
    /// usage contract documented on the struct. Same null-`ptr` caveat as `as_ref`.
    pub fn as_mut(&self) -> &mut [u8] {
        unsafe {
            std::slice::from_raw_parts_mut(self.ptr as *mut u8, self.size)
        }
    }
812}
813
impl<'a, V> Drop for DirectNode<'a, V> {
    fn drop(&mut self) {
        // nodes built via from_slice do not own their buffer (ptr is null)
        if self.ptr.is_null() {
            return;
        }
        // SAFETY: ptr was obtained from alloc_zeroed with this exact size/alignment
        if let Ok(layout) = std::alloc::Layout::from_size_align(self.size, MIN_ALIGNED) {
            unsafe { std::alloc::dealloc(self.ptr as *mut u8, layout) };
        }
    }
}
824
impl<'a, V> fmt::Display for DirectNode<'a, V>
    where
        V: Copy + fmt::Display
{
    /// Dumps the header and every slot up to capacity (direct nodes are
    /// index-addressed, so all slots are printed, not just `nchildren`).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "===== dump direct node @{:?} ====\n", self.header as *const NodeHeader)?;
        write!(f, "  flags: {},  level: {}, nchildren: {}, capacity: {}\n",
            self.header.flags, self.header.level, self.header.nchildren, self.capacity)?;
        for idx in 0..self.capacity {
            write!(f, "{:3}   {:20}   {:20}\n", idx, idx, self.get_val(idx))?;
        }
        write!(f, "")
    }
}
839
#[cfg(test)]
mod tests {
    use super::*;

    // TODO(review): placeholder — no assertions yet; exercising BtreeNode/DirectNode
    // here needs a concrete NodeHeader layout and a NodeValue fixture type.
    #[test]
    fn node() {
    }
}