//! eta_algorithms/data_structs/bitmap/atomic_bitmap.rs
use std::alloc::Layout;
use std::sync::atomic::{AtomicUsize, Ordering};

use crate::data_structs::bitmap::consts::{DIV_SHIFT, MASK};

/// Memory-ordering mode used by [`AtomicBitmap`] operations.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Mode {
    /// Use `Ordering::Relaxed`: atomicity only, no cross-thread
    /// happens-before synchronization.
    Relaxed,
    /// Use acquire/release orderings so a read observing a write also
    /// observes everything that happened before that write.
    Strict,
}

/// A fixed-capacity bitmap whose individual bits are read and written via
/// `AtomicUsize` operations.
///
/// Storage is a manually allocated array of `AtomicUsize` words that is
/// freed in `Drop`.
pub struct AtomicBitmap {
    // Raw pointer to the first word of the manually allocated array.
    data: *mut AtomicUsize,
    // Number of addressable bits; valid indices are `0..bit_capacity`.
    bit_capacity: usize,
    // Number of `AtomicUsize` words allocated (word capacity, not bits).
    capacity: usize,
    // Layout used for the allocation; needed again to deallocate in `Drop`.
    layout: Layout,
}

impl AtomicBitmap {
    /// Creates a bitmap able to hold at least `bit_count` bits, all
    /// initially `false`.
    ///
    /// One word beyond `bit_count >> DIV_SHIFT` is always allocated,
    /// preserving the original capacity formula.
    ///
    /// # Panics
    /// Panics if the layout cannot be computed; aborts (via
    /// `handle_alloc_error`) if the allocation fails.
    pub fn new(bit_count: usize) -> Self {
        let size = (bit_count >> DIV_SHIFT) + 1;
        let layout = Layout::array::<AtomicUsize>(size).expect("Failed to create layout");
        // alloc_zeroed both clears every bit and avoids forming references
        // to uninitialized AtomicUsize values (zeroed bytes are a valid
        // AtomicUsize with value 0).
        let data = unsafe { std::alloc::alloc_zeroed(layout) as *mut AtomicUsize };
        if data.is_null() {
            // Allocation failure must not yield a dangling bitmap.
            std::alloc::handle_alloc_error(layout);
        }
        AtomicBitmap {
            data,
            capacity: size,
            bit_capacity: bit_count,
            layout,
        }
    }

    /// Returns the indices of all bits currently set to `true`.
    ///
    /// Not a consistent snapshot: bits are read one at a time, so concurrent
    /// writers may be partially observed.
    pub fn to_indices_true(&self, mode: Mode) -> Vec<usize> {
        // SAFETY: every index produced by the range is < bit_capacity.
        (0..self.bit_capacity)
            .filter(|&i| unsafe { self.get_unchecked(i, mode) })
            .collect()
    }

    /// Returns the indices of all bits currently set to `false`.
    ///
    /// Same non-snapshot caveat as [`Self::to_indices_true`].
    pub fn to_indices_false(&self, mode: Mode) -> Vec<usize> {
        // SAFETY: every index produced by the range is < bit_capacity.
        (0..self.bit_capacity)
            .filter(|&i| !unsafe { self.get_unchecked(i, mode) })
            .collect()
    }

    /// Number of addressable bits.
    #[inline(always)]
    pub fn bit_capacity(&self) -> usize {
        self.bit_capacity
    }

    /// Number of `AtomicUsize` words backing the bitmap.
    #[inline(always)]
    pub fn capacity(&self) -> usize {
        self.capacity
    }

    /// Atomically sets the bit at `bit_index` to `value`.
    ///
    /// # Panics
    /// Panics if `bit_index >= bit_capacity()`.
    #[inline(always)]
    pub fn set(&self, bit_index: usize, value: bool, mode: Mode) {
        if bit_index >= self.bit_capacity {
            panic!("Bit index out of bounds");
        }
        // SAFETY: the bounds check above guarantees the index is valid.
        unsafe { self.set_unchecked(bit_index, value, mode) }
    }

    /// Atomically reads the bit at `bit_index`; `None` if out of bounds.
    #[inline(always)]
    pub fn get(&self, bit_index: usize, mode: Mode) -> Option<bool> {
        if bit_index >= self.bit_capacity {
            return None;
        }
        // SAFETY: the bounds check above guarantees the index is valid.
        Some(unsafe { self.get_unchecked(bit_index, mode) })
    }

    /// Atomically sets the bit at `bit_index` to `value` without bounds
    /// checking.
    ///
    /// Uses `fetch_or`/`fetch_and` so concurrent writers updating different
    /// bits of the same word cannot lose each other's updates (the previous
    /// load-then-store sequence was not atomic as a whole).
    ///
    /// # Safety
    /// `bit_index` must be less than `bit_capacity()`.
    #[inline(always)]
    pub unsafe fn set_unchecked(&self, bit_index: usize, value: bool, mode: Mode) {
        let offset = bit_index >> DIV_SHIFT;
        let mask = 1usize << (bit_index & MASK);
        // AcqRel is at least as strong as the Acquire-load/Release-store
        // pair previously used for Strict mode.
        let ordering = match mode {
            Mode::Relaxed => Ordering::Relaxed,
            Mode::Strict => Ordering::AcqRel,
        };
        // SAFETY: caller guarantees bit_index < bit_capacity, hence
        // offset < capacity and the word is in-bounds and initialized.
        let word = unsafe { &*self.data.add(offset) };
        if value {
            word.fetch_or(mask, ordering);
        } else {
            word.fetch_and(!mask, ordering);
        }
    }

    /// Atomically reads the bit at `bit_index` without bounds checking.
    ///
    /// # Safety
    /// `bit_index` must be less than `bit_capacity()`.
    #[inline(always)]
    pub unsafe fn get_unchecked(&self, bit_index: usize, mode: Mode) -> bool {
        let offset = bit_index >> DIV_SHIFT;
        let mask = 1usize << (bit_index & MASK);
        let ordering = match mode {
            Mode::Relaxed => Ordering::Relaxed,
            Mode::Strict => Ordering::Acquire,
        };
        // SAFETY: caller guarantees bit_index < bit_capacity, hence
        // offset < capacity and the word is in-bounds and initialized.
        unsafe { (*self.data.add(offset)).load(ordering) & mask != 0 }
    }
}

impl Drop for AtomicBitmap {
    /// Frees the manually allocated word array.
    fn drop(&mut self) {
        // SAFETY: `data` was allocated in `new` with exactly `self.layout`,
        // and `drop` runs at most once, so this is the unique deallocation.
        unsafe {
            std::alloc::dealloc(self.data as *mut u8, self.layout);
        }
    }
}

// SAFETY: the raw pointer is owned exclusively by this struct (allocated in
// `new`, freed only in `drop`, never exposed), and all reads/writes through
// it go via `AtomicUsize` operations, so the bitmap may be transferred to
// and shared between threads.
// NOTE(review): soundness depends on no method ever handing out `data` —
// re-verify if the API grows.
unsafe impl Send for AtomicBitmap {}

unsafe impl Sync for AtomicBitmap {}