use crate::internal::gc_info::GCInfoIndex;
use modular_bitfield::prelude::*;
use std::mem::size_of;

/// HeapObjectHeader contains metadata per object and is prepended to each
/// object.
///```text
/// +-----------------+------+------------------------------------------+
/// | name            | bits |                                          |
/// +-----------------+------+------------------------------------------+
/// | padding         |   32 | Only present on 64-bit platform.         |
/// +-----------------+------+------------------------------------------+
/// | GCInfoIndex     |   14 |                                          |
/// | unused          |    1 |                                          |
/// | unused          |    1 | In construction encoded as |false|.      |
/// +-----------------+------+------------------------------------------+
/// | size            |   13 | 16 bits because allocations are aligned. |
/// | pinned (unused) |    1 |                                          |
/// | cell state      |    2 |                                          |
/// +-----------------+------+------------------------------------------+
///```
/// Notes:
/// - See [GCInfoTable](crate::gc_info_table::GCInfoTable) for constraints on GCInfoIndex.
/// - `size` for regular objects is encoded with 13 bits but can actually
///   represent sizes up to 2^16 bytes because allocations are always
///   8 byte aligned (see [ALLOCATION_GRANULARITY]).
/// - `size` for large objects is encoded as 0. The size of a large object is
///   stored in [PreciseAllocation::cell_size](crate::large_space::PreciseAllocation::cell_size).
#[derive(Clone, Copy)]
#[repr(C)]
pub struct HeapObjectHeader {
    #[cfg(target_pointer_width = "64")]
    _padding: u32,
    encoded_high: EncodedHigh,
    encoded_low: u16,
}

pub const ALLOCATION_GRANULARITY: usize = size_of::<usize>();
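
// Layout sanity check (an assumption derived from the table above, not part of
// the original code): on a 64-bit target the header should occupy exactly
// 8 bytes (4 bytes of padding plus the two 16 bit encoded words).
#[cfg(target_pointer_width = "64")]
const _: () = assert!(size_of::<HeapObjectHeader>() == 8);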

impl HeapObjectHeader {
    /// Checks if the allocation is "precise". Precise allocations are large allocations
    /// that have a larger header stored behind the `HeapObjectHeader`.
    pub fn is_precise(&self) -> bool {
        self.get_size() == 0
    }
    /// Marks this header as free by clearing the GC info index.
    pub fn set_free(&mut self) {
        self.encoded_low = 0;
    }
    /// Returns the heap object header for the object pointer `obj`. The pointer must be
    /// valid and point to a payload that is preceded by a `HeapObjectHeader`.
    #[inline(always)]
    pub unsafe fn from_object(obj: *const u8) -> *mut Self {
        (obj as usize - size_of::<Self>()) as _
    }
    /// Returns the payload of this heap object header, i.e. the object stored right after the header.
    #[inline(always)]
    pub fn payload(&self) -> *const u8 {
        (self as *const Self as usize + size_of::<Self>()) as _
    }
    /// Returns the [GCInfoIndex] of the allocated object.
    #[inline(always)]
    pub fn get_gc_info_index(&self) -> GCInfoIndex {
        debug_assert!(
            self.encoded_low > 0,
            "Trying to access non-allocated header {:p} (idx {})",
            self,
            self.encoded_low
        );
        GCInfoIndex(self.encoded_low)
    }
    /// Returns the size of the object. If it is allocated in large object space, `0` is returned.
    #[inline(always)]
    pub fn get_size(self) -> usize {
        let size = self.encoded_high.size();
        size as usize * ALLOCATION_GRANULARITY
    }
    /// Sets the size of the object. The size is stored in units of [ALLOCATION_GRANULARITY].
    #[inline(always)]
    pub fn set_size(&mut self, size: usize) {
        self.encoded_high
            .set_size((size / ALLOCATION_GRANULARITY) as _);
    }
    /// Returns `true` if the cell state is [CellState::PossiblyGrey].
    #[inline(always)]
    pub fn is_grey(self) -> bool {
        self.encoded_high.state() == CellState::PossiblyGrey
    }
    /// Returns `true` if the cell state is [CellState::DefinitelyWhite].
    #[inline(always)]
    pub fn is_white(self) -> bool {
        self.encoded_high.state() == CellState::DefinitelyWhite
    }
    /// Returns `true` if the cell state is [CellState::PossiblyBlack].
    #[inline(always)]
    pub fn is_black(self) -> bool {
        self.encoded_high.state() == CellState::PossiblyBlack
    }

    /// Transitions the cell state from `current` to `new`. Returns `false` (and leaves
    /// the state untouched) if the current state does not match `current`.
    #[inline(always)]
    pub fn set_state(&mut self, current: CellState, new: CellState) -> bool {
        if self.encoded_high.state() != current {
            return false;
        }
        self.encoded_high.set_state(new);
        debug_assert_eq!(self.state(), new);
        true
    }
    /// Sets the cell state unconditionally.
    #[inline(always)]
    pub fn force_set_state(&mut self, state: CellState) {
        self.encoded_high.set_state(state);
    }
    /// Sets the [GCInfoIndex] of the object.
    #[inline(always)]
    pub fn set_gc_info(&mut self, index: GCInfoIndex) {
        self.encoded_low = index.0;
    }
    /// Returns `true` if this header denotes a free cell, i.e. no [GCInfoIndex] has been set.
    #[inline(always)]
    pub fn is_free(&self) -> bool {
        // Read the raw index instead of going through `get_gc_info_index()`,
        // whose debug assertion would fire for free cells.
        self.encoded_low == 0
    }

    /// Returns the current cell (marking) state.
    #[inline]
    pub fn state(&self) -> CellState {
        self.encoded_high.state()
    }
}

/// High 16 bits of the header: a 13 bit size (in [ALLOCATION_GRANULARITY] units),
/// one (currently unused) pinned bit and 2 bits of cell state.
#[bitfield(bits = 16)]
#[derive(Clone, Copy)]
pub struct EncodedHigh {
    size: B13,
    #[allow(dead_code)]
    pinned: B1,
    #[bits = 2]
    state: CellState,
}

/// Tri-color marking state of a cell.
#[derive(Clone, Copy, PartialEq, Eq, Debug, PartialOrd, Ord, BitfieldSpecifier)]
#[bits = 2]
pub enum CellState {
    /// Not yet visited by the collector.
    DefinitelyWhite,
    /// Visited, but its fields may not have been traced yet.
    PossiblyGrey,
    /// Visited and traced; treated as live for the current cycle.
    PossiblyBlack,
}
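
// A minimal sketch of how the header API fits together: size/GC-info
// round-trips, tri-color state transitions and the header/payload address
// relation. It assumes an all-zero header is a valid representation of a
// free, white cell of size 0.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn size_and_gc_info_round_trip() {
        // An all-zero header encodes a free cell with size 0 (i.e. "precise").
        let mut header: HeapObjectHeader = unsafe { core::mem::zeroed() };
        assert!(header.is_free());
        assert!(header.is_precise());

        // Sizes are stored in ALLOCATION_GRANULARITY units and round-trip.
        header.set_size(64);
        assert_eq!(header.get_size(), 64);

        header.set_gc_info(GCInfoIndex(1));
        assert_eq!(header.get_gc_info_index().0, 1);
        assert!(!header.is_free());
    }

    #[test]
    fn state_transitions() {
        let mut header: HeapObjectHeader = unsafe { core::mem::zeroed() };
        assert!(header.is_white());

        // White -> grey succeeds; a second white -> black attempt fails
        // because the current state no longer matches.
        assert!(header.set_state(CellState::DefinitelyWhite, CellState::PossiblyGrey));
        assert!(header.is_grey());
        assert!(!header.set_state(CellState::DefinitelyWhite, CellState::PossiblyBlack));

        header.force_set_state(CellState::PossiblyBlack);
        assert!(header.is_black());
    }

    #[test]
    fn payload_and_from_object_are_inverses() {
        let header: HeapObjectHeader = unsafe { core::mem::zeroed() };
        let payload = header.payload();
        unsafe {
            assert_eq!(
                HeapObjectHeader::from_object(payload),
                &header as *const HeapObjectHeader as *mut HeapObjectHeader
            );
        }
    }
}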