use std::cell::UnsafeCell;
use std::cmp::Ordering;
use std::fmt::{self as StdFmt, Debug, Formatter};
use std::ptr as StdPtr;
use std::sync::atomic::{AtomicU8, AtomicU16, AtomicU32, Ordering as AtomicOrdering};
use super::{SuffixBag, TreePermutation};
/// Number of suffix slots the bag tracks.
const WIDTH: usize = 15;
/// Size in bytes of the inline suffix data buffer (default build).
#[cfg(not(feature = "small-suffix-capacity"))]
const CAPACITY: usize = 512;
/// Size in bytes of the inline suffix data buffer (reduced-footprint build).
#[cfg(feature = "small-suffix-capacity")]
const CAPACITY: usize = 256;
/// `u16::MAX` widened to `usize`, for comparing lengths against the packed u16 field.
const U16_MAX: usize = u16::MAX as usize;
/// Ordering for slot-metadata loads (pairs with the Release stores below).
const READ_ORD: AtomicOrdering = AtomicOrdering::Acquire;
/// Ordering for slot-metadata stores.
const WRITE_ORD: AtomicOrdering = AtomicOrdering::Release;
/// Ordering for the size/count counters, which publish no data themselves.
const RELAXED: AtomicOrdering = AtomicOrdering::Relaxed;
/// Per-slot metadata: where a suffix lives in the inline data buffer and
/// how long it is. Packs into a single `u32` so it can be held in an atomic.
#[derive(Clone, Copy, Debug)]
struct InlineSlotMeta {
    // Byte offset into the data buffer; `u16::MAX` is the "empty slot" sentinel.
    offset: u16,
    // Suffix length in bytes.
    len: u16,
}
impl InlineSlotMeta {
    /// Sentinel metadata for a slot holding no suffix.
    const EMPTY: Self = Self {
        offset: u16::MAX,
        len: 0,
    };
    /// Packed form of [`Self::EMPTY`], precomputed for atomic initialization.
    const EMPTY_PACKED: u32 = Self::EMPTY.pack();
    /// True when this slot holds a suffix (i.e. offset is not the sentinel).
    #[inline(always)]
    const fn has_suffix(self) -> bool {
        self.offset != u16::MAX
    }
    /// Packs both fields into one word: offset in the high half, len in the low half.
    #[inline(always)]
    const fn pack(self) -> u32 {
        let high: u32 = (self.offset as u32) << 16;
        let low: u32 = self.len as u32;
        high | low
    }
    /// Inverse of [`Self::pack`]: splits a packed word back into offset/len.
    #[inline(always)]
    #[expect(
        clippy::cast_possible_truncation,
        reason = "Intentional: extracting u16 fields from packed u32"
    )]
    const fn unpack(word: u32) -> Self {
        Self {
            offset: (word >> 16) as u16,
            len: word as u16,
        }
    }
}
impl Default for InlineSlotMeta {
#[inline(always)]
fn default() -> Self {
Self::EMPTY
}
}
/// Fixed-capacity inline suffix storage: up to `WIDTH` suffixes are
/// bump-allocated out of a `CAPACITY`-byte buffer, with each slot's packed
/// offset/len metadata held in an atomic word.
///
/// NOTE(review): the `UnsafeCell` field makes this type `!Sync` unless an
/// `unsafe impl Sync` exists elsewhere in the file/crate — verify before
/// assuming cross-thread sharing is intended.
#[repr(C)]
pub struct InlineSuffixBag {
    // Packed `InlineSlotMeta` per slot (offset << 16 | len); an offset of
    // u16::MAX marks the slot empty.
    slots: [AtomicU32; WIDTH],
    // Bump-allocation watermark: bytes of `data` consumed so far. Only
    // reset by `clear_all`.
    size: AtomicU16,
    // Number of slots currently holding a suffix.
    suffix_count: AtomicU8,
    // Explicit padding byte for the `repr(C)` layout.
    _pad: u8,
    // Raw suffix bytes; interior mutability allows writes through `&self`.
    data: UnsafeCell<[u8; CAPACITY]>,
}
impl Debug for InlineSuffixBag {
    /// Summarizes the counters and capacity; the raw buffer is elided.
    fn fmt(&self, f: &mut Formatter<'_>) -> StdFmt::Result {
        let size: u16 = self.size.load(RELAXED);
        let count: u8 = self.suffix_count.load(RELAXED);
        let mut builder = f.debug_struct("InlineSuffixBag");
        builder.field("size", &size);
        builder.field("suffix_count", &count);
        builder.field("capacity", &CAPACITY);
        builder.finish_non_exhaustive()
    }
}
#[allow(dead_code)]
impl InlineSuffixBag {
    /// Packed "empty slot" sentinel used to initialize every slot in `new`.
    const EMPTY_PACKED: u32 = InlineSlotMeta::EMPTY_PACKED;

    /// Creates a bag with every slot empty and a zeroed data buffer.
    #[must_use]
    #[inline(always)]
    pub const fn new() -> Self {
        Self {
            slots: [const { AtomicU32::new(Self::EMPTY_PACKED) }; WIDTH],
            size: AtomicU16::new(0),
            suffix_count: AtomicU8::new(0),
            _pad: 0,
            data: UnsafeCell::new([0u8; CAPACITY]),
        }
    }

    /// Loads and unpacks the metadata word for `slot` (Acquire).
    #[inline(always)]
    #[expect(clippy::indexing_slicing, reason = "Bounds checked via debug_assert")]
    fn load_meta(&self, slot: usize) -> InlineSlotMeta {
        debug_assert!(slot < WIDTH, "slot {slot} >= WIDTH {WIDTH}");
        let packed: u32 = self.slots[slot].load(READ_ORD);
        InlineSlotMeta::unpack(packed)
    }

    /// Packs and stores the metadata word for `slot` (Release).
    #[inline(always)]
    #[expect(clippy::indexing_slicing, reason = "Bounds checked via debug_assert")]
    fn store_meta(&self, slot: usize, meta: InlineSlotMeta) {
        debug_assert!(slot < WIDTH, "slot {slot} >= WIDTH {WIDTH}");
        self.slots[slot].store(meta.pack(), WRITE_ORD);
    }

    /// Total byte capacity of the inline buffer (compile-time constant).
    #[must_use]
    #[inline(always)]
    #[expect(clippy::unused_self)]
    pub const fn capacity(&self) -> usize {
        CAPACITY
    }

    /// Bytes consumed by the bump allocator so far. Includes space leaked
    /// by cleared or shrunk slots (see `try_assign` / `clear`).
    #[must_use]
    #[inline(always)]
    pub fn used(&self) -> usize {
        self.size.load(RELAXED) as usize
    }

    /// Bytes still available for new inline assignments.
    #[must_use]
    #[inline(always)]
    pub fn remaining(&self) -> usize {
        CAPACITY - self.used()
    }

    /// Number of slots currently holding a suffix.
    #[must_use]
    #[inline(always)]
    pub fn count(&self) -> usize {
        self.suffix_count.load(RELAXED) as usize
    }

    /// Shared drain logic: copies every live suffix among `active_slots`
    /// (except `new_slot`'s, which is being replaced) plus `new_suffix`
    /// into an external `SuffixBag`, sized with headroom for the slots
    /// that are still empty.
    #[expect(clippy::indexing_slicing)]
    fn drain_core(
        &self,
        active_slots: impl Iterator<Item = usize>,
        new_slot: usize,
        new_suffix: &[u8],
        buffer: Option<Vec<u8>>,
    ) -> SuffixBag {
        // Minimum bytes the external bag must hold: the new suffix plus
        // every surviving suffix accumulated in the loop below.
        let mut required_capacity: usize = new_suffix.len();
        // (slot, start offset, length) triples queued for copying.
        let mut slots_to_copy: [(usize, usize, usize); WIDTH] = [(0, 0, 0); WIDTH];
        let mut copy_count: usize = 0;
        // SAFETY: shared read of the inline buffer through the UnsafeCell;
        // sound only if no writer mutates it concurrently — assumes the
        // caller provides external synchronization (TODO confirm).
        let data: &[u8; CAPACITY] = unsafe { &*self.data.get() };
        for slot in active_slots {
            // Skip the slot being replaced and ignore out-of-range indices.
            if (slot != new_slot) && (slot < WIDTH) {
                let meta: InlineSlotMeta = self.load_meta(slot);
                if meta.has_suffix() {
                    let start: usize = meta.offset as usize;
                    let len: usize = meta.len as usize;
                    required_capacity += len;
                    if copy_count < WIDTH {
                        slots_to_copy[copy_count] = (slot, start, len);
                        copy_count += 1;
                    }
                }
            }
        }
        // Heuristic: assume suffixes yet to be assigned will match the
        // average length of those seen so far (or of the new suffix when
        // nothing else is stored).
        let avg_suffix: usize = if copy_count > 0 {
            required_capacity / (copy_count + 1)
        } else {
            new_suffix.len()
        };
        // copy_count < WIDTH is guaranteed above, so this cannot underflow.
        let headroom: usize = (WIDTH - (copy_count + 1)) * avg_suffix;
        let target_capacity: usize = required_capacity + headroom;
        // Reuse the caller-provided buffer when present, growing it only
        // if its existing capacity falls short of the target.
        let mut external: SuffixBag = match buffer {
            Some(vec) => {
                let mut bag: SuffixBag = SuffixBag::from_vec(vec);
                if bag.capacity() < target_capacity {
                    bag.reserve(target_capacity.saturating_sub(bag.used()));
                }
                bag
            }
            None => SuffixBag::with_capacity(target_capacity),
        };
        // Copy the surviving suffixes, then install the replacement for
        // new_slot last.
        for &(slot, start, len) in &slots_to_copy[..copy_count] {
            let suffix: &[u8] = &data[start..(start + len)];
            external.assign(slot, suffix);
        }
        external.assign(new_slot, new_suffix);
        external
    }

    /// Drains into a fresh external bag, visiting the slots named by `perm`.
    pub fn drain_to_external(
        &self,
        perm: &impl TreePermutation,
        new_slot: usize,
        new_suffix: &[u8],
    ) -> SuffixBag {
        self.drain_core(
            (0..perm.size()).map(|i: usize| perm.get(i)),
            new_slot,
            new_suffix,
            None,
        )
    }

    /// Like [`Self::drain_to_external`], but reuses `buffer` as the
    /// external bag's backing storage instead of allocating a new one.
    pub fn drain_to_external_with_vec(
        &self,
        perm: &impl TreePermutation,
        new_slot: usize,
        new_suffix: &[u8],
        buffer: Vec<u8>,
    ) -> SuffixBag {
        self.drain_core(
            (0..perm.size()).map(|i: usize| perm.get(i)),
            new_slot,
            new_suffix,
            Some(buffer),
        )
    }

    /// Drain variant that visits only slots `0..new_slot` — presumably the
    /// only slots that can be occupied on the initial-fill path (TODO
    /// confirm against the caller).
    #[cold]
    pub fn drain_to_external_init(&self, new_slot: usize, new_suffix: &[u8]) -> SuffixBag {
        self.drain_core(0..new_slot, new_slot, new_suffix, None)
    }

    /// True if `slot` currently holds a suffix.
    #[must_use]
    #[inline(always)]
    pub fn has_suffix(&self, slot: usize) -> bool {
        debug_assert!(slot < WIDTH, "slot {slot} >= WIDTH {WIDTH}");
        self.load_meta(slot).has_suffix()
    }

    /// Returns the suffix stored in `slot`, or `None` if the slot is empty.
    ///
    /// The returned slice borrows the inline buffer directly; it remains
    /// valid only while no writer overwrites that region.
    #[must_use]
    #[inline(always)]
    pub fn get(&self, slot: usize) -> Option<&[u8]> {
        debug_assert!(slot < WIDTH, "slot {slot} >= WIDTH {WIDTH}");
        let meta: InlineSlotMeta = self.load_meta(slot);
        if !meta.has_suffix() {
            return None;
        }
        let start: usize = meta.offset as usize;
        let len: usize = meta.len as usize;
        debug_assert!(
            start + len <= CAPACITY,
            "inline suffix metadata points past capacity: {} > {CAPACITY}",
            start + len
        );
        let data_ptr: *const u8 = self.data.get().cast::<u8>();
        // SAFETY: metadata ranges are only ever written by `try_assign`
        // after verifying they fit within CAPACITY, so start..start+len is
        // in bounds; the debug_assert above double-checks in debug builds.
        let suffix: &[u8] = unsafe { std::slice::from_raw_parts(data_ptr.add(start), len) };
        Some(suffix)
    }

    /// Like [`Self::get`], but maps an empty slot to the empty slice.
    #[must_use]
    #[inline(always)]
    pub fn get_or_empty(&self, slot: usize) -> &[u8] {
        self.get(slot).unwrap_or(&[])
    }

    /// Attempts to store `suffix` in `slot` inline. Returns `false` when
    /// the suffix exceeds the u16 length field or the buffer is out of
    /// space (callers then spill to external storage via the drain path).
    ///
    /// Allocation is bump-only: a suffix no longer than the slot's current
    /// one is overwritten in place; otherwise fresh bytes are appended at
    /// the watermark and the old region (if any) is leaked until
    /// `clear_all` or a drain rebuilds storage.
    #[inline]
    pub fn try_assign(&self, slot: usize, suffix: &[u8]) -> bool {
        debug_assert!(slot < WIDTH, "slot {slot} >= WIDTH {WIDTH}");
        let suffix_len: usize = suffix.len();
        // The length must fit the packed u16 field.
        if suffix_len > U16_MAX {
            return false;
        }
        let meta: InlineSlotMeta = self.load_meta(slot);
        let data_ptr: *mut u8 = self.data.get().cast::<u8>();
        // Fast path: in-place overwrite when the new suffix fits in the
        // slot's existing region.
        if meta.has_suffix() && (suffix_len <= (meta.len as usize)) {
            let start: usize = meta.offset as usize;
            // SAFETY: the destination start..start+suffix_len lies within
            // the slot's previously validated allocation; `suffix` is
            // assumed not to alias the inline buffer (TODO confirm).
            unsafe {
                StdPtr::copy_nonoverlapping(suffix.as_ptr(), data_ptr.add(start), suffix_len);
            }
            #[expect(clippy::cast_possible_truncation, reason = "len checked above")]
            self.store_meta(
                slot,
                InlineSlotMeta {
                    offset: meta.offset,
                    len: suffix_len as u16,
                },
            );
            return true;
        }
        // Slow path: bump-allocate at the end of the used region.
        let current_size: usize = self.size.load(RELAXED) as usize;
        if (current_size + suffix_len) <= CAPACITY {
            // SAFETY: current_size + suffix_len <= CAPACITY was just
            // checked, so the write stays inside the buffer.
            unsafe {
                StdPtr::copy_nonoverlapping(
                    suffix.as_ptr(),
                    data_ptr.add(current_size),
                    suffix_len,
                );
            }
            // Only a previously empty slot increases the live-suffix count.
            if !meta.has_suffix() {
                self.suffix_count.fetch_add(1, RELAXED);
            }
            #[expect(
                clippy::cast_possible_truncation,
                reason = "offset and len checked to fit"
            )]
            {
                self.store_meta(
                    slot,
                    InlineSlotMeta {
                        offset: current_size as u16,
                        len: suffix_len as u16,
                    },
                );
                self.size.store((current_size + suffix_len) as u16, RELAXED);
            }
            return true;
        }
        // Neither in-place overwrite nor append fits.
        false
    }

    /// Empties `slot`. The suffix bytes stay in the buffer (leaked) until
    /// `clear_all`; only the metadata word and live count are updated.
    #[inline(always)]
    pub fn clear(&self, slot: usize) {
        debug_assert!(slot < WIDTH, "slot {slot} >= WIDTH {WIDTH}");
        if self.load_meta(slot).has_suffix() {
            self.suffix_count.fetch_sub(1, RELAXED);
        }
        self.store_meta(slot, InlineSlotMeta::EMPTY);
    }

    /// Empties every slot and resets the bump watermark, reclaiming the
    /// whole buffer. The stale byte contents are left as-is.
    #[inline(always)]
    pub fn clear_all(&self) {
        for slot in 0..WIDTH {
            self.store_meta(slot, InlineSlotMeta::EMPTY);
        }
        self.size.store(0, RELAXED);
        self.suffix_count.store(0, RELAXED);
    }

    /// True if `slot` holds a suffix byte-equal to `suffix`; empty slots
    /// never match.
    #[must_use]
    #[inline(always)]
    pub fn suffix_equals(&self, slot: usize, suffix: &[u8]) -> bool {
        self.get(slot).is_some_and(|stored: &[u8]| stored == suffix)
    }

    /// Lexicographically compares the stored suffix against `suffix`;
    /// `None` when the slot is empty.
    #[must_use]
    #[inline(always)]
    pub fn suffix_compare(&self, slot: usize, suffix: &[u8]) -> Option<Ordering> {
        self.get(slot).map(|stored: &[u8]| stored.cmp(suffix))
    }
}
impl Default for InlineSuffixBag {
#[inline(always)]
fn default() -> Self {
Self::new()
}
}
impl Clone for InlineSuffixBag {
#[inline(always)]
fn clone(&self) -> Self {
let data: [u8; CAPACITY] = unsafe { *self.data.get() };
Self {
slots: std::array::from_fn(|i: usize| AtomicU32::new(self.slots[i].load(RELAXED))),
size: AtomicU16::new(self.size.load(RELAXED)),
suffix_count: AtomicU8::new(self.suffix_count.load(RELAXED)),
_pad: 0,
data: UnsafeCell::new(data),
}
}
}