use alloc::boxed::Box;
use core::num::NonZeroU64;
/// A half-open range `[start, end)` of non-zero 64-bit ids.
///
/// An empty range is represented by `start == end`; [`IdRange::proper`]
/// normalizes a range whose `end` is below its `start` into an empty one.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct IdRange {
    /// First id in the range (inclusive).
    pub start: NonZeroU64,
    /// One past the last id in the range (exclusive).
    pub end: NonZeroU64,
}

/// Smallest allocatable id (1).
pub const START: NonZeroU64 = NonZeroU64::MIN;
/// Exclusive upper bound of the id space (`u64::MAX` itself is never handed out).
pub const END: NonZeroU64 = NonZeroU64::MAX;

impl IdRange {
    /// Returns a normalized copy with `end >= start`, so the other
    /// methods' `start <= end` invariant holds.
    pub fn proper(&self) -> Self {
        IdRange {
            start: self.start,
            end: self.end.max(self.start),
        }
    }

    /// Number of ids in the range.
    ///
    /// Saturates to 0 instead of wrapping if the `start <= end` invariant
    /// is violated in a release build (a wrapped count previously let the
    /// unchecked constructors below produce 0 — undefined behavior).
    pub fn count(&self) -> u64 {
        debug_assert!(self.start <= self.end);
        self.end.get().saturating_sub(self.start.get())
    }

    /// Whether the range contains no ids.
    pub fn is_empty(&self) -> bool {
        debug_assert!(self.start <= self.end);
        self.start >= self.end
    }

    /// Returns the `idx`-th id of the range without consuming it, or
    /// `None` if `idx` is out of bounds.
    pub fn get(&self, idx: u64) -> Option<NonZeroU64> {
        if idx >= self.count() {
            return None;
        }
        // In bounds: start + idx < end <= u64::MAX, so the add cannot
        // overflow and the sum is >= start >= 1, hence non-zero.
        self.start.get().checked_add(idx).and_then(NonZeroU64::new)
    }

    /// Consumes up to `count` ids from the front of the range, calling
    /// `f` with each consumed id in ascending order. Returns how many ids
    /// were actually consumed (less than `count` if the range ran out).
    pub fn advance(&mut self, count: u64, mut f: impl FnMut(NonZeroU64)) -> u64 {
        let count = count.min(self.count());
        let first = self.start.get();
        // first + count <= end <= u64::MAX, so this cannot overflow,
        // and it is >= first >= 1, so it stays non-zero.
        let new_start = first + count;
        self.start = NonZeroU64::new(new_start).expect("range start must stay non-zero");
        for raw in first..new_start {
            f(NonZeroU64::new(raw).expect("ids below the new start are non-zero"));
        }
        count
    }

    /// Consumes and returns the first id of the range, or `None` if the
    /// range is empty.
    pub fn take(&mut self) -> Option<NonZeroU64> {
        if self.is_empty() {
            return None;
        }
        let id = self.start;
        // id < end <= u64::MAX, so the increment cannot overflow.
        self.start = id.checked_add(1).expect("range start must not overflow");
        Some(id)
    }
}
/// Hands out `NonZeroU64` ids drawn from ranges supplied by an
/// [`IdRangeAllocator`], keeping one active range plus one prefetched
/// spare so the backing source is only consulted when the active range
/// is exhausted.
pub(super) struct IdAllocator {
    /// Range ids are currently taken from.
    current: IdRange,
    /// Prefetched range promoted to `current` once it runs out.
    next: IdRange,
    /// Source of further ranges.
    range_alloc: Box<dyn IdRangeAllocator>,
}
impl IdAllocator {
    /// Creates an allocator over the whole id space `[START, END)` with
    /// no real backing source: once that initial range is used up, the
    /// dummy source only yields empty ranges and [`Self::next`] returns
    /// `None`.
    pub fn new() -> Self {
        Self {
            current: IdRange { start: START, end: END },
            next: IdRange { start: END, end: END },
            range_alloc: Box::new(DummyAllocator),
        }
    }

    /// Creates an allocator that draws ranges from `range_alloc`,
    /// eagerly fetching both the active range and one spare.
    pub fn with_range_allocator(mut range_alloc: Box<dyn IdRangeAllocator>) -> Self {
        let current = range_alloc.allocate_range().proper();
        let next = range_alloc.allocate_range().proper();
        Self {
            current,
            next,
            range_alloc,
        }
    }

    /// Returns the next id, promoting the spare range and prefetching a
    /// replacement when the active range is exhausted. `None` means the
    /// backing source is dry.
    pub fn next(&mut self) -> Option<NonZeroU64> {
        if self.current.is_empty() {
            self.current = self.next;
            self.next = self.range_alloc.allocate_range().proper();
        }
        self.current.take()
    }

    /// Peeks at the id that would be produced `idx` positions from now,
    /// without consuming anything: first from the active range, then
    /// from the spare.
    pub fn reserve(&self, idx: u64) -> Option<NonZeroU64> {
        match self.current.get(idx) {
            Some(id) => Some(id),
            // `get` returned `None`, so `idx >= current.count()` and the
            // subtraction cannot underflow.
            None => self.next.get(idx - self.current.count()),
        }
    }

    /// Inverse of [`Self::reserve`]: maps an id back to its reservation
    /// index, or `None` if it lies in neither pending range.
    pub fn reserved(&self, id: NonZeroU64) -> Option<u64> {
        let raw = id.get();
        let cur = &self.current;
        if (cur.start.get()..cur.end.get()).contains(&raw) {
            return Some(raw - cur.start.get());
        }
        let spare = &self.next;
        if (spare.start.get()..spare.end.get()).contains(&raw) {
            // Indices in the spare range continue after the active one.
            return Some(raw - spare.start.get() + cur.count());
        }
        None
    }

    /// Consumes `count` previously reserved ids, invoking `f` on each in
    /// ascending order.
    ///
    /// # Safety
    ///
    /// NOTE(review): callers presumably must guarantee that at least
    /// `count` ids are available across `current` and `next` (see the
    /// trailing `debug_assert_eq!`); confirm the exact contract at the
    /// call sites.
    #[inline(always)]
    pub unsafe fn flush_reserved(&mut self, count: u64, mut f: impl FnMut(NonZeroU64)) {
        let mut advanced = self.current.advance(count, &mut f);
        if advanced < count {
            // The active range ran out: drain the remainder from the
            // spare, promote it, and prefetch a fresh spare.
            advanced += self.next.advance(count - advanced, &mut f);
            self.current = self.next;
            self.next = self.range_alloc.allocate_range().proper();
        }
        debug_assert_eq!(advanced, count);
    }
}
/// Source of id ranges for an [`IdAllocator`].
///
/// # Safety
///
/// NOTE(review): the trait is `unsafe` but the exact contract is not
/// stated here — presumably implementors must return proper ranges that
/// never overlap previously returned ones, since the allocator hands the
/// contained ids out without further checks. Confirm against the trait's
/// users before relying on this.
pub unsafe trait IdRangeAllocator: Send + Sync + 'static {
    /// Returns the next range of ids; an empty range signals exhaustion.
    fn allocate_range(&mut self) -> IdRange;
}
/// Fallback range source that never produces any ids.
struct DummyAllocator;

unsafe impl IdRangeAllocator for DummyAllocator {
    fn allocate_range(&mut self) -> IdRange {
        // Always empty: start == end, so no ids are ever available.
        IdRange { start: END, end: END }
    }
}
/// An [`IdRangeAllocator`] that hands out a single fixed range once and
/// then only empty ranges (see its `allocate_range` impl below).
pub struct OneRangeAllocator {
    /// The range still to be handed out; emptied after the first call.
    range: IdRange,
}
/// Converts a compile-time-known value to `NonZeroU64` without `unsafe`;
/// panics (at const-eval time when used in a const context) if the value
/// is zero.
const fn nz(value: u64) -> NonZeroU64 {
    match NonZeroU64::new(value) {
        Some(v) => v,
        None => panic!("id range bound must be non-zero"),
    }
}

/// Ids `[1, 2^48)` — the client-side half of the id space.
const fn client_range() -> IdRange {
    IdRange {
        start: nz(1),
        end: nz(1 << 48),
    }
}

/// Ids `[2^48, u64::MAX)` — the server-side half of the id space.
const fn server_range() -> IdRange {
    IdRange {
        start: nz(1 << 48),
        end: nz(u64::MAX),
    }
}
impl OneRangeAllocator {
    /// Wraps an explicit range.
    pub const fn new(range: IdRange) -> Self {
        Self { range }
    }

    /// Allocator over the client half of the id space.
    pub const fn client() -> Self {
        Self::new(client_range())
    }

    /// Allocator over the server half of the id space.
    pub const fn server() -> Self {
        Self::new(server_range())
    }
}
unsafe impl IdRangeAllocator for OneRangeAllocator {
fn allocate_range(&mut self) -> IdRange {
let range = self.range;
self.range.start = END;
self.range.end = END;
range
}
}