use core::cmp::{Ordering, PartialOrd};
use core::intrinsics::{assume, const_eval_select};
use core::mem::{size_of, MaybeUninit};
use crate::StaticVec;
/// Runtime arm of `distance_between` for zero-sized `T`: the "distance" is
/// simply the difference of the raw addresses, computed with wrapping
/// arithmetic so it never panics.
#[inline(always)]
pub(crate) fn runtime_distance_between<T>(dest: *const T, origin: *const T) -> usize {
  let to = dest as usize;
  let from = origin as usize;
  to.wrapping_sub(from)
}
/// `const`-evaluation arm of `distance_between` for zero-sized `T`: always
/// panics, because ZST pointer "distances" (raw address differences) cannot
/// currently be computed in a `const` context.
#[inline(always)]
pub(crate) const fn compiletime_distance_between<T>(_dest: *const T, _origin: *const T) -> usize {
  // Fixed: the message previously named `compiletime_zst_handler`, which is
  // not this function's name.
  assert!(
    size_of::<T>() == 0,
    "`compiletime_distance_between` called on a non-ZST!"
  );
  panic!("`distance_between` is only currently usefully `const` for non-ZSTs!");
}
/// Number of elements between `origin` and `dest` (conceptually
/// `dest - origin`), where both point into the same buffer.
///
/// Zero-sized `T` needs special handling because `offset_from` divides by the
/// element size; the ZST case dispatches between a `const` and a runtime
/// implementation via `const_eval_select`.
#[inline(always)]
pub(crate) const fn distance_between<T>(dest: *const T, origin: *const T) -> usize {
  if size_of::<T>() == 0 {
    // SAFETY: both selected functions take the same (pointer, pointer) pair;
    // the compiletime one simply panics, as ZST distances are not usefully
    // `const` yet.
    unsafe {
      const_eval_select(
        (dest, origin),
        compiletime_distance_between,
        runtime_distance_between,
      )
    }
  } else {
    // SAFETY: `offset_from` requires both pointers to lie within (or one past
    // the end of) the same allocation — NOTE(review): callers must uphold
    // this, and presumably `dest >= origin` so the cast to `usize` is sound.
    unsafe { dest.offset_from(origin) as usize }
  }
}
/// Returns the smaller of `lhs` and `rhs`, written as a manual comparison so
/// it stays usable in `const` contexts.
#[inline(always)]
pub(crate) const fn const_min(lhs: usize, rhs: usize) -> usize {
  if rhs <= lhs { rhs } else { lhs }
}
/// Returns a new backing array whose first `length` elements are the first
/// `length` elements of `this`, copied in reverse order.
///
/// NOTE(review): assumes the first `length` elements of `this` are
/// initialized and that `length <= N` — callers must guarantee this.
#[inline]
pub(crate) const fn reverse_copy<T, const N: usize>(
  length: usize,
  this: &MaybeUninit<[T; N]>,
) -> MaybeUninit<[T; N]>
where
  T: Copy,
{
  let src = StaticVec::first_ptr(this);
  let mut res = StaticVec::new_data_uninit();
  let dest = StaticVec::first_ptr_mut(&mut res);
  unsafe {
    // Optimizer hints: both arrays are real (non-null) storage.
    assume(!src.is_null());
    assume(!(dest as *mut T).is_null());
  }
  // Index manually (no `for` loops in `const fn`): source element
  // `length - 1 - idx` becomes destination element `idx`.
  let mut idx = 0;
  while idx < length {
    unsafe {
      src.add(length - 1 - idx).copy_to_nonoverlapping(dest.add(idx), 1);
    }
    idx += 1;
  }
  res
}
/// Builds a completely full `StaticVec` with every one of its `COUNT` slots
/// set to (a bitwise copy of) `value`.
#[allow(dead_code)]
#[inline(always)]
pub fn new_from_value<T, const COUNT: usize>(value: T) -> StaticVec<T, COUNT>
where T: Copy {
  let mut data = StaticVec::new_data_uninit();
  let base = data.as_mut_ptr() as *mut T;
  // `T: Copy`, so each `write` is a plain bitwise copy of `value`.
  let mut i = 0;
  while i < COUNT {
    unsafe { base.add(i).write(value) };
    i += 1;
  }
  StaticVec { data, length: COUNT }
}
/// `const`-evaluation arm of `zst_ptr_add`: steps the address forward by
/// `count` single bytes via a `u8` pointer, wrapping on overflow.
#[inline(always)]
pub(crate) const fn compiletime_zst_ptr_add<T>(ptr: *const T, count: usize) -> *const T {
  let byte_ptr = ptr as *const u8;
  byte_ptr.wrapping_add(count) as *const T
}
/// `const`-evaluation arm of `zst_ptr_add_mut`: the mutable-pointer twin of
/// `compiletime_zst_ptr_add`, stepping byte-wise with wrapping arithmetic.
#[inline(always)]
pub(crate) const fn compiletime_zst_ptr_add_mut<T>(ptr: *mut T, count: usize) -> *mut T {
  let byte_ptr = ptr as *mut u8;
  byte_ptr.wrapping_add(count) as *mut T
}
/// Runtime arm of `zst_ptr_add`: offsets the raw address of a pointer to a
/// zero-sized `T` by `count` bytes.
///
/// Uses `wrapping_add` so its overflow behavior matches the compiletime arm
/// (`compiletime_zst_ptr_add`, which wraps via pointer `wrapping_add`):
/// `const_eval_select` requires the two selected functions to behave
/// identically, and a plain `+` would panic on overflow in debug builds.
#[inline(always)]
pub(crate) fn runtime_zst_ptr_add<T>(ptr: *const T, count: usize) -> *const T {
  (ptr as usize).wrapping_add(count) as *const T
}
/// Runtime arm of `zst_ptr_add_mut`: offsets the raw address of a mutable
/// pointer to a zero-sized `T` by `count` bytes.
///
/// Uses `wrapping_add` so its overflow behavior matches the compiletime arm
/// (`compiletime_zst_ptr_add_mut`): `const_eval_select` requires both
/// selected functions to behave identically, and a plain `+` would panic on
/// overflow in debug builds.
#[inline(always)]
pub(crate) fn runtime_zst_ptr_add_mut<T>(ptr: *mut T, count: usize) -> *mut T {
  (ptr as usize).wrapping_add(count) as *mut T
}
/// Offsets `ptr` by `count` elements for a zero-sized `T`, dispatching
/// between a `const`-evaluable implementation and a runtime one.
///
/// Only meaningful for ZSTs (checked by the `debug_assert!`); pointers to
/// sized types should use ordinary pointer arithmetic instead.
///
/// NOTE(review): `const_eval_select`'s contract requires the two selected
/// functions to behave identically — confirm the compiletime and runtime
/// versions agree on edge cases such as address overflow.
#[inline(always)]
pub(crate) const fn zst_ptr_add<T>(ptr: *const T, count: usize) -> *const T {
  debug_assert!(size_of::<T>() == 0, "`zst_ptr_add` called on a non-ZST!");
  unsafe { const_eval_select((ptr, count), compiletime_zst_ptr_add, runtime_zst_ptr_add) }
}
/// Mutable-pointer counterpart of `zst_ptr_add`: offsets `ptr` by `count`
/// elements for a zero-sized `T`, dispatching between a `const`-evaluable
/// implementation and a runtime one.
///
/// Only meaningful for ZSTs (checked by the `debug_assert!`).
///
/// NOTE(review): `const_eval_select`'s contract requires the two selected
/// functions to behave identically — confirm the compiletime and runtime
/// versions agree on edge cases such as address overflow.
#[inline(always)]
pub(crate) const fn zst_ptr_add_mut<T>(ptr: *mut T, count: usize) -> *mut T {
  debug_assert!(
    size_of::<T>() == 0,
    "`zst_ptr_add_mut` called on a non-ZST!"
  );
  unsafe {
    const_eval_select(
      (ptr, count),
      compiletime_zst_ptr_add_mut,
      runtime_zst_ptr_add_mut,
    )
  }
}
/// Lexicographically compares two slices element-wise, returning `None` as
/// soon as any pair of elements is incomparable (e.g. NaN for floats).
///
/// If the entire common prefix compares equal, the shorter slice orders
/// before the longer one (lengths are compared as the tie-breaker).
///
/// Rewritten with a safe `zip` loop: `zip` stops at the shorter slice, which
/// is exactly what the previous `get_unchecked(0..min_length)` indexing did,
/// so the `unsafe` was unnecessary.
#[inline]
pub(crate) fn partial_compare<T1, T2: PartialOrd<T1>>(
  this: &[T2],
  other: &[T1],
) -> Option<Ordering> {
  for (left, right) in this.iter().zip(other.iter()) {
    match left.partial_cmp(right) {
      // Equal so far; keep scanning the common prefix.
      Some(Ordering::Equal) => (),
      // First non-equal (or incomparable) pair decides the result.
      non_eq => return non_eq,
    }
  }
  // Common prefix fully equal: the shorter slice sorts first.
  this.len().partial_cmp(&other.len())
}
/// Sorts `values[low..=high]` in place; callable in `const` contexts thanks
/// to the `~const PartialOrd` bound.
///
/// Hoare-style partition around the middle element: `i` scans up for an
/// element `>= p`, `j` scans down for one `<= p`, and out-of-place pairs are
/// swapped. The smaller resulting partition is handled by recursion while the
/// larger one is handled by another pass of the outer `loop` (manual
/// tail-recursion elimination), which bounds the recursion depth.
///
/// NOTE(review): no bounds are checked here — callers must guarantee that
/// `low` and `high` are valid indices into the allocation behind `values`.
#[inline]
pub(crate) const fn quicksort_internal<T: Copy + ~const PartialOrd>(
  values: *mut T,
  mut low: isize,
  mut high: isize,
) {
  // Optimizer hint; the dereferences below would be UB on null regardless.
  unsafe { assume(!values.is_null()) };
  loop {
    let mut i = low;
    let mut j = high;
    unsafe {
      // Pivot: a copy of the middle element (`T: Copy`).
      let p = *values.offset(low + ((high - low) >> 1));
      loop {
        // Advance `i` past elements already on the correct (left) side.
        while *values.offset(i) < p {
          i += 1;
        }
        // Retreat `j` past elements already on the correct (right) side.
        while *values.offset(j) > p {
          j -= 1;
        }
        if i <= j {
          if i != j {
            // Swap the out-of-place pair through a temporary copy.
            let q = *values.offset(i);
            *values.offset(i) = *values.offset(j);
            *values.offset(j) = q;
          }
          i += 1;
          j -= 1;
        }
        if i > j {
          // Partition done: left part is `low..=j`, right part is `i..=high`.
          break;
        }
      }
    }
    // Recurse into the smaller partition, iterate on the larger one.
    if j - low < high - i {
      if low < j {
        quicksort_internal(values, low, j);
      }
      low = i;
    } else {
      if i < high {
        quicksort_internal(values, i, high)
      }
      high = j;
    }
    if low >= high {
      // Remaining range holds fewer than two elements: fully sorted.
      break;
    }
  }
}