use rkyv::{
Archive, Archived, Deserialize, DeserializeUnsized, Fallible, RelPtr, Serialize,
SerializeUnsized, out_field,
ser::{ScratchSpace, Serializer},
};
use size_of::{Context, SizeOf};
use std::{
alloc::{self, Layout, handle_alloc_error},
cmp::{Ordering, max, min},
fmt::{self, Debug, Display, Formatter},
hash::{Hash, Hasher},
iter::FusedIterator,
marker::PhantomData,
mem::{ManuallyDrop, align_of, forget, size_of, take},
ops::{Bound, Index, IndexMut, RangeBounds},
pin::Pin,
ptr::{self, NonNull, drop_in_place},
slice,
};
/// Constructs a [`LeanVec`] with `vec!`-style syntax: `lean_vec![]`,
/// `lean_vec![elem; n]`, or `lean_vec![a, b, c]`.
///
/// Each arm builds a temporary `Vec` first and converts it, so element
/// expressions are evaluated exactly as `vec!` would evaluate them.
#[macro_export]
macro_rules! lean_vec {
    () => (
        $crate::dynamic::LeanVec::from(vec![])
    );
    // `$elem` is cloned `$n` times, mirroring `vec![elem; n]`.
    ($elem:expr_2021; $n:expr_2021) => (
        $crate::dynamic::LeanVec::from(vec![$elem;$n])
    );
    ($($x:expr_2021),+ $(,)?) => (
        $crate::dynamic::LeanVec::from(vec![$($x),+])
    );
}
/// A type-erased growable vector: the element type is described only by its
/// byte size and alignment, and all per-element behavior (drop, compare,
/// hash, debug, ...) is supplied by the caller as function pointers.
/// [`LeanVec`] is the typed wrapper around this.
pub struct RawVec {
    // Pointer to the allocation, or a well-aligned dangling pointer while
    // nothing has been allocated yet.
    ptr: *mut u8,
    // Size in bytes of one element (0 for zero-sized types).
    val_size: usize,
    // Alignment required by the element type / allocation.
    align: usize,
    // Number of initialized elements.
    length: usize,
    // Elements the allocation can hold; `usize::MAX` for zero-sized types.
    capacity: usize,
}
impl RawVec {
    /// Creates an empty vector for elements of `val_size` bytes with the
    /// given `align`. `dangling` must be a well-aligned dangling pointer
    /// (e.g. `NonNull::<T>::dangling()`); nothing is allocated here.
    pub fn new(val_size: usize, align: usize, dangling: *mut u8) -> Self {
        Self {
            ptr: dangling,
            val_size,
            align,
            length: 0,
            // Zero-sized elements never need storage, so the capacity is
            // treated as unbounded.
            capacity: if val_size == 0 { usize::MAX } else { 0 },
        }
    }

    /// Like [`RawVec::new`] but pre-allocates room for exactly `capacity`
    /// elements.
    pub fn with_capacity(
        val_size: usize,
        align: usize,
        capacity: usize,
        dangling: *mut u8,
    ) -> Self {
        let mut this = Self::new(val_size, align, dangling);
        this.reserve_exact(capacity);
        this
    }

    /// Reassembles a vector from raw parts.
    ///
    /// # Safety
    /// `ptr` must point to an allocation of `capacity` elements of
    /// `val_size`/`align` (or be a well-aligned dangling pointer when no
    /// allocation exists), and the first `length` elements must be
    /// initialized.
    pub unsafe fn from_raw_parts(
        ptr: *mut u8,
        length: usize,
        capacity: usize,
        val_size: usize,
        align: usize,
    ) -> Self {
        debug_assert!(capacity >= length);
        Self {
            ptr,
            val_size,
            align,
            length,
            capacity,
        }
    }

    /// Number of initialized elements.
    pub const fn len(&self) -> usize {
        self.length
    }

    /// Number of additional elements that fit without reallocating.
    pub fn spare_capacity(&self) -> usize {
        self.capacity() - self.len()
    }

    /// `true` if at least one more element fits without reallocating.
    pub fn has_spare_capacity(&self) -> bool {
        self.spare_capacity() > 0
    }

    /// Sets the length directly.
    ///
    /// # Safety
    /// `new_len` must not exceed the capacity and the first `new_len`
    /// elements must be initialized.
    pub unsafe fn set_len(&mut self, new_len: usize) {
        debug_assert!(new_len <= self.capacity());
        self.length = new_len;
    }

    pub const fn is_empty(&self) -> bool {
        self.length == 0
    }

    /// Current capacity in elements (`usize::MAX` for zero-sized elements).
    pub const fn capacity(&self) -> usize {
        self.capacity
    }

    pub const fn as_ptr(&self) -> *const u8 {
        self.ptr as *const u8
    }

    pub fn as_mut_ptr(&mut self) -> *mut u8 {
        self.ptr
    }

    /// Ensures room for at least `additional` more elements, growing
    /// geometrically via `next_capacity` until the request is satisfied.
    // NOTE(review): `len + additional` is an unchecked add and would wrap on
    // overflow in release builds — callers are trusted to stay in range.
    pub fn reserve(&mut self, additional: usize) {
        let required = self.len() + additional;
        if required <= self.capacity() {
            return;
        }
        // Zero-sized element types report capacity `usize::MAX`, so reaching
        // this point implies `val_size > 0` and a non-zero request.
        debug_assert_ne!(required, 0);
        let mut new_capacity = self.next_capacity(required);
        while new_capacity < required {
            new_capacity = self.next_capacity(new_capacity);
        }
        self.realloc(new_capacity);
    }

    /// Ensures room for at least `additional` more elements without the
    /// geometric over-allocation of [`RawVec::reserve`].
    pub fn reserve_exact(&mut self, additional: usize) {
        let required = self.len() + additional;
        if required <= self.capacity() {
            return;
        }
        self.realloc(required);
    }

    /// Drops all elements via the caller-supplied `drop_slice_in_place` and
    /// resets the length to zero; the allocation itself is retained.
    pub fn clear(&mut self, drop_slice_in_place: &dyn Fn(*mut u8, usize)) {
        if self.is_empty() {
            return;
        }
        let length = self.len();
        unsafe {
            // Zero the length *before* dropping so a panicking destructor
            // cannot leave the vector exposing already-dropped elements.
            self.set_len(0);
            drop_slice_in_place(self.as_mut_ptr(), length);
        }
    }

    /// Returns a pointer to element `idx`, panicking when out of bounds.
    pub fn index(&self, idx: usize) -> *const u8 {
        if idx < self.len() {
            unsafe { self.as_ptr().add(idx * self.val_size) }
        } else {
            index_out_of_bounds(idx, self.len())
        }
    }

    /// Mutable counterpart of [`RawVec::index`].
    pub fn index_mut(&mut self, idx: usize) -> *mut u8 {
        if idx < self.len() {
            unsafe { self.as_mut_ptr().add(idx * self.val_size) }
        } else {
            index_out_of_bounds(idx, self.len())
        }
    }

    /// # Safety
    /// `idx` must be less than `self.len()`.
    pub unsafe fn index_unchecked(&self, idx: usize) -> *const u8 {
        unsafe {
            debug_assert!(idx < self.len());
            self.as_ptr().add(idx * self.val_size)
        }
    }

    /// # Safety
    /// `idx` must be less than `self.len()`.
    pub unsafe fn index_mut_unchecked(&mut self, idx: usize) -> *mut u8 {
        unsafe {
            debug_assert!(idx < self.len());
            self.as_mut_ptr().add(idx * self.val_size)
        }
    }

    /// Resolves `range` against the vector, returning the start pointer and
    /// element count; panics on an out-of-bounds or inverted range.
    pub fn range<R>(&self, range: R) -> (*const u8, usize)
    where
        R: RangeBounds<usize>,
    {
        self.range_inner(range.start_bound().cloned(), range.end_bound().cloned())
    }

    /// Mutable counterpart of [`RawVec::range`].
    pub fn range_mut<R>(&mut self, range: R) -> (*mut u8, usize)
    where
        R: RangeBounds<usize>,
    {
        let (ptr, len) = self.range_inner(range.start_bound().cloned(), range.end_bound().cloned());
        (ptr as *mut u8, len)
    }

    /// Appends one element by bitwise copy, growing the vector if needed.
    ///
    /// # Safety
    /// `value` must point to a valid element of `val_size` bytes; ownership
    /// of that element is transferred to the vector (the caller must not
    /// drop it afterwards).
    pub unsafe fn push_raw(&mut self, value: *const u8) {
        self.reserve(1);
        unsafe {
            ptr::copy_nonoverlapping(
                value,
                self.as_mut_ptr().add(self.len() * self.val_size),
                self.val_size,
            );
        }
        self.length += 1;
    }

    /// Appends one element by bitwise copy without checking capacity.
    ///
    /// # Safety
    /// Same as [`RawVec::push_raw`], plus the vector must already have spare
    /// capacity.
    pub unsafe fn push_unchecked(&mut self, value: *const u8) {
        debug_assert_ne!(self.capacity(), 0, "cannot push to a vec of length zero");
        debug_assert!(
            self.len() < self.capacity(),
            "cannot push to a vec without spare capacity",
        );
        unsafe {
            ptr::copy_nonoverlapping(
                value,
                self.as_mut_ptr().add(self.len() * self.val_size),
                self.val_size,
            );
        }
        self.length += 1;
    }

    /// Appends `length` contiguous elements starting at `start` by bitwise
    /// copy.
    ///
    /// # Safety
    /// `start` must point to `length` valid elements of the same `val_size`;
    /// ownership of those elements is transferred to the vector.
    pub unsafe fn append_raw_range(&mut self, start: *const u8, length: usize) {
        self.reserve(length);
        unsafe {
            ptr::copy_nonoverlapping(
                start,
                self.as_mut_ptr().add(self.len() * self.val_size),
                self.val_size * length,
            );
        }
        self.length += length;
    }

    /// Element-wise equality driven by the caller-supplied comparison.
    pub fn eq(&self, other: &Self, eq_func: &dyn Fn(*const u8, *const u8) -> bool) -> bool {
        if self.len() != other.len() {
            return false;
        }
        for i in 0..self.len() {
            if !eq_func(self.index(i), other.index(i)) {
                return false;
            }
        }
        true
    }

    /// Lexicographic comparison: element-wise over the common prefix, then
    /// by length (same convention as slice ordering).
    pub fn cmp(
        &self,
        other: &Self,
        cmp_func: &dyn Fn(*const u8, *const u8) -> Ordering,
    ) -> Ordering {
        let common_len = min(self.len(), other.len());
        for i in 0..common_len {
            let ordering = cmp_func(self.index(i), other.index(i));
            if ordering != Ordering::Equal {
                return ordering;
            }
        }
        self.len().cmp(&other.len())
    }

    /// Feeds every element to `hash_func` in index order.
    #[allow(clippy::should_implement_trait)]
    pub fn hash(&self, hash_func: &mut dyn FnMut(*const u8)) {
        for i in 0..self.len() {
            hash_func(self.index(i));
        }
    }

    /// Records this vector's heap usage in `context`, then recurses into
    /// each element via `size_of_children_func`.
    pub fn size_of_children(
        &self,
        context: &mut size_of::Context,
        size_of_children_func: &mut dyn Fn(*const u8, &mut size_of::Context),
    ) {
        // Only count an allocation when one actually exists (non-zero
        // capacity and non-zero-sized elements).
        if self.capacity() != 0 && self.val_size != 0 {
            context
                .add_vectorlike(self.len(), self.capacity(), self.val_size)
                .add_distinct_allocation();
        }
        for i in 0..self.len() {
            size_of_children_func(self.index(i), context);
        }
    }

    /// Formats the vector as a debug list, delegating per-element formatting
    /// to `debug_func`.
    pub fn debug(
        &self,
        f: &mut fmt::Formatter<'_>,
        debug_func: &dyn Fn(*const u8, &mut fmt::Formatter) -> fmt::Result,
    ) -> fmt::Result {
        // Adapter so an erased element pointer can be handed to
        // `DebugList::entry`, which wants a `&dyn Debug`.
        struct DebugErased<'a>(
            *const u8,
            &'a dyn Fn(*const u8, &mut fmt::Formatter) -> fmt::Result,
        );
        impl Debug for DebugErased<'_> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                (self.1)(self.0, f)
            }
        }
        let mut list = f.debug_list();
        for i in 0..self.len() {
            list.entry(&DebugErased(self.index(i), debug_func));
        }
        list.finish()
    }

    /// Removes consecutive duplicate elements (as judged by `same_bucket`)
    /// starting at `starting_point`, dropping removed elements via
    /// `drop_in_place`. Mirrors `Vec::dedup_by`'s gap-filling algorithm.
    pub fn dedup_starting_at(
        &mut self,
        starting_point: usize,
        same_bucket: &mut dyn FnMut(*mut u8, *mut u8) -> bool,
        drop_in_place: &mut dyn FnMut(*mut u8),
    ) {
        let len = self.len();
        if len <= 1 || starting_point + 1 >= len {
            return;
        }
        // Panic guard: if `same_bucket` or a drop panics, closing the gap in
        // `Drop` keeps the vector's prefix/suffix valid (no double drops,
        // no exposed dropped elements).
        struct FillGapOnDrop<'a> {
            read: usize,
            write: usize,
            vec: &'a mut RawVec,
        }
        impl Drop for FillGapOnDrop<'_> {
            fn drop(&mut self) {
                unsafe {
                    let len = self.vec.len();
                    let key_ptr = self.vec.as_mut_ptr();
                    // Shift the unprocessed tail down over the gap left by
                    // the dropped duplicates.
                    let items_left = len.wrapping_sub(self.read);
                    let dropped_key_ptr = key_ptr.add(self.write * self.vec.val_size);
                    let valid_key_ptr = key_ptr.add(self.read * self.vec.val_size);
                    ptr::copy(
                        valid_key_ptr,
                        dropped_key_ptr,
                        items_left * self.vec.val_size,
                    );
                    let dropped = self.read.wrapping_sub(self.write);
                    self.vec.set_len(len - dropped);
                }
            }
        }
        let mut gap = FillGapOnDrop {
            read: starting_point + 1,
            write: starting_point + 1,
            vec: self,
        };
        let ptr = gap.vec.as_mut_ptr();
        unsafe {
            while gap.read < len {
                let read_ptr = ptr.add(gap.read * gap.vec.val_size);
                let prev_ptr = ptr.add(gap.write.wrapping_sub(1) * gap.vec.val_size);
                if same_bucket(read_ptr, prev_ptr) {
                    // Duplicate: advance `read` first so the guard never
                    // re-drops this element if `drop_in_place` panics.
                    gap.read += 1;
                    drop_in_place(read_ptr);
                } else {
                    // Keeper: move it down into the write position.
                    let write_key_ptr = ptr.add(gap.write * gap.vec.val_size);
                    ptr::copy(read_ptr, write_key_ptr, gap.vec.val_size);
                    gap.write += 1;
                    gap.read += 1;
                }
            }
            // Success path: commit the new length and disarm the guard.
            gap.vec.set_len(gap.write);
            forget(gap);
        }
    }

    /// Keeps only the elements for which `retain` returns `true`, starting
    /// at `starting_point`; removed elements are dropped via
    /// `drop_in_place`. Mirrors `Vec::retain`'s two-phase backshift
    /// algorithm.
    pub fn retain_starting_at(
        &mut self,
        starting_point: usize,
        retain: &mut dyn FnMut(*mut u8) -> bool,
        drop_in_place: &mut dyn FnMut(*mut u8),
    ) {
        if self.is_empty() || starting_point >= self.len() {
            return;
        }
        let original_len = self.len();
        // Length is zeroed during processing so a panic cannot expose a
        // half-retained state; the guard below restores a consistent length.
        unsafe { self.set_len(0) };
        struct BackshiftOnDrop<'a> {
            v: &'a mut RawVec,
            processed_len: usize,
            deleted_cnt: usize,
            original_len: usize,
        }
        impl Drop for BackshiftOnDrop<'_> {
            fn drop(&mut self) {
                if self.deleted_cnt > 0 {
                    unsafe {
                        // Shift the unprocessed tail down over the holes.
                        ptr::copy(
                            self.v.as_ptr().add(self.processed_len * self.v.val_size),
                            self.v
                                .as_mut_ptr()
                                .add((self.processed_len - self.deleted_cnt) * self.v.val_size),
                            (self.original_len - self.processed_len) * self.v.val_size,
                        );
                    }
                }
                unsafe {
                    self.v.set_len(self.original_len - self.deleted_cnt);
                }
            }
        }
        let mut g = BackshiftOnDrop {
            v: self,
            processed_len: starting_point,
            deleted_cnt: 0,
            original_len,
        };
        // Phase 1 (DELETED = false): scan without moving anything until the
        // first deletion. Phase 2 (DELETED = true): keep scanning, shifting
        // every kept element down into the hole.
        fn process_loop<const DELETED: bool>(
            original_len: usize,
            f: &mut dyn FnMut(*mut u8) -> bool,
            g: &mut BackshiftOnDrop<'_>,
            drop_in_place: &mut dyn FnMut(*mut u8),
        ) {
            while g.processed_len != original_len {
                let cur = unsafe { g.v.as_mut_ptr().add(g.processed_len * g.v.val_size) };
                if !f(cur) {
                    // Bump the counters before dropping so the guard stays
                    // consistent if `drop_in_place` panics.
                    g.processed_len += 1;
                    g.deleted_cnt += 1;
                    drop_in_place(cur);
                    if DELETED {
                        continue;
                    } else {
                        break;
                    }
                }
                if DELETED {
                    unsafe {
                        let hole_slot =
                            g.v.as_mut_ptr()
                                .add((g.processed_len - g.deleted_cnt) * g.v.val_size);
                        ptr::copy_nonoverlapping(cur, hole_slot, g.v.val_size);
                    }
                }
                g.processed_len += 1;
            }
        }
        process_loop::<false>(original_len, retain, &mut g, drop_in_place);
        process_loop::<true>(original_len, retain, &mut g, drop_in_place);
        // The guard's `Drop` performs the final backshift and length fixup.
        drop(g);
    }

    /// Shortens the vector to `len` elements, dropping the tail via
    /// `drop_slice_in_place`; a no-op when `len >= self.length`.
    fn truncate(&mut self, len: usize, drop_slice_in_place: &dyn Fn(*mut u8, usize)) {
        unsafe {
            if len > self.length {
                return;
            }
            let remaining_len = self.length - len;
            // Shrink first so a panicking destructor cannot expose the
            // partially-dropped tail.
            self.length = len;
            drop_slice_in_place(self.as_mut_ptr().add(len * self.val_size), remaining_len);
        }
    }

    /// `true` when adjacent elements are in non-decreasing order according
    /// to `compare`.
    fn is_sorted_by(&self, compare: &dyn Fn(*const u8, *const u8) -> Ordering) -> bool {
        if self.len() <= 1 {
            return true;
        }
        let mut last = self.index(0);
        for i in 1..self.len() {
            let current = self.index(i);
            if compare(last, current) == Ordering::Greater {
                return false;
            }
            last = current;
        }
        true
    }
}
impl RawVec {
    /// # Safety
    /// `ptr` must be valid for the vector's current capacity and layout.
    unsafe fn set_ptr(&mut self, ptr: *mut u8) {
        self.ptr = ptr;
    }

    /// # Safety
    /// `capacity` must describe the actual size of the current allocation.
    unsafe fn set_capacity(&mut self, capacity: usize) {
        self.capacity = capacity;
    }

    /// Grows or shrinks the allocation to hold exactly `new_capacity`
    /// elements. Must not be called for zero-sized element types.
    fn realloc(&mut self, new_capacity: usize) {
        debug_assert!(self.val_size > 0);
        debug_assert!(new_capacity >= self.len());
        if new_capacity == self.capacity() {
            return;
        }
        let current_layout = self.layout_for(self.capacity());
        let new_ptr = if new_capacity > 0 {
            let new_layout = self.layout_for(new_capacity);
            let new_ptr = unsafe {
                if self.capacity() == 0 {
                    // First allocation.
                    alloc::alloc(new_layout)
                } else {
                    alloc::realloc(self.as_mut_ptr(), current_layout, new_layout.size())
                }
            };
            if new_ptr.is_null() {
                handle_alloc_error(new_layout);
            }
            new_ptr
        } else {
            // Shrinking to zero: free the allocation and fall back to a
            // well-aligned dangling pointer (the alignment value itself).
            unsafe { alloc::dealloc(self.as_mut_ptr(), current_layout) };
            self.align as *mut u8
        };
        unsafe {
            self.set_capacity(new_capacity);
            self.set_ptr(new_ptr);
        }
    }

    /// Layout for an allocation of `capacity` elements, with the byte size
    /// rounded up to a multiple of the alignment.
    // NOTE(review): `capacity * self.val_size` is an unchecked multiply;
    // callers are trusted not to request layouts past `isize::MAX`.
    fn layout_for(&self, capacity: usize) -> Layout {
        let bytes = self.next_aligned_capacity(capacity * self.val_size);
        Layout::from_size_align(bytes, self.align).unwrap()
    }

    /// Shrinks the allocation to fit `len()` elements. The `usize::MAX`
    /// check skips zero-sized element types, which own no allocation.
    pub fn shrink_to_fit(&mut self) {
        if self.has_spare_capacity() && self.capacity != usize::MAX {
            self.realloc(self.len())
        }
    }

    /// Growth policy: start near 128 bytes worth of elements, grow by 1.5x
    /// while the buffer is small, and double once it exceeds 128 KiB.
    fn next_capacity(&self, capacity: usize) -> usize {
        debug_assert_ne!(self.val_size, 0);
        let size = self.val_size;
        if capacity == 0 {
            max(128 / size, 1)
        } else if capacity > (4096 * 32) / size {
            capacity * 2
        } else {
            (capacity * 3).div_ceil(2)
        }
    }

    /// Rounds a byte count up to the next multiple of the alignment.
    fn next_aligned_capacity(&self, capacity: usize) -> usize {
        let remaining = capacity % self.align;
        if remaining == 0 {
            capacity
        } else {
            capacity + (self.align - remaining)
        }
    }

    /// Normalizes a pair of bounds into `(start pointer, element count)`,
    /// panicking when the range is inverted, overflows, or exceeds `len()`.
    fn range_inner(
        &self,
        start_bound: Bound<usize>,
        end_bound: Bound<usize>,
    ) -> (*const u8, usize) {
        let (start, start_overflowed) = match start_bound {
            Bound::Included(start) => (start, false),
            Bound::Excluded(start) => start.overflowing_add(1),
            Bound::Unbounded => (0, false),
        };
        let (end, end_overflowed) = match end_bound {
            Bound::Included(end) => end.overflowing_add(1),
            Bound::Excluded(end) => (end, false),
            Bound::Unbounded => (self.len(), false),
        };
        if !start_overflowed && !end_overflowed && start <= end && end <= self.len() {
            unsafe {
                let len = end - start;
                let start = self.as_ptr().add(start * self.val_size);
                (start, len)
            }
        } else {
            range_out_of_bounds(start_bound, end_bound, self.len())
        }
    }

    /// Drops all elements and frees the allocation (if any). Used by the
    /// typed wrapper's `Drop` impl.
    fn drop_vec(&mut self, drop_slice_in_place: &dyn Fn(*mut u8, usize)) {
        self.clear(drop_slice_in_place);
        // Zero-sized types report capacity usize::MAX but `val_size == 0`
        // keeps them out of this branch: there is nothing to free.
        if self.capacity > 0 && self.val_size > 0 {
            let layout = self.layout_for(self.capacity());
            unsafe { alloc::dealloc(self.as_mut_ptr(), layout) };
        }
    }
}
/// Shared cold panic path for out-of-bounds element access.
#[cold]
#[inline(never)]
fn index_out_of_bounds(index: usize, length: usize) -> ! {
    panic!(
        "index out of bounds, length is {} but the index was {}",
        length, index,
    )
}
/// Shared cold panic path for out-of-bounds range access; renders the
/// offending bounds the way a range literal would be written.
#[cold]
#[inline(never)]
fn range_out_of_bounds(start: Bound<usize>, end: Bound<usize>, length: usize) -> ! {
    /// Formats a `(start, end)` bound pair as `a..b`, `a..=b`, `..`, etc.,
    /// normalizing an excluded start to the first included index.
    struct RangeFmt(Bound<usize>, Bound<usize>);
    impl Display for RangeFmt {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            // Start bound (empty when unbounded) ...
            match self.0 {
                Bound::Included(start) => write!(f, "{start}")?,
                Bound::Excluded(start) => write!(f, "{}", start.saturating_add(1))?,
                Bound::Unbounded => {}
            }
            // ... the range dots ...
            f.write_str("..")?;
            // ... then the end bound (`=` marks an inclusive end).
            match self.1 {
                Bound::Included(end) => write!(f, "={end}"),
                Bound::Excluded(end) => write!(f, "{end}"),
                Bound::Unbounded => Ok(()),
            }
        }
    }
    panic!(
        "index {} is out of bounds of a slice with length {length}",
        RangeFmt(start, end),
    )
}
/// Double-ended iterator over the erased elements of a [`RawVec`], yielding
/// raw `*const u8` element pointers. The lifetime ties it to the borrowed
/// vector.
pub struct RawIter<'a> {
    // Base pointer of the underlying buffer.
    ptr: *mut u8,
    // Index of the next element to yield from the front.
    start: usize,
    // One past the index of the next element to yield from the back.
    end: usize,
    // Byte size of one element, used to scale indices into byte offsets.
    val_size: usize,
    __marker: PhantomData<&'a ()>,
}
impl<'a> RawIter<'a> {
    /// Creates an iterator over every element of `vec`.
    fn new(vec: &'a RawVec) -> Self {
        Self {
            ptr: vec.ptr,
            start: 0,
            end: vec.len(),
            val_size: vec.val_size,
            __marker: PhantomData,
        }
    }

    /// Pointer to element `n`.
    ///
    /// # Safety
    /// `n` must be within the bounds of the underlying vector.
    unsafe fn get_unchecked(&self, n: usize) -> *mut u8 {
        unsafe { self.ptr.add(n * self.val_size) }
    }

    /// Number of elements not yet yielded from either end.
    fn iter_len(&self) -> usize {
        self.end - self.start
    }
}
impl Iterator for RawIter<'_> {
    type Item = *const u8;

    fn next(&mut self) -> Option<Self::Item> {
        if self.start == self.end {
            None
        } else {
            self.start += 1;
            // `start - 1` is the element just consumed.
            Some(unsafe { self.get_unchecked(self.start - 1) })
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.iter_len();
        (len, Some(len))
    }

    fn count(self) -> usize {
        self.iter_len()
    }

    /// Returns the `n`-th remaining element (0-based), consuming `n + 1`
    /// elements, per the `Iterator::nth` contract.
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        if n >= self.iter_len() {
            self.start = self.end;
            None
        } else {
            // Fix: advance past the returned element as well. The previous
            // `self.start += n` returned the element *before* the requested
            // one and underflowed on `nth(0)` for a fresh iterator.
            self.start += n + 1;
            Some(unsafe { self.get_unchecked(self.start - 1) })
        }
    }

    fn last(mut self) -> Option<Self::Item> {
        if self.start >= self.end {
            None
        } else {
            self.start = self.end;
            Some(unsafe { self.get_unchecked(self.start - 1) })
        }
    }
}
impl DoubleEndedIterator for RawIter<'_> {
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.end == self.start {
            None
        } else {
            self.end -= 1;
            Some(unsafe { self.get_unchecked(self.end) })
        }
    }

    /// Returns the `n`-th element from the back (0-based), consuming `n + 1`
    /// elements, per the `DoubleEndedIterator::nth_back` contract.
    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        if n >= self.iter_len() {
            self.end = self.start;
            None
        } else {
            // Fix: consume the returned element too. The previous
            // `self.end -= n` left `end` one past the element to return, so
            // `nth_back(0)` read out of bounds.
            self.end -= n + 1;
            Some(unsafe { self.get_unchecked(self.end) })
        }
    }
}
impl ExactSizeIterator for RawIter<'_> {
    /// Exact number of elements remaining: the gap between the two cursors.
    fn len(&self) -> usize {
        self.end - self.start
    }
}
// Once `next` returns `None` the cursors are equal and stay equal, so the
// iterator keeps returning `None`: a valid `FusedIterator`.
impl FusedIterator for RawIter<'_> {}
/// A typed, `Vec`-like container backed by the type-erased [`RawVec`];
/// `T`'s size/alignment are recorded at construction and element operations
/// are passed down as erased closures.
pub struct LeanVec<T> {
    pub vec: RawVec,
    // Marks logical ownership of `T` values for drop-check/variance.
    phantom: PhantomData<T>,
}
/// The rkyv-archived form of [`LeanVec`]: a relative pointer into the
/// archive plus an archived length, analogous to rkyv's `ArchivedVec`.
pub struct ArchivedLeanVec<T> {
    // Position-independent pointer to the archived element slice.
    ptr: RelPtr<T>,
    len: Archived<usize>,
}
impl<T> ArchivedLeanVec<T> {
    /// Pointer to the first archived element.
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        self.ptr.as_ptr()
    }

    /// Number of archived elements.
    #[inline]
    pub fn len(&self) -> usize {
        self.len as usize
    }

    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Borrows the archived elements as a slice.
    #[inline]
    pub fn as_slice(&self) -> &[T] {
        unsafe { core::slice::from_raw_parts(self.as_ptr(), self.len()) }
    }

    /// Mutable pinned view of the archived elements (pinning is required
    /// because archived data must not be moved).
    #[inline]
    pub fn pin_mut_slice(self: Pin<&mut Self>) -> Pin<&mut [T]> {
        unsafe {
            self.map_unchecked_mut(|s| core::slice::from_raw_parts_mut(s.ptr.as_mut_ptr(), s.len()))
        }
    }

    /// Pinned indexing into the archived elements.
    #[inline]
    pub fn index_pin<I>(self: Pin<&mut Self>, index: I) -> Pin<&mut <[T] as Index<I>>::Output>
    where
        [T]: IndexMut<I>,
    {
        unsafe { self.pin_mut_slice().map_unchecked_mut(|s| &mut s[index]) }
    }

    /// Emplaces the archived representation of `slice` at `out`.
    ///
    /// # Safety
    /// `pos` must be the archive position of `out`, and `resolver` must come
    /// from serializing the same slice.
    #[inline]
    pub unsafe fn resolve_from_slice<U: Archive<Archived = T>>(
        slice: &[U],
        pos: usize,
        resolver: LeanVecResolver,
        out: *mut Self,
    ) {
        unsafe {
            Self::resolve_from_len(slice.len(), pos, resolver, out);
        }
    }

    /// Emplaces the archived pointer/length pair at `out`.
    ///
    /// # Safety
    /// Same contract as [`ArchivedLeanVec::resolve_from_slice`], with `len`
    /// matching the serialized element count.
    #[inline]
    pub unsafe fn resolve_from_len(
        len: usize,
        pos: usize,
        resolver: LeanVecResolver,
        out: *mut Self,
    ) {
        unsafe {
            // Emplace the relative pointer to the element data ...
            let (fp, fo) = out_field!(out.ptr);
            RelPtr::emplace(pos + fp, resolver.pos, fo);
            // ... then the archived length.
            let (fp, fo) = out_field!(out.len);
            usize::resolve(&len, pos + fp, (), fo);
        }
    }

    /// Serializes `slice`'s elements, returning the resolver holding the
    /// archive position of the written data.
    #[inline]
    pub fn serialize_from_slice<U: Serialize<S, Archived = T>, S: Serializer + ?Sized>(
        slice: &[U],
        serializer: &mut S,
    ) -> Result<LeanVecResolver, S::Error>
    where
        [U]: SerializeUnsized<S>,
    {
        Ok(LeanVecResolver {
            pos: slice.serialize_unsized(serializer)?,
        })
    }
}
/// Resolver produced when serializing a [`LeanVec`]: records the archive
/// position of the serialized element data.
pub struct LeanVecResolver {
    pos: usize,
}
impl<T> Archive for LeanVec<T>
where
    T: Archive,
{
    type Archived = ArchivedLeanVec<T::Archived>;
    type Resolver = LeanVecResolver;

    /// Emplaces the archived form by delegating to the slice-based resolver.
    unsafe fn resolve(&self, pos: usize, resolver: Self::Resolver, out: *mut Self::Archived) {
        unsafe {
            ArchivedLeanVec::resolve_from_slice(self.as_slice(), pos, resolver, out);
        }
    }
}
impl<S, T> Serialize<S> for LeanVec<T>
where
    T: Serialize<S>,
    S: ScratchSpace + Serializer + ?Sized,
{
    /// Serializes the elements as a slice and returns the resolver.
    fn serialize(&self, serializer: &mut S) -> ::core::result::Result<Self::Resolver, S::Error> {
        ArchivedLeanVec::<T::Archived>::serialize_from_slice(self.as_slice(), serializer)
    }
}
impl<D, T> Deserialize<LeanVec<T>, D> for Archived<LeanVec<T>>
where
    D: Fallible + ?Sized,
    T: Archive,
    [T::Archived]: DeserializeUnsized<[T], D>,
{
    /// Deserializes the archived slice into a fresh heap allocation and
    /// adopts it as a `LeanVec` (via `From<Box<[T]>>`).
    fn deserialize(&self, deserializer: &mut D) -> ::core::result::Result<LeanVec<T>, D::Error> {
        unsafe {
            // rkyv writes the elements into the allocation produced by the
            // closure, then hands back matching slice metadata.
            let data_address = self
                .as_slice()
                .deserialize_unsized(deserializer, |layout| alloc::alloc(layout))?;
            let metadata = self.as_slice().deserialize_metadata(deserializer)?;
            let ptr = ptr_meta::from_raw_parts_mut(data_address, metadata);
            Ok(Box::<[T]>::from_raw(ptr).into())
        }
    }
}
impl<T> PartialEq for ArchivedLeanVec<T>
where
    T: Ord,
{
    /// Element-wise equality via slice comparison.
    fn eq(&self, other: &Self) -> bool {
        self.as_slice() == other.as_slice()
    }
}
// Equality is total because element equality (via `Ord`) is total.
impl<T> Eq for ArchivedLeanVec<T> where T: Ord {}
impl<T> PartialOrd for ArchivedLeanVec<T>
where
    T: Ord,
{
    /// Canonical delegation to the total order defined by `Ord`.
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl<T> Ord for ArchivedLeanVec<T>
where
    T: Ord,
{
    /// Lexicographic comparison of the archived element slices.
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}
impl<T> LeanVec<T> {
    /// Creates an empty vector; no allocation is performed.
    pub fn new() -> Self {
        Self {
            vec: RawVec::new(
                size_of::<T>(),
                align_of::<T>(),
                NonNull::<T>::dangling().as_ptr() as *mut u8,
            ),
            phantom: PhantomData,
        }
    }

    /// Creates an empty vector with room for exactly `capacity` elements.
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            vec: RawVec::with_capacity(
                size_of::<T>(),
                align_of::<T>(),
                capacity,
                NonNull::<T>::dangling().as_ptr() as *mut u8,
            ),
            phantom: PhantomData,
        }
    }

    /// Reassembles a vector from raw parts.
    ///
    /// # Safety
    /// Same contract as `Vec::from_raw_parts`: `ptr` must come from an
    /// allocation of `capacity` `T`s with the first `length` initialized.
    pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Self {
        unsafe {
            Self {
                vec: RawVec::from_raw_parts(
                    ptr as *mut u8,
                    length,
                    capacity,
                    size_of::<T>(),
                    align_of::<T>(),
                ),
                phantom: PhantomData,
            }
        }
    }

    pub const fn len(&self) -> usize {
        self.vec.len()
    }

    pub fn spare_capacity(&self) -> usize {
        self.vec.spare_capacity()
    }

    pub fn has_spare_capacity(&self) -> bool {
        self.vec.has_spare_capacity()
    }

    /// # Safety
    /// Same contract as [`RawVec::set_len`].
    pub unsafe fn set_len(&mut self, new_len: usize) {
        unsafe { self.vec.set_len(new_len) }
    }

    pub const fn is_empty(&self) -> bool {
        self.vec.is_empty()
    }

    pub const fn capacity(&self) -> usize {
        self.vec.capacity()
    }

    pub const fn as_ptr(&self) -> *const T {
        self.vec.as_ptr() as *const T
    }

    /// Borrows the initialized elements as a slice.
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.as_ptr(), self.len()) }
    }

    pub fn as_mut_ptr(&mut self) -> *mut T {
        self.vec.as_mut_ptr() as *mut T
    }

    /// Borrows the initialized elements as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len()) }
    }

    /// Borrows element `index`, panicking when out of bounds.
    pub(crate) fn get(&self, index: usize) -> &T {
        unsafe { &*(self.vec.index(index) as *const T) }
    }

    /// Mutably borrows element `index`, panicking when out of bounds.
    pub(crate) fn get_mut(&mut self, index: usize) -> &mut T {
        unsafe { &mut *(self.vec.index_mut(index) as *mut T) }
    }

    /// # Safety
    /// `index` must be less than `self.len()`.
    pub(crate) unsafe fn get_unchecked(&self, index: usize) -> &T {
        unsafe { &*(self.vec.index_unchecked(index) as *const T) }
    }

    /// # Safety
    /// `index` must be less than `self.len()`.
    pub(crate) unsafe fn get_mut_unchecked(&mut self, index: usize) -> &mut T {
        unsafe { &mut *(self.vec.index_mut_unchecked(index) as *mut T) }
    }

    pub fn reserve(&mut self, additional: usize) {
        self.vec.reserve(additional)
    }

    pub fn reserve_exact(&mut self, additional: usize) {
        self.vec.reserve_exact(additional)
    }

    /// Drops all elements; the allocation is kept.
    pub fn clear(&mut self) {
        self.vec.clear(&Self::drop_slice_in_place)
    }

    /// Type-erased destructor passed down to [`RawVec`]: drops `size`
    /// consecutive `T`s starting at `ptr`.
    fn drop_slice_in_place(ptr: *mut u8, size: usize) {
        for i in 0..size {
            unsafe { drop_in_place((ptr as *mut T).add(i)) }
        }
    }

    /// Shortens the vector to `len` elements, dropping the tail.
    pub fn truncate(&mut self, len: usize) {
        self.vec.truncate(len, &Self::drop_slice_in_place);
    }

    /// Appends `value`, growing the vector if needed.
    pub fn push(&mut self, value: T) {
        // `ManuallyDrop` hands ownership to the bitwise copy performed by
        // `push_raw`; the original must not also be dropped here.
        let value = ManuallyDrop::new(value);
        unsafe { self.vec.push_raw(&*value as *const T as *const u8) };
    }

    /// Appends `value` without checking capacity.
    ///
    /// # Safety
    /// The vector must already have spare capacity.
    pub unsafe fn push_unchecked(&mut self, value: T) {
        unsafe {
            let value = ManuallyDrop::new(value);
            self.vec.push_unchecked(&*value as *const T as *const u8);
        }
    }

    /// Appends clones of every element in `other`.
    pub fn extend_from_slice(&mut self, other: &[T])
    where
        T: Clone,
    {
        self.reserve(other.len());
        for val in other.iter() {
            self.push(val.clone());
        }
    }

    /// Moves every element out of `other` (leaving `T::default()` behind)
    /// and appends it.
    pub fn append_from_slice(&mut self, other: &mut [T])
    where
        T: Default,
    {
        self.reserve(other.len());
        for val in other.iter_mut() {
            self.push(take(val))
        }
    }

    /// Moves all elements of `other` into `self`, leaving `other` empty.
    pub fn append(&mut self, other: &mut Self) {
        unsafe {
            // Bitwise transfer of ownership; `other`'s length is zeroed so
            // its elements are not dropped twice.
            self.vec.append_raw_range(other.vec.as_ptr(), other.len());
            other.vec.set_len(0);
        }
    }

    /// Removes consecutive equal elements.
    pub fn dedup(&mut self)
    where
        T: Eq,
    {
        self.dedup_by(|x, y| x == y)
    }

    /// Removes consecutive elements for which `same_bucket` returns `true`.
    pub fn dedup_by<F>(&mut self, same_bucket: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        self.dedup_by_starting_at(0, same_bucket);
    }

    /// Like [`LeanVec::dedup_by`] but leaves the first `start` elements
    /// untouched.
    pub fn dedup_by_starting_at<F>(&mut self, start: usize, mut same_bucket: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        self.vec.dedup_starting_at(
            start,
            // Re-type the erased pointers before calling the predicate.
            &mut |x, y| {
                let x = unsafe { &mut *(x as *mut T) };
                let y = unsafe { &mut *(y as *mut T) };
                same_bucket(x, y)
            },
            &mut |x| {
                let x = x as *mut T;
                unsafe { ptr::drop_in_place(x) };
            },
        )
    }

    /// Keeps only the elements for which `f` returns `true`.
    pub fn retain<F>(&mut self, f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        self.retain_starting_at(0, f);
    }

    /// Like [`LeanVec::retain`] but leaves the first `start` elements
    /// untouched.
    pub fn retain_starting_at<F>(&mut self, start: usize, mut f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        self.vec.retain_starting_at(
            start,
            &mut |x| {
                let x = unsafe { &mut *(x as *mut T) };
                f(x)
            },
            &mut |x| {
                let x = x as *mut T;
                unsafe { ptr::drop_in_place(x) };
            },
        )
    }

    /// `true` when adjacent elements are in non-decreasing order according
    /// to `compare`.
    pub fn is_sorted_by<F>(&self, compare: F) -> bool
    where
        F: Fn(&T, &T) -> Ordering,
    {
        self.vec.is_sorted_by(&|x, y| {
            let x = unsafe { &*(x as *const T) };
            let y = unsafe { &*(y as *const T) };
            compare(x, y)
        })
    }

    /// Type-erased iterator over the elements (yields `*const u8`).
    pub fn raw_iter(&self) -> RawIter<'_> {
        RawIter::new(&self.vec)
    }
}
impl<T> From<Vec<T>> for LeanVec<T> {
    /// Takes over the `Vec`'s allocation without copying; `ManuallyDrop`
    /// prevents the source from freeing the buffer it just handed off.
    fn from(other: Vec<T>) -> Self {
        let mut other = ManuallyDrop::new(other);
        unsafe { Self::from_raw_parts(other.as_mut_ptr(), other.len(), other.capacity()) }
    }
}
impl<T> From<Box<[T]>> for LeanVec<T> {
    /// Takes over a boxed slice's allocation; capacity equals length since a
    /// boxed slice has no spare capacity.
    fn from(other: Box<[T]>) -> Self {
        let mut other = ManuallyDrop::new(other);
        unsafe { Self::from_raw_parts(other.as_mut_ptr(), other.len(), other.len()) }
    }
}
impl<T> From<LeanVec<T>> for Vec<T> {
    /// Hands the allocation back to a `Vec` without copying; `ManuallyDrop`
    /// disarms `LeanVec::drop` so the buffer is not freed twice.
    fn from(vec: LeanVec<T>) -> Self {
        let mut vec = ManuallyDrop::new(vec);
        unsafe { Vec::from_raw_parts(vec.as_mut_ptr(), vec.len(), vec.capacity()) }
    }
}
impl<T> Drop for LeanVec<T> {
    /// Drops every element, then frees the allocation.
    fn drop(&mut self) {
        self.vec.drop_vec(&Self::drop_slice_in_place);
    }
}
// SAFETY: `LeanVec<T>` owns its elements exactly like `Vec<T>` (the raw
// pointer in `RawVec` is a uniquely-owned buffer), so it is `Send`/`Sync`
// precisely when `T` is.
unsafe impl<T: Send> Send for LeanVec<T> {}
unsafe impl<T: Sync> Sync for LeanVec<T> {}
impl<T, R: RangeBounds<usize>> Index<R> for LeanVec<T> {
    type Output = [T];

    /// Slices the vector by any range type; panics on an out-of-bounds or
    /// inverted range (see `RawVec::range`).
    fn index(&self, range: R) -> &Self::Output {
        let (ptr, len) = self.vec.range(range);
        unsafe { slice::from_raw_parts(ptr as *const T, len) }
    }
}
impl<T, R: RangeBounds<usize>> IndexMut<R> for LeanVec<T> {
    /// Mutable counterpart of the `Index` impl above.
    fn index_mut(&mut self, range: R) -> &mut Self::Output {
        let (ptr, len) = self.vec.range_mut(range);
        unsafe { slice::from_raw_parts_mut(ptr as *mut T, len) }
    }
}
impl<T: Clone> Clone for LeanVec<T> {
    /// Clones by allocating a buffer of exactly `self.len()` elements and
    /// cloning every element into it.
    fn clone(&self) -> Self {
        let mut duplicate = Self::with_capacity(self.len());
        duplicate.extend_from_slice(self.as_slice());
        duplicate
    }
}
impl<T> Default for LeanVec<T> {
fn default() -> Self {
Self::new()
}
}
impl<T: PartialEq> PartialEq for LeanVec<T> {
    /// Element-wise equality, re-typing the erased pointers before
    /// delegating to `T: PartialEq`.
    fn eq(&self, other: &Self) -> bool {
        self.vec.eq(&other.vec, &|x, y| {
            let x = unsafe { &*(x as *const T) };
            let y = unsafe { &*(y as *const T) };
            x == y
        })
    }
}
// Equality is total whenever element equality is total.
impl<T: Eq> Eq for LeanVec<T> {}
impl<T: Ord> PartialOrd for LeanVec<T> {
    /// Canonical delegation to the total order defined by `Ord`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl<T: Ord> Ord for LeanVec<T> {
    /// Lexicographic comparison, re-typing the erased pointers before
    /// delegating to `T: Ord`.
    fn cmp(&self, other: &Self) -> Ordering {
        self.vec.cmp(&other.vec, &|x, y| {
            let x = unsafe { &*(x as *const T) };
            let y = unsafe { &*(y as *const T) };
            x.cmp(y)
        })
    }
}
impl<T: Hash> Hash for LeanVec<T> {
    /// Hashes every element in index order.
    // NOTE(review): unlike `[T]::hash`, the length is not fed into the
    // hasher first — verify this matches how hashes are compared elsewhere.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.vec.hash(&mut |x| x.hash(state));
    }
}
impl<T: SizeOf> SizeOf for LeanVec<T> {
    /// Accounts for the buffer allocation plus each element's own children.
    fn size_of_children(&self, context: &mut Context) {
        self.vec.size_of_children(context, &mut |x, ctx| {
            let x = unsafe { &*(x as *const T) };
            T::size_of_children(x, ctx);
        });
    }
}
impl<T: Debug> Debug for LeanVec<T> {
    /// Formats as a debug list (like `Vec`'s `Debug`), re-typing each erased
    /// element pointer before delegating to `T: Debug`.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        self.vec.debug(f, &|x, f| {
            let x = unsafe { &*(x as *const T) };
            Debug::fmt(x, f)
        })
    }
}