use alloc::vec::Vec;
use core::{
mem::{ManuallyDrop, MaybeUninit},
sync::atomic::{AtomicBool, Ordering},
};
/// A fixed-length buffer whose slots can be initialized out of order,
/// with per-slot completion tracked by atomic flags.
///
/// Writers obtain the base pointer via [`as_mut_ptr`](Self::as_mut_ptr),
/// write slot `i` in place, then set `flags()[i]` (Release). Once every
/// slot is flagged, [`into_vec`](Self::into_vec) converts the buffer into
/// a `Vec<T>` without copying. If the value is dropped early, only the
/// slots whose flag is set are dropped.
pub struct PartialInit<T> {
    /// Backing storage. Wrapped in `ManuallyDrop` so the `Vec` never frees
    /// itself: either the custom `Drop` impl frees it, or `into_vec` hands
    /// the allocation to the returned `Vec<T>`.
    data: ManuallyDrop<Vec<MaybeUninit<T>>>,
    /// `flags[i]` is `true` once slot `i` has been fully written.
    flags: Vec<AtomicBool>,
}
impl<T> PartialInit<T> {
    /// Creates a buffer with `len` uninitialized slots and all flags cleared.
    pub fn new(len: usize) -> Self {
        // `MaybeUninit<T>` requires no initialization, so the slots can be
        // materialized safely — no `unsafe { set_len }` needed.
        let mut data = Vec::with_capacity(len);
        data.resize_with(len, MaybeUninit::uninit);
        let flags = (0..len).map(|_| AtomicBool::new(false)).collect();
        Self {
            data: ManuallyDrop::new(data),
            flags,
        }
    }
    /// Raw pointer to slot 0; writers reach slot `i` with `ptr.add(i)`.
    pub fn as_mut_ptr(&mut self) -> *mut MaybeUninit<T> {
        self.data.as_mut_ptr()
    }
    /// Per-slot completion flags. Store `true` with `Release` after writing
    /// a slot; `into_vec` and `Drop` read them with `Acquire`.
    pub fn flags(&self) -> &[AtomicBool] {
        &self.flags
    }
    /// Converts the buffer into a `Vec<T>` without copying the elements.
    ///
    /// # Safety
    ///
    /// Every slot must have been fully initialized before this is called;
    /// otherwise the returned `Vec` holds (and will later drop)
    /// uninitialized values. The flag check below is debug-only.
    pub unsafe fn into_vec(mut self) -> Vec<T> {
        debug_assert!(
            self.flags.iter().all(|f| f.load(Ordering::Acquire)),
            "into_vec called with uninitialized slots"
        );
        let len = self.data.len();
        let cap = self.data.capacity();
        let ptr = self.data.as_mut_ptr().cast::<T>();
        // Suppress `Drop for PartialInit`: it would drop elements and free
        // the buffer we are about to hand to the new `Vec`. Moving `self`
        // does not move the heap buffer, so `ptr` stays valid.
        let mut md = ManuallyDrop::new(self);
        // SAFETY: `flags` is a valid, initialized field that is never
        // touched again; dropping it in place releases its allocation,
        // since the outer destructor will not run.
        unsafe { core::ptr::drop_in_place(&mut md.flags) };
        // SAFETY: `ptr`/`len`/`cap` describe the original allocation,
        // `MaybeUninit<T>` has the same layout as `T`, and the caller
        // guarantees all `len` elements are initialized.
        unsafe { Vec::from_raw_parts(ptr, len, cap) }
    }
}
impl<T> Drop for PartialInit<T> {
    fn drop(&mut self) {
        // Drop exactly the elements whose flag was set; untouched slots are
        // `MaybeUninit` and need no drop. Safe iteration replaces the old
        // index + `get_unchecked_mut` pattern.
        for (slot, flag) in self.data.iter_mut().zip(self.flags.iter()) {
            if flag.load(Ordering::Acquire) {
                // SAFETY: a set flag means this slot was fully written.
                unsafe { slot.assume_init_drop() };
            }
        }
        // SAFETY: elements were handled above; this frees the backing
        // buffer exactly once (automatic drop is suppressed by
        // `ManuallyDrop`).
        unsafe { ManuallyDrop::drop(&mut self.data) };
    }
}
#[cfg(test)]
#[allow(
    clippy::unwrap_used,
    clippy::expect_used,
    clippy::cast_possible_truncation,
    clippy::items_after_statements
)]
mod tests {
    use super::PartialInit;
    use crate::codec::compressed_vector::CompressedVector;
    use alloc::sync::Arc;

    /// Marks slot `i` as initialized. The `Release` store pairs with the
    /// `Acquire` loads in `into_vec` and `Drop`.
    fn mark(pi: &PartialInit<impl Sized>, i: usize) {
        if let Some(flag) = pi.flags().get(i) {
            flag.store(true, core::sync::atomic::Ordering::Release);
        }
    }

    #[test]
    fn partial_init_all_slots_round_trips() {
        let mut pi: PartialInit<u32> = PartialInit::new(4);
        let base = pi.as_mut_ptr();
        for i in 0..4_usize {
            // Truncating cast is covered by the module-level allow.
            let val = i as u32 * 10;
            unsafe { base.add(i).cast::<u32>().write(val) };
            mark(&pi, i);
        }
        let v = unsafe { pi.into_vec() };
        assert_eq!(v, alloc::vec![0, 10, 20, 30]);
    }

    #[test]
    fn partial_init_drops_initialized_on_partial_error() {
        use core::sync::atomic::{AtomicUsize, Ordering as Ord};

        /// Increments the shared counter every time it is dropped.
        struct Dropper(Arc<AtomicUsize>);
        impl Drop for Dropper {
            fn drop(&mut self) {
                self.0.fetch_add(1, Ord::SeqCst);
            }
        }

        let counter = Arc::new(AtomicUsize::new(0));
        let mut pi: PartialInit<Dropper> = PartialInit::new(3);
        let base = pi.as_mut_ptr();
        unsafe {
            base.add(0)
                .cast::<Dropper>()
                .write(Dropper(Arc::clone(&counter)));
        }
        mark(&pi, 0);
        unsafe {
            base.add(2)
                .cast::<Dropper>()
                .write(Dropper(Arc::clone(&counter)));
        }
        mark(&pi, 2);
        // Slot 1 was never initialized: exactly two drops must run.
        drop(pi);
        assert_eq!(counter.load(Ord::SeqCst), 2);
    }

    #[test]
    fn partial_init_empty() {
        let mut pi: PartialInit<u32> = PartialInit::new(0);
        let _ = pi.as_mut_ptr();
        let v = unsafe { pi.into_vec() };
        assert_eq!(v.len(), 0);
    }

    #[test]
    fn partial_init_into_vec_with_compressed_vector() {
        let hash: Arc<str> = Arc::from("test-hash");
        let cv0 = CompressedVector::new(
            alloc::vec![1_u8, 2_u8].into_boxed_slice(),
            None,
            Arc::clone(&hash),
            2,
            4,
        )
        .unwrap();
        let cv1 = CompressedVector::new(
            alloc::vec![3_u8, 4_u8].into_boxed_slice(),
            None,
            Arc::clone(&hash),
            2,
            4,
        )
        .unwrap();
        let mut pi: PartialInit<CompressedVector> = PartialInit::new(2);
        let base = pi.as_mut_ptr();
        unsafe { base.add(0).cast::<CompressedVector>().write(cv0.clone()) };
        mark(&pi, 0);
        unsafe { base.add(1).cast::<CompressedVector>().write(cv1.clone()) };
        mark(&pi, 1);
        let v = unsafe { pi.into_vec() };
        assert_eq!(v.len(), 2);
        assert_eq!(v.first().unwrap().indices(), cv0.indices());
        assert_eq!(v.get(1).unwrap().indices(), cv1.indices());
    }
}