use alloc::vec::Vec;
use core::alloc::Layout;
use core::ptr::NonNull;
/// A chunked, pointer-stable buffer of raw, fixed-layout elements.
///
/// Storage grows in fixed-size chunks that are never moved or reallocated,
/// so element pointers returned by `push_uninit` remain valid until the rope
/// is drained or dropped. Slots are handed out uninitialized; callers write
/// the element and then call `mark_last_initialized`.
#[derive(Debug)]
pub(crate) struct ListRope {
    // Base pointers of the allocated chunks; each chunk holds
    // `elements_per_chunk` element slots.
    chunks: Vec<NonNull<u8>>,
    // Size/alignment of one element slot; elements are laid out
    // back-to-back at `element_layout.size()` byte strides.
    element_layout: Layout,
    // Slots per chunk; fixed for the lifetime of the rope.
    elements_per_chunk: usize,
    // Total slots handed out by `push_uninit`, initialized or not.
    len: usize,
    // How many of the first `len` slots have been marked initialized;
    // iteration and draining only visit this prefix.
    initialized_count: usize,
}
impl ListRope {
/// Default number of element slots per chunk.
const DEFAULT_CHUNK_CAPACITY: usize = 16;

/// Creates an empty rope for elements of the given layout, using the
/// default chunk capacity.
///
/// The stored layout is padded to its alignment so that consecutive
/// elements placed at `size()`-byte strides stay properly aligned even
/// when the caller's layout size is not a multiple of its alignment
/// (a no-op for layouts from `Layout::new::<T>()`).
///
/// # Panics
///
/// Panics if `element_layout` has zero size.
pub fn new(element_layout: Layout) -> Self {
    assert!(
        element_layout.size() > 0,
        "ListRope should not be used for zero-sized types"
    );
    Self {
        chunks: Vec::new(),
        // Pad so that element `i` at offset `i * size()` is aligned.
        element_layout: element_layout.pad_to_align(),
        elements_per_chunk: Self::DEFAULT_CHUNK_CAPACITY,
        len: 0,
        initialized_count: 0,
    }
}
/// Test-only constructor that allows choosing the chunk capacity.
///
/// Like [`ListRope::new`], the stored layout is padded to its alignment so
/// fixed-stride element placement stays aligned.
///
/// # Panics
///
/// Panics if `element_layout` has zero size or `elements_per_chunk` is 0.
#[cfg(test)]
pub fn with_chunk_capacity(element_layout: Layout, elements_per_chunk: usize) -> Self {
    assert!(
        element_layout.size() > 0,
        "ListRope should not be used for zero-sized types"
    );
    assert!(elements_per_chunk > 0, "elements_per_chunk must be > 0");
    Self {
        chunks: Vec::new(),
        // Pad so that element `i` at offset `i * size()` is aligned.
        element_layout: element_layout.pad_to_align(),
        elements_per_chunk,
        len: 0,
        initialized_count: 0,
    }
}
/// Total number of slots handed out by `push_uninit`, including slots
/// that have not yet been marked initialized.
pub fn len(&self) -> usize {
    self.len
}
/// Returns `true` if no slots have been handed out.
#[allow(dead_code)]
pub fn is_empty(&self) -> bool {
    self.len == 0
}
/// The layout of a single element slot as stored by the rope.
#[allow(dead_code)]
pub fn element_layout(&self) -> Layout {
    self.element_layout
}
/// Reserves the next element slot and returns a pointer to its
/// (uninitialized) storage, allocating a new chunk when the current one
/// is full.
///
/// The returned pointer stays valid until the rope is drained or dropped;
/// the caller is expected to write an element and then call
/// `mark_last_initialized`.
pub fn push_uninit(&mut self) -> NonNull<u8> {
    let slot = self.len;
    let chunk_idx = slot / self.elements_per_chunk;
    let idx_in_chunk = slot % self.elements_per_chunk;
    // The rope grows one slot at a time, so at most one fresh chunk is
    // ever needed here.
    if chunk_idx >= self.chunks.len() {
        let fresh = self.allocate_chunk();
        self.chunks.push(fresh);
    }
    let base = self.chunks[chunk_idx];
    let byte_offset = idx_in_chunk * self.element_layout.size();
    self.len = slot + 1;
    // SAFETY: `base` points at a live chunk and `byte_offset` lies within
    // its `elements_per_chunk * size` bytes, so the result is non-null.
    unsafe { NonNull::new_unchecked(base.as_ptr().add(byte_offset)) }
}
/// Records that the most recently pushed slot has been fully written.
///
/// Callers are expected to initialize slots in push order: the counter
/// tracks a prefix, and iteration/draining visit exactly the first
/// `initialized_count` slots.
pub fn mark_last_initialized(&mut self) {
    debug_assert!(
        self.initialized_count < self.len,
        "mark_last_initialized called but no uninitialized elements"
    );
    self.initialized_count += 1;
}
/// Number of leading slots that have been marked initialized via
/// `mark_last_initialized`.
pub fn initialized_count(&self) -> usize {
    self.initialized_count
}
/// Returns a pointer to the slot at `index`; the slot may be
/// uninitialized.
///
/// # Safety
///
/// `index` must be less than `len()`.
#[allow(dead_code)]
pub unsafe fn get_ptr(&self, index: usize) -> NonNull<u8> {
    debug_assert!(index < self.len, "index out of bounds");
    let base = self.chunks[index / self.elements_per_chunk];
    let byte_offset = (index % self.elements_per_chunk) * self.element_layout.size();
    // SAFETY: an in-bounds index keeps `byte_offset` inside the chunk's
    // allocation, so the resulting pointer is valid and non-null.
    unsafe { NonNull::new_unchecked(base.as_ptr().add(byte_offset)) }
}
/// Iterates over pointers to the initialized prefix of slots, in push
/// order.
///
/// # Safety
///
/// The yielded pointers are only valid while the rope's chunks are alive;
/// callers must not outlive them past a drain/drop.
#[allow(dead_code)]
pub unsafe fn iter_initialized(&self) -> impl Iterator<Item = NonNull<u8>> + '_ {
    let per_chunk = self.elements_per_chunk;
    let stride = self.element_layout.size();
    (0..self.initialized_count).map(move |i| {
        let base = self.chunks[i / per_chunk];
        // SAFETY: i < initialized_count <= len, so the offset stays
        // inside the chunk's allocation and the pointer is non-null.
        unsafe { NonNull::new_unchecked(base.as_ptr().add((i % per_chunk) * stride)) }
    })
}
/// Allocates one uninitialized chunk with room for `elements_per_chunk`
/// element slots.
///
/// # Panics
///
/// Panics if the chunk byte size overflows `usize` or the layout is
/// invalid; diverts to the global allocation-error handler when the
/// allocator fails.
fn allocate_chunk(&self) -> NonNull<u8> {
    // Guard the multiplication explicitly: a huge element size times the
    // chunk capacity must not silently wrap in release builds.
    let chunk_size = self
        .element_layout
        .size()
        .checked_mul(self.elements_per_chunk)
        .expect("chunk size overflows usize");
    let chunk_layout =
        Layout::from_size_align(chunk_size, self.element_layout.align()).unwrap();
    // SAFETY: `chunk_layout` has non-zero size — the constructors assert
    // element size > 0 and elements_per_chunk > 0.
    let ptr = unsafe { alloc::alloc::alloc(chunk_layout) };
    match NonNull::new(ptr) {
        Some(chunk) => chunk,
        // Report OOM through the global handler instead of panicking, per
        // the global-allocator convention.
        None => alloc::alloc::handle_alloc_error(chunk_layout),
    }
}
/// Frees a chunk previously returned by `allocate_chunk`, recomputing the
/// same layout it was allocated with.
fn deallocate_chunk(&self, chunk: NonNull<u8>) {
    let bytes = self.element_layout.size() * self.elements_per_chunk;
    let layout = Layout::from_size_align(bytes, self.element_layout.align()).unwrap();
    // SAFETY: `chunk` came from the global allocator with exactly this
    // layout, and callers remove chunks from `self.chunks` before freeing
    // so each is deallocated at most once.
    unsafe { alloc::alloc::dealloc(chunk.as_ptr(), layout) };
}
/// Runs `drop_fn` (if provided) on every initialized element, frees all
/// chunks, and resets the rope to empty.
///
/// # Safety
///
/// `drop_fn`, when given, must be valid to call on a pointer to each
/// initialized element exactly once, and no previously returned element
/// pointers may be used after this call.
#[allow(dead_code)]
pub unsafe fn drop_all(&mut self, drop_fn: Option<unsafe fn(*mut u8)>) {
    if let Some(drop_fn) = drop_fn {
        for i in 0..self.initialized_count {
            let chunk_idx = i / self.elements_per_chunk;
            let idx_in_chunk = i % self.elements_per_chunk;
            let chunk_ptr = self.chunks[chunk_idx];
            // SAFETY: i < initialized_count <= len, so the offset stays
            // inside the chunk's allocation.
            unsafe {
                let element_ptr = chunk_ptr
                    .as_ptr()
                    .add(idx_in_chunk * self.element_layout.size());
                drop_fn(element_ptr);
            }
        }
    }
    // Detach the chunk list so `self` is free to be borrowed by
    // `deallocate_chunk`; avoids allocating an intermediate Vec.
    for chunk in core::mem::take(&mut self.chunks) {
        self.deallocate_chunk(chunk);
    }
    self.len = 0;
    self.initialized_count = 0;
}
/// Invokes `move_fn` on every initialized element pointer in push order,
/// then frees all chunks and resets the rope to empty.
///
/// # Safety
///
/// `move_fn` must move/consume the element behind each pointer exactly
/// once, and no previously returned element pointers may be used after
/// this call.
pub unsafe fn drain_into<F>(&mut self, mut move_fn: F)
where
    F: FnMut(NonNull<u8>),
{
    for i in 0..self.initialized_count {
        let chunk_idx = i / self.elements_per_chunk;
        let idx_in_chunk = i % self.elements_per_chunk;
        let chunk_ptr = self.chunks[chunk_idx];
        // SAFETY: i < initialized_count <= len, so the offset stays
        // inside the chunk's allocation and the pointer is non-null.
        unsafe {
            let element_ptr = chunk_ptr
                .as_ptr()
                .add(idx_in_chunk * self.element_layout.size());
            move_fn(NonNull::new_unchecked(element_ptr));
        }
    }
    // Detach the chunk list so `self` is free to be borrowed by
    // `deallocate_chunk`; avoids allocating an intermediate Vec.
    for chunk in core::mem::take(&mut self.chunks) {
        self.deallocate_chunk(chunk);
    }
    self.len = 0;
    self.initialized_count = 0;
}
}
impl Drop for ListRope {
    /// Frees all chunk allocations. Element destructors are NOT run here —
    /// callers must use `drop_all`/`drain_into` first. In debug builds with
    /// the `std` feature, dropping with initialized elements still present
    /// logs a potential-leak warning.
    fn drop(&mut self) {
        if self.chunks.is_empty() {
            return;
        }
        #[cfg(all(debug_assertions, feature = "std"))]
        if self.initialized_count > 0 {
            eprintln!(
                "ListRope dropped with {} initialized elements - potential memory leak",
                self.initialized_count
            );
        }
        // Detach the chunk list so `self` can be reborrowed by
        // `deallocate_chunk`, without allocating an intermediate Vec.
        for chunk in core::mem::take(&mut self.chunks) {
            self.deallocate_chunk(chunk);
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
// Push two u64 values, verify direct pointer reads, then drain them out.
#[test]
fn test_rope_basic() {
    let mut rope = ListRope::new(Layout::new::<u64>());
    assert_eq!(rope.len(), 0);
    assert!(rope.is_empty());

    // Helper: reserve a slot, write a value, mark it initialized.
    let mut push_value = |rope: &mut ListRope, value: u64| {
        let slot = rope.push_uninit();
        unsafe { slot.cast::<u64>().as_ptr().write(value) };
        rope.mark_last_initialized();
        slot
    };
    let first = push_value(&mut rope, 42);
    let second = push_value(&mut rope, 99);

    assert_eq!(rope.len(), 2);
    assert_eq!(rope.initialized_count(), 2);
    unsafe {
        assert_eq!(first.cast::<u64>().as_ptr().read(), 42);
        assert_eq!(second.cast::<u64>().as_ptr().read(), 99);
    }

    let mut drained = Vec::new();
    unsafe {
        rope.drain_into(|slot| drained.push(slot.cast::<u64>().as_ptr().read()));
    }
    assert_eq!(drained, vec![42, 99]);
    assert_eq!(rope.len(), 0);
}
// Ten elements with four per chunk must span three chunks, and every
// pointer handed out while the rope grew must still read back correctly.
#[test]
fn test_rope_multiple_chunks() {
    let mut rope = ListRope::with_chunk_capacity(Layout::new::<u32>(), 4);
    let ptrs: Vec<_> = (0..10u32)
        .map(|i| {
            let slot = rope.push_uninit();
            unsafe { slot.cast::<u32>().as_ptr().write(i) };
            rope.mark_last_initialized();
            slot
        })
        .collect();

    assert_eq!(rope.len(), 10);
    assert_eq!(rope.chunks.len(), 3);
    for (expected, slot) in ptrs.iter().enumerate() {
        unsafe { assert_eq!(slot.cast::<u32>().as_ptr().read(), expected as u32) };
    }

    let mut drained = Vec::new();
    unsafe {
        rope.drain_into(|slot| drained.push(slot.cast::<u32>().as_ptr().read()));
    }
    assert_eq!(drained, (0..10).collect::<Vec<_>>());
}
// With chunk capacity 2, the third push forces a second chunk; pointers
// obtained before that growth must still read their original values.
#[test]
fn test_rope_pointer_stability_across_chunks() {
    let mut rope = ListRope::with_chunk_capacity(Layout::new::<u64>(), 2);
    let values = [111u64, 222, 333];
    let ptrs: Vec<_> = values
        .iter()
        .map(|&v| {
            let slot = rope.push_uninit();
            unsafe { slot.cast::<u64>().as_ptr().write(v) };
            rope.mark_last_initialized();
            slot
        })
        .collect();

    for (slot, &expected) in ptrs.iter().zip(values.iter()) {
        unsafe { assert_eq!(slot.cast::<u64>().as_ptr().read(), expected) };
    }
    unsafe { rope.drop_all(None) };
}
#[test]
fn test_rope_with_drop() {
    use core::sync::atomic::{AtomicUsize, Ordering};
    // Counts destructor runs so we can verify drop_all invokes drop_fn
    // exactly once per initialized element.
    static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);
    #[repr(C)]
    struct DropCounter(u64);
    impl Drop for DropCounter {
        fn drop(&mut self) {
            DROP_COUNT.fetch_add(1, Ordering::SeqCst);
        }
    }
    // Reset the shared static in case of interference from other runs.
    DROP_COUNT.store(0, Ordering::SeqCst);
    let layout = Layout::new::<DropCounter>();
    let mut rope = ListRope::with_chunk_capacity(layout, 4);
    for i in 0..5u64 {
        let ptr = rope.push_uninit();
        unsafe {
            ptr.cast::<DropCounter>().as_ptr().write(DropCounter(i));
        }
        rope.mark_last_initialized();
    }
    // Storing elements alone must not run any destructors.
    assert_eq!(DROP_COUNT.load(Ordering::SeqCst), 0);
    // The non-capturing closure coerces to the `unsafe fn(*mut u8)`
    // expected by drop_all.
    unsafe {
        rope.drop_all(Some(|ptr| {
            core::ptr::drop_in_place(ptr as *mut DropCounter);
        }));
    }
    // All five initialized elements ran their destructor exactly once.
    assert_eq!(DROP_COUNT.load(Ordering::SeqCst), 5);
}
// Seven i32s across chunks of three; iter_initialized must visit all of
// them in push order.
#[test]
fn test_rope_iter_initialized() {
    let mut rope = ListRope::with_chunk_capacity(Layout::new::<i32>(), 3);
    for i in 0..7i32 {
        let slot = rope.push_uninit();
        unsafe { slot.cast::<i32>().as_ptr().write(i * 10) };
        rope.mark_last_initialized();
    }

    let collected: Vec<i32> = unsafe {
        rope.iter_initialized()
            .map(|slot| slot.cast::<i32>().as_ptr().read())
            .collect()
    };
    assert_eq!(collected, vec![0, 10, 20, 30, 40, 50, 60]);
    unsafe { rope.drop_all(None) };
}
// Two initialized slots plus one reserved-but-unwritten slot: only the
// initialized prefix may be visible through iter_initialized.
#[test]
fn test_rope_partial_initialization() {
    let mut rope = ListRope::new(Layout::new::<u64>());
    for &v in &[1u64, 2] {
        let slot = rope.push_uninit();
        unsafe { slot.cast::<u64>().as_ptr().write(v) };
        rope.mark_last_initialized();
    }
    // Reserve a third slot but never write or mark it.
    let _reserved = rope.push_uninit();

    assert_eq!(rope.len(), 3);
    assert_eq!(rope.initialized_count(), 2);
    let seen: Vec<u64> = unsafe {
        rope.iter_initialized()
            .map(|slot| slot.cast::<u64>().as_ptr().read())
            .collect()
    };
    assert_eq!(seen, vec![1, 2]);
    unsafe { rope.drop_all(None) };
}
}