pub trait Allocator: Sealed {
type Path;
Show 106 methods
// Required methods
fn reserved_bytes(&self) -> usize;
fn reserved_slice(&self) -> &[u8] ⓘ;
unsafe fn reserved_slice_mut(&self) -> &mut [u8] ⓘ;
unsafe fn alloc<T>(&self) -> Result<RefMut<'_, T, Self>, Error>;
fn alloc_aligned_bytes<T>(
&self,
size: u32,
) -> Result<BytesRefMut<'_, Self>, Error>;
fn alloc_bytes(&self, size: u32) -> Result<BytesRefMut<'_, Self>, Error>;
fn allocated(&self) -> usize;
fn raw_mut_ptr(&self) -> *mut u8;
fn raw_ptr(&self) -> *const u8;
unsafe fn clear(&self) -> Result<(), Error>;
unsafe fn dealloc(&self, offset: u32, size: u32) -> bool;
fn discard_freelist(&self) -> Result<u32, Error>;
fn discarded(&self) -> u32;
fn increase_discarded(&self, size: u32);
fn magic_version(&self) -> u16;
fn minimum_segment_size(&self) -> u32;
fn set_minimum_segment_size(&self, size: u32);
fn path(&self) -> Option<&Self::Path>;
unsafe fn offset(&self, ptr: *const u8) -> usize;
fn page_size(&self) -> usize;
fn refs(&self) -> usize;
fn remaining(&self) -> usize;
unsafe fn rewind(&self, pos: ArenaPosition);
fn version(&self) -> u16;
// Provided methods
fn alloc_aligned_bytes_owned<T>(
&self,
size: u32,
) -> Result<BytesMut<Self>, Error> { ... }
fn alloc_bytes_owned(&self, size: u32) -> Result<BytesMut<Self>, Error> { ... }
unsafe fn alloc_owned<T>(&self) -> Result<Owned<T, Self>, Error> { ... }
fn allocated_memory(&self) -> &[u8] ⓘ { ... }
fn capacity(&self) -> usize { ... }
fn data_offset(&self) -> usize { ... }
fn data(&self) -> &[u8] ⓘ { ... }
fn flush(&self) -> Result<()> { ... }
fn flush_async(&self) -> Result<()> { ... }
fn flush_range(&self, offset: usize, len: usize) -> Result<()> { ... }
fn flush_async_range(&self, offset: usize, len: usize) -> Result<()> { ... }
fn flush_header(&self) -> Result<()> { ... }
fn flush_async_header(&self) -> Result<()> { ... }
fn flush_header_and_range(&self, offset: usize, len: usize) -> Result<()> { ... }
fn flush_async_header_and_range(
&self,
offset: usize,
len: usize,
) -> Result<()> { ... }
unsafe fn get_pointer(&self, offset: usize) -> *const u8 { ... }
unsafe fn get_pointer_mut(&self, offset: usize) -> *mut u8 { ... }
unsafe fn get_aligned_pointer<T>(&self, offset: usize) -> *const T { ... }
unsafe fn get_aligned_pointer_mut<T>(&self, offset: usize) -> NonNull<T> { ... }
unsafe fn get_bytes(&self, offset: usize, size: usize) -> &[u8] ⓘ { ... }
fn get_u8(&self, offset: usize) -> Result<u8, Error> { ... }
fn get_i8(&self, offset: usize) -> Result<i8, Error> { ... }
unsafe fn get_u8_unchecked(&self, offset: usize) -> u8 { ... }
unsafe fn get_i8_unchecked(&self, offset: usize) -> i8 { ... }
fn get_u16_be(&self, offset: usize) -> Result<u16, Error> { ... }
unsafe fn get_u16_be_unchecked(&self, offset: usize) -> u16 { ... }
fn get_u16_le(&self, offset: usize) -> Result<u16, Error> { ... }
unsafe fn get_u16_le_unchecked(&self, offset: usize) -> u16 { ... }
fn get_u32_be(&self, offset: usize) -> Result<u32, Error> { ... }
unsafe fn get_u32_be_unchecked(&self, offset: usize) -> u32 { ... }
fn get_u32_le(&self, offset: usize) -> Result<u32, Error> { ... }
unsafe fn get_u32_le_unchecked(&self, offset: usize) -> u32 { ... }
fn get_u64_be(&self, offset: usize) -> Result<u64, Error> { ... }
unsafe fn get_u64_be_unchecked(&self, offset: usize) -> u64 { ... }
fn get_u64_le(&self, offset: usize) -> Result<u64, Error> { ... }
unsafe fn get_u64_le_unchecked(&self, offset: usize) -> u64 { ... }
fn get_u128_be(&self, offset: usize) -> Result<u128, Error> { ... }
unsafe fn get_u128_be_unchecked(&self, offset: usize) -> u128 { ... }
fn get_u128_le(&self, offset: usize) -> Result<u128, Error> { ... }
unsafe fn get_u128_le_unchecked(&self, offset: usize) -> u128 { ... }
fn get_i16_be(&self, offset: usize) -> Result<i16, Error> { ... }
unsafe fn get_i16_be_unchecked(&self, offset: usize) -> i16 { ... }
fn get_i16_le(&self, offset: usize) -> Result<i16, Error> { ... }
unsafe fn get_i16_le_unchecked(&self, offset: usize) -> i16 { ... }
fn get_i32_be(&self, offset: usize) -> Result<i32, Error> { ... }
unsafe fn get_i32_be_unchecked(&self, offset: usize) -> i32 { ... }
fn get_i32_le(&self, offset: usize) -> Result<i32, Error> { ... }
unsafe fn get_i32_le_unchecked(&self, offset: usize) -> i32 { ... }
fn get_i64_be(&self, offset: usize) -> Result<i64, Error> { ... }
unsafe fn get_i64_be_unchecked(&self, offset: usize) -> i64 { ... }
fn get_i64_le(&self, offset: usize) -> Result<i64, Error> { ... }
unsafe fn get_i64_le_unchecked(&self, offset: usize) -> i64 { ... }
fn get_i128_be(&self, offset: usize) -> Result<i128, Error> { ... }
unsafe fn get_i128_be_unchecked(&self, offset: usize) -> i128 { ... }
fn get_i128_le(&self, offset: usize) -> Result<i128, Error> { ... }
unsafe fn get_i128_le_unchecked(&self, offset: usize) -> i128 { ... }
fn get_i16_varint(&self, offset: usize) -> Result<(usize, i16), Error> { ... }
fn get_i32_varint(&self, offset: usize) -> Result<(usize, i32), Error> { ... }
fn get_i64_varint(&self, offset: usize) -> Result<(usize, i64), Error> { ... }
fn get_i128_varint(&self, offset: usize) -> Result<(usize, i128), Error> { ... }
fn get_u16_varint(&self, offset: usize) -> Result<(usize, u16), Error> { ... }
fn get_u32_varint(&self, offset: usize) -> Result<(usize, u32), Error> { ... }
fn get_u64_varint(&self, offset: usize) -> Result<(usize, u64), Error> { ... }
fn get_u128_varint(&self, offset: usize) -> Result<(usize, u128), Error> { ... }
unsafe fn get_bytes_mut(&self, offset: usize, size: usize) -> &mut [u8] ⓘ { ... }
fn is_map(&self) -> bool { ... }
fn is_ondisk(&self) -> bool { ... }
fn is_inmemory(&self) -> bool { ... }
fn is_map_anon(&self) -> bool { ... }
fn is_map_file(&self) -> bool { ... }
fn lock_exclusive(&self) -> Result<()> { ... }
fn lock_shared(&self) -> Result<()> { ... }
fn memory(&self) -> &[u8] ⓘ { ... }
fn checksum<S: BuildChecksumer>(&self, cks: &S) -> u64 { ... }
fn unify(&self) -> bool { ... }
unsafe fn mlock(&self, offset: usize, len: usize) -> Result<()> { ... }
unsafe fn munlock(&self, offset: usize, len: usize) -> Result<()> { ... }
fn read_only(&self) -> bool { ... }
fn remove_on_drop(&self, remove_on_drop: bool) { ... }
fn try_lock_exclusive(&self) -> Result<()> { ... }
fn try_lock_shared(&self) -> Result<()> { ... }
fn unlock(&self) -> Result<()> { ... }
}Expand description
A trait for easily interacting with the sync and unsync allocators.
Required Associated Types§
Required Methods§
Sourcefn reserved_bytes(&self) -> usize
fn reserved_bytes(&self) -> usize
Returns the number of bytes that are reserved by the allocator.
Sourcefn reserved_slice(&self) -> &[u8] ⓘ
fn reserved_slice(&self) -> &[u8] ⓘ
Returns the reserved bytes of the allocator specified in the Options::with_reserved.
Sourceunsafe fn reserved_slice_mut(&self) -> &mut [u8] ⓘ
unsafe fn reserved_slice_mut(&self) -> &mut [u8] ⓘ
Returns the mutable reserved bytes of the allocator specified in the Options::with_reserved.
§Safety
- The caller needs to make sure there is no data race.
§Panic
- If in read-only mode, and num of reserved bytes is greater than 0, this method will panic.
Sourceunsafe fn alloc<T>(&self) -> Result<RefMut<'_, T, Self>, Error>
unsafe fn alloc<T>(&self) -> Result<RefMut<'_, T, Self>, Error>
Allocates a T in the allocator.
§Safety
- If `T` needs to be dropped and callers invoke `RefMut::detach`, then the caller must ensure that the `T` is dropped before the allocator is dropped. Otherwise, it will lead to memory leaks.
- If this is a file-backed allocator, then `T` must be recoverable from bytes.
  - Types that require allocation are not recoverable.
  - Pointers are not recoverable, like `*const T`, `*mut T`, `NonNull`, and any structs containing pointers; although those types live on the stack, they cannot be recovered when the file is reopened.
§Examples
§Memory leak
The following example demonstrates the memory leak when the T is a heap allocated type and detached.
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
{
let mut data = arena.alloc::<Vec<u8>>().unwrap();
data.detach();
data.write(vec![1, 2, 3]);
}
drop(arena); // memory leak, the `Vec<u8>` is not dropped.§Undefined behavior
The following example demonstrates the undefined behavior when the T is not recoverable.
struct TypeOnHeap {
data: Vec<u8>,
}
let arena = Options::new().with_create_new(1000).with_read(true).with_write(true).map_mut::<Arena, _>("path/to/file").unwrap();
let mut data = arena.alloc::<TypeOnHeap>().unwrap();
data.detach();
data.write(TypeOnHeap { data: vec![1, 2, 3] });
let offset = data.offset();
drop(arena);
// reopen the file
let arena = Options::new().with_read(true).map::<Arena, _>("path/to/file").unwrap();
let foo = &*arena.get_aligned_pointer::<TypeOnHeap>(offset as usize);
let b = foo.data[1]; // undefined behavior, the `data`'s pointer stored in the file is not valid anymore.§Good practice
Some examples about how to use this method correctly.
§Heap allocated type with careful memory management
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
// Do not invoke detach, so when the data is dropped, the drop logic will be handled by the allocator.
// automatically.
{
let mut data = arena.alloc::<Vec<u8>>().unwrap();
data.write(vec![1, 2, 3]);
}
let mut detached_data = arena.alloc::<Vec<u8>>().unwrap();
detached_data.detach();
detached_data.write(vec![4, 5, 6]);
// some other logic
core::ptr::drop_in_place(detached_data.as_mut()); // drop the `Vec` manually.
drop(arena); // it is safe, the `Vec` is already dropped.§Recoverable type with file backed allocator
struct Recoverable {
field1: u64,
field2: AtomicU32,
}
let arena = Options::new().with_create_new(1000).with_read(true).with_write(true).map_mut::<Arena, _>("path/to/file").unwrap();
let mut data = arena.alloc::<Recoverable>().unwrap();
data.write(Recoverable { field1: 10, field2: AtomicU32::new(20) });
data.detach();
let offset = data.offset();
drop(arena);
// reopen the file
let arena = Options::new().with_read(true).map::<Arena, _>("path/to/file").unwrap();
let foo = &*arena.get_aligned_pointer::<Recoverable>(offset as usize);
assert_eq!(foo.field1, 10);
assert_eq!(foo.field2.load(Ordering::Acquire), 20);Sourcefn alloc_aligned_bytes<T>(
&self,
size: u32,
) -> Result<BytesRefMut<'_, Self>, Error>
fn alloc_aligned_bytes<T>( &self, size: u32, ) -> Result<BytesRefMut<'_, Self>, Error>
Sourcefn alloc_bytes(&self, size: u32) -> Result<BytesRefMut<'_, Self>, Error>
fn alloc_bytes(&self, size: u32) -> Result<BytesRefMut<'_, Self>, Error>
Allocates a slice of memory in the allocator.
The BytesRefMut is zeroed out.
If you want a BytesMut, see alloc_bytes_owned.
Sourcefn allocated(&self) -> usize
fn allocated(&self) -> usize
Returns the number of bytes allocated by the allocator.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let allocated = arena.allocated();Sourcefn raw_mut_ptr(&self) -> *mut u8
fn raw_mut_ptr(&self) -> *mut u8
Returns the start pointer of the main memory of the allocator.
Sourceunsafe fn clear(&self) -> Result<(), Error>
unsafe fn clear(&self) -> Result<(), Error>
Clear the allocator.
§Safety
- The current pointers get from the allocator cannot be used anymore after calling this method.
- This method is not thread-safe.
§Examples
Undefined behavior:
let mut data = arena.alloc::<Vec<u8>>().unwrap();
arena.clear();
data.write(vec![1, 2, 3]); // undefined behaviorGood practice:
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
unsafe {
let mut data = arena.alloc::<Vec<u8>>().unwrap();
data.write(vec![1, 2, 3]);
arena.clear().unwrap();
}
Sourceunsafe fn dealloc(&self, offset: u32, size: u32) -> bool
unsafe fn dealloc(&self, offset: u32, size: u32) -> bool
Deallocates the memory at the given offset and size, the offset..offset + size will be made to a segment,
returns true if the deallocation is successful.
§Safety
- You must ensure the same `offset..offset + size` is not deallocated twice.
- `offset` must be larger than the `Allocator::data_offset`.
- `offset + size` must be less than the `Allocator::allocated`.
Sourcefn discard_freelist(&self) -> Result<u32, Error>
fn discard_freelist(&self) -> Result<u32, Error>
Discards all freelist nodes in the allocator.
Returns the number of bytes discarded.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
arena.discard_freelist();Sourcefn discarded(&self) -> u32
fn discarded(&self) -> u32
Returns the number of bytes discarded by the allocator.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let discarded = arena.discarded();Sourcefn increase_discarded(&self, size: u32)
fn increase_discarded(&self, size: u32)
Forcibly increases the discarded bytes.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
arena.increase_discarded(100);Sourcefn magic_version(&self) -> u16
fn magic_version(&self) -> u16
Returns the magic version of the allocator. This value can be used to check the compatibility for application using
Allocator.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let magic_version = arena.magic_version();Sourcefn minimum_segment_size(&self) -> u32
fn minimum_segment_size(&self) -> u32
Returns the minimum segment size of the allocator.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let min_segment_size = arena.minimum_segment_size();Sourcefn set_minimum_segment_size(&self, size: u32)
fn set_minimum_segment_size(&self, size: u32)
Sets the minimum segment size of the allocator.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
arena.set_minimum_segment_size(100);Sourcefn path(&self) -> Option<&Self::Path>
Available on crate feature memmap and non-target_family="wasm" only.
fn path(&self) -> Option<&Self::Path>
memmap and non-target_family="wasm" only.Returns the path of the mmap file, only returns Some when the ARENA is backed by a mmap file.
§Example
let path = arena.path();Sourcefn page_size(&self) -> usize
fn page_size(&self) -> usize
Returns the page size.
If in no-std environment, then this method will return 4096.
Otherwise, it will return the system’s page size.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let page_size = arena.page_size();Sourcefn refs(&self) -> usize
fn refs(&self) -> usize
Returns the number of references to the allocator.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let refs = arena.refs();Sourcefn remaining(&self) -> usize
fn remaining(&self) -> usize
Returns the number of remaining bytes that can be allocated by the allocator.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let remaining = arena.remaining();Sourceunsafe fn rewind(&self, pos: ArenaPosition)
unsafe fn rewind(&self, pos: ArenaPosition)
Set back the allocator’s main memory cursor to the given position.
§Safety
- If the current position is larger than the given position, then the memory between the current position and the given position will be reclaimed, so must ensure the memory chunk between the current position and the given position will not be accessed anymore.
- This method is not thread safe.
Provided Methods§
Sourcefn alloc_bytes_owned(&self, size: u32) -> Result<BytesMut<Self>, Error>
fn alloc_bytes_owned(&self, size: u32) -> Result<BytesMut<Self>, Error>
Allocates an owned slice of memory in the allocator.
The cost of this method is an extra atomic operation, compared to alloc_bytes.
Sourceunsafe fn alloc_owned<T>(&self) -> Result<Owned<T, Self>, Error>
unsafe fn alloc_owned<T>(&self) -> Result<Owned<T, Self>, Error>
Allocates a T in the allocator. Like alloc, but returns an Owned.
The cost is one more atomic operation than alloc.
§Safety
- See `alloc` for safety.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
unsafe {
let mut data = arena.alloc_owned::<u64>().unwrap();
data.write(10);
assert_eq!(*data.as_ref(), 10);
}Sourcefn allocated_memory(&self) -> &[u8] ⓘ
fn allocated_memory(&self) -> &[u8] ⓘ
Returns the whole main memory of the allocator as a byte slice.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let memory = arena.allocated_memory();Sourcefn capacity(&self) -> usize
fn capacity(&self) -> usize
Returns the capacity of the allocator.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let capacity = arena.capacity();Sourcefn data_offset(&self) -> usize
fn data_offset(&self) -> usize
Returns the data offset of the allocator. The offset is the end of the reserved bytes of the allocator.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let data_offset = arena.data_offset();Sourcefn data(&self) -> &[u8] ⓘ
fn data(&self) -> &[u8] ⓘ
Returns the data section of the allocator as a byte slice, header is not included.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let data = arena.data();Sourcefn flush(&self) -> Result<()>
Available on crate feature memmap and non-target_family="wasm" only.
fn flush(&self) -> Result<()>
memmap and non-target_family="wasm" only.Flushes the memory-mapped file to disk.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.flush().unwrap();
Sourcefn flush_async(&self) -> Result<()>
Available on crate feature memmap and non-target_family="wasm" only.
fn flush_async(&self) -> Result<()>
memmap and non-target_family="wasm" only.Flushes the memory-mapped file to disk asynchronously.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.flush_async().unwrap();
Sourcefn flush_range(&self, offset: usize, len: usize) -> Result<()>
Available on crate feature memmap and non-target_family="wasm" only.
fn flush_range(&self, offset: usize, len: usize) -> Result<()>
memmap and non-target_family="wasm" only.Flushes outstanding memory map modifications in the range to disk.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.flush_range(0, 100).unwrap();
Sourcefn flush_async_range(&self, offset: usize, len: usize) -> Result<()>
Available on crate feature memmap and non-target_family="wasm" only.
fn flush_async_range(&self, offset: usize, len: usize) -> Result<()>
memmap and non-target_family="wasm" only.Asynchronously flushes outstanding memory map modifications in the range to disk.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.flush_async_range(0, 100).unwrap();
Sourcefn flush_header(&self) -> Result<()>
Available on crate feature memmap and non-target_family="wasm" only.
fn flush_header(&self) -> Result<()>
memmap and non-target_family="wasm" only.Flushes outstanding memory map modifications in Allocator’s header to disk.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.flush_header().unwrap();
Sourcefn flush_async_header(&self) -> Result<()>
Available on crate feature memmap and non-target_family="wasm" only.
fn flush_async_header(&self) -> Result<()>
memmap and non-target_family="wasm" only.Asynchronously flushes outstanding memory map modifications in Allocator’s header to disk.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.flush_async_header().unwrap();
Sourcefn flush_header_and_range(&self, offset: usize, len: usize) -> Result<()>
Available on crate feature memmap and non-target_family="wasm" only.
fn flush_header_and_range(&self, offset: usize, len: usize) -> Result<()>
memmap and non-target_family="wasm" only.Flushes outstanding memory map modifications in the range and Allocator’s header to disk.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.flush_header_and_range(0, 100).unwrap();
Sourcefn flush_async_header_and_range(&self, offset: usize, len: usize) -> Result<()>
Available on crate feature memmap and non-target_family="wasm" only.
fn flush_async_header_and_range(&self, offset: usize, len: usize) -> Result<()>
memmap and non-target_family="wasm" only.Asynchronously flushes outstanding memory map modifications in the range and Allocator’s header to disk.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.flush_async_header_and_range(0, 100).unwrap();
Sourceunsafe fn get_pointer(&self, offset: usize) -> *const u8
unsafe fn get_pointer(&self, offset: usize) -> *const u8
Returns a pointer to the memory at the given offset.
§Safety
- `offset` must be less than the capacity of the allocator.
Sourceunsafe fn get_pointer_mut(&self, offset: usize) -> *mut u8
unsafe fn get_pointer_mut(&self, offset: usize) -> *mut u8
Sourceunsafe fn get_aligned_pointer<T>(&self, offset: usize) -> *const T
unsafe fn get_aligned_pointer<T>(&self, offset: usize) -> *const T
Returns an aligned pointer to the memory at the given offset.
§Safety
- `offset..offset + mem::size_of::<T>() + padding` must be allocated memory.
- `offset` must be less than the capacity of the allocator.
Sourceunsafe fn get_aligned_pointer_mut<T>(&self, offset: usize) -> NonNull<T>
unsafe fn get_aligned_pointer_mut<T>(&self, offset: usize) -> NonNull<T>
Returns an aligned pointer to the memory at the given offset. If the allocator is read-only, then this method will return a null pointer.
§Safety
- `offset..offset + mem::size_of::<T>() + padding` must be allocated memory.
- `offset` must be less than the capacity of the allocator.
§Panic
- If the allocator is read-only, then this method will panic.
Sourceunsafe fn get_bytes(&self, offset: usize, size: usize) -> &[u8] ⓘ
unsafe fn get_bytes(&self, offset: usize, size: usize) -> &[u8] ⓘ
Returns a bytes slice from the allocator.
§Safety
- `offset..offset + size` must be allocated memory.
- `offset` must be less than the capacity of the allocator.
- `size` must be less than the capacity of the allocator.
- `offset + size` must be less than the capacity of the allocator.
Sourceunsafe fn get_u8_unchecked(&self, offset: usize) -> u8
unsafe fn get_u8_unchecked(&self, offset: usize) -> u8
Returns a u8 from the allocator without bounds checking.
§Safety
- `offset + size` must be within the allocated memory of the allocator.
Sourceunsafe fn get_i8_unchecked(&self, offset: usize) -> i8
unsafe fn get_i8_unchecked(&self, offset: usize) -> i8
Returns a i8 from the allocator without bounds checking.
§Safety
- `offset + size` must be within the allocated memory of the allocator.
Sourceunsafe fn get_u16_be_unchecked(&self, offset: usize) -> u16
unsafe fn get_u16_be_unchecked(&self, offset: usize) -> u16
Returns a u16 from the allocator without bounds checking.
§Safety
- `offset..offset + size` must be within allocated memory.
Sourceunsafe fn get_u16_le_unchecked(&self, offset: usize) -> u16
unsafe fn get_u16_le_unchecked(&self, offset: usize) -> u16
Returns a u16 from the allocator without bounds checking.
§Safety
offset..offset + sizemust be within allocated memory.
Sourceunsafe fn get_u32_be_unchecked(&self, offset: usize) -> u32
unsafe fn get_u32_be_unchecked(&self, offset: usize) -> u32
Returns a u32 from the allocator without bounds checking.
§Safety
offset..offset + sizemust be within allocated memory.
Sourceunsafe fn get_u32_le_unchecked(&self, offset: usize) -> u32
unsafe fn get_u32_le_unchecked(&self, offset: usize) -> u32
Returns a u32 from the allocator without bounds checking.
§Safety
offset..offset + sizemust be within allocated memory.
Sourceunsafe fn get_u64_be_unchecked(&self, offset: usize) -> u64
unsafe fn get_u64_be_unchecked(&self, offset: usize) -> u64
Returns a u64 from the allocator without bounds checking.
§Safety
offset..offset + sizemust be within allocated memory.
Sourceunsafe fn get_u64_le_unchecked(&self, offset: usize) -> u64
unsafe fn get_u64_le_unchecked(&self, offset: usize) -> u64
Returns a u64 from the allocator without bounds checking.
§Safety
offset..offset + sizemust be within allocated memory.
Sourceunsafe fn get_u128_be_unchecked(&self, offset: usize) -> u128
unsafe fn get_u128_be_unchecked(&self, offset: usize) -> u128
Returns a u128 from the allocator without bounds checking.
§Safety
offset..offset + sizemust be within allocated memory.
Sourceunsafe fn get_u128_le_unchecked(&self, offset: usize) -> u128
unsafe fn get_u128_le_unchecked(&self, offset: usize) -> u128
Returns a u128 from the allocator without bounds checking.
§Safety
offset..offset + sizemust be within allocated memory.
Sourceunsafe fn get_i16_be_unchecked(&self, offset: usize) -> i16
unsafe fn get_i16_be_unchecked(&self, offset: usize) -> i16
Returns a i16 from the allocator without bounds checking.
§Safety
offset..offset + sizemust be within allocated memory.
Sourceunsafe fn get_i16_le_unchecked(&self, offset: usize) -> i16
unsafe fn get_i16_le_unchecked(&self, offset: usize) -> i16
Returns a i16 from the allocator without bounds checking.
§Safety
offset..offset + sizemust be within allocated memory.
Sourceunsafe fn get_i32_be_unchecked(&self, offset: usize) -> i32
unsafe fn get_i32_be_unchecked(&self, offset: usize) -> i32
Returns a i32 from the allocator without bounds checking.
§Safety
offset..offset + sizemust be within allocated memory.
Sourceunsafe fn get_i32_le_unchecked(&self, offset: usize) -> i32
unsafe fn get_i32_le_unchecked(&self, offset: usize) -> i32
Returns a i32 from the allocator without bounds checking.
§Safety
offset..offset + sizemust be within allocated memory.
Sourceunsafe fn get_i64_be_unchecked(&self, offset: usize) -> i64
unsafe fn get_i64_be_unchecked(&self, offset: usize) -> i64
Returns a i64 from the allocator without bounds checking.
§Safety
offset..offset + sizemust be within allocated memory.
Sourceunsafe fn get_i64_le_unchecked(&self, offset: usize) -> i64
unsafe fn get_i64_le_unchecked(&self, offset: usize) -> i64
Returns a i64 from the allocator without bounds checking.
§Safety
offset..offset + sizemust be within allocated memory.
Sourceunsafe fn get_i128_be_unchecked(&self, offset: usize) -> i128
unsafe fn get_i128_be_unchecked(&self, offset: usize) -> i128
Returns a i128 from the allocator without bounds checking.
§Safety
offset..offset + sizemust be within allocated memory.
Sourceunsafe fn get_i128_le_unchecked(&self, offset: usize) -> i128
unsafe fn get_i128_le_unchecked(&self, offset: usize) -> i128
Returns a i128 from the allocator without bounds checking.
§Safety
offset..offset + sizemust be within allocated memory.
Sourcefn get_i16_varint(&self, offset: usize) -> Result<(usize, i16), Error>
fn get_i16_varint(&self, offset: usize) -> Result<(usize, i16), Error>
Returns a i16 in LEB128 format from the allocator at the given offset.
§Safety
- `offset` must be within the allocated memory of the allocator.
Sourcefn get_i32_varint(&self, offset: usize) -> Result<(usize, i32), Error>
fn get_i32_varint(&self, offset: usize) -> Result<(usize, i32), Error>
Returns a i32 in LEB128 format from the allocator at the given offset.
§Safety
offsetmust be within the allocated memory of the allocator.
Sourcefn get_i64_varint(&self, offset: usize) -> Result<(usize, i64), Error>
fn get_i64_varint(&self, offset: usize) -> Result<(usize, i64), Error>
Returns a i64 in LEB128 format from the allocator at the given offset.
§Safety
offsetmust be within the allocated memory of the allocator.
Sourcefn get_i128_varint(&self, offset: usize) -> Result<(usize, i128), Error>
fn get_i128_varint(&self, offset: usize) -> Result<(usize, i128), Error>
Returns a i128 in LEB128 format from the allocator at the given offset.
§Safety
offsetmust be within the allocated memory of the allocator.
Sourcefn get_u16_varint(&self, offset: usize) -> Result<(usize, u16), Error>
fn get_u16_varint(&self, offset: usize) -> Result<(usize, u16), Error>
Returns a u16 in LEB128 format from the allocator at the given offset.
§Safety
offsetmust be within the allocated memory of the allocator.
Sourcefn get_u32_varint(&self, offset: usize) -> Result<(usize, u32), Error>
fn get_u32_varint(&self, offset: usize) -> Result<(usize, u32), Error>
Returns a u32 in LEB128 format from the allocator at the given offset.
§Safety
offsetmust be within the allocated memory of the allocator.
Sourcefn get_u64_varint(&self, offset: usize) -> Result<(usize, u64), Error>
fn get_u64_varint(&self, offset: usize) -> Result<(usize, u64), Error>
Returns a u64 in LEB128 format from the allocator at the given offset.
§Safety
offsetmust be within the allocated memory of the allocator.
Sourcefn get_u128_varint(&self, offset: usize) -> Result<(usize, u128), Error>
fn get_u128_varint(&self, offset: usize) -> Result<(usize, u128), Error>
Returns a u128 in LEB128 format from the allocator at the given offset.
§Safety
offsetmust be within the allocated memory of the allocator.
Sourceunsafe fn get_bytes_mut(&self, offset: usize, size: usize) -> &mut [u8] ⓘ
unsafe fn get_bytes_mut(&self, offset: usize, size: usize) -> &mut [u8] ⓘ
Returns a mutable bytes slice from the allocator. If the allocator is read-only, then this method will return an empty slice.
§Safety
- `offset..offset + size` must be allocated memory.
- `offset` must be less than the capacity of the allocator.
- `size` must be less than the capacity of the allocator.
- `offset + size` must be less than the capacity of the allocator.
§Panic
- If the allocator is read-only, then this method will panic.
Sourcefn is_map(&self) -> bool
Available on crate feature memmap and non-target_family="wasm" only.
fn is_map(&self) -> bool
memmap and non-target_family="wasm" only.Returns true if the allocator is created through memory map.
§Example
use rarena_allocator::{sync::Arena, Allocator, Options};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let is_map = arena.is_map();
assert_eq!(is_map, false);Sourcefn is_ondisk(&self) -> bool
fn is_ondisk(&self) -> bool
Returns true if the allocator is on disk.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let is_ondisk = arena.is_ondisk();
assert_eq!(is_ondisk, false);
Source
fn is_inmemory(&self) -> bool
fn is_inmemory(&self) -> bool
Returns true if the allocator is in memory.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let is_inmemory = arena.is_inmemory();
assert_eq!(is_inmemory, true);
Source
fn is_map_anon(&self) -> bool
Available on crate feature memmap and non-target_family="wasm" only.
fn is_map_anon(&self) -> bool
Returns true if the allocator is created through anonymous memory map.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).map_anon::<Arena>().unwrap();
let is_map_anon = arena.is_map_anon();
assert_eq!(is_map_anon, true);
Source
fn is_map_file(&self) -> bool
Available on crate feature memmap and non-target_family="wasm" only.
fn is_map_file(&self) -> bool
Returns true if the allocator is on-disk and created through memory map.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let is_map_file = arena.is_map_file();
assert_eq!(is_map_file, false);
Source
fn lock_exclusive(&self) -> Result<()>
Available on crate feature memmap and non-target_family="wasm" only.
fn lock_exclusive(&self) -> Result<()>
Locks the underlying file for exclusive access, only works on mmap with a file backend.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.lock_exclusive().unwrap();
Source
fn lock_shared(&self) -> Result<()>
Available on crate feature memmap and non-target_family="wasm" only.
Locks the underlying file for shared access, only works on mmap with a file backend.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.lock_shared().unwrap();
Source
fn memory(&self) -> &[u8] ⓘ
fn memory(&self) -> &[u8] ⓘ
Returns the whole main memory of the allocator as a byte slice.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let memory = arena.memory();
Source
fn checksum<S: BuildChecksumer>(&self, cks: &S) -> u64
fn checksum<S: BuildChecksumer>(&self, cks: &S) -> u64
Calculates the checksum of the allocated memory (excluding the reserved memory specified by users through Options::with_reserved) of the allocator.
Source
fn unify(&self) -> bool
fn unify(&self) -> bool
Returns true if the allocator uses the unified memory layout.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
assert_eq!(arena.unify(), false);
let arena = Options::new().with_capacity(100).with_unify(true).alloc::<Arena>().unwrap();
assert_eq!(arena.unify(), true);
Source
unsafe fn mlock(&self, offset: usize, len: usize) -> Result<()>
Available on crate feature memmap and non-target_family="wasm" only.
unsafe fn mlock(&self, offset: usize, len: usize) -> Result<()>
mlock(ptr, len) — Lock memory into RAM.
§Safety
This function operates on raw pointers, but it should only be used on memory which the caller owns. Technically, locking memory shouldn’t violate any invariants, but since unlocking it can violate invariants, this function is also unsafe for symmetry.
Some implementations implicitly round the memory region out to the nearest page boundaries, so this function may lock more memory than explicitly requested if the memory isn’t page-aligned. Other implementations fail if the memory isn’t page-aligned.
§References
Source
unsafe fn munlock(&self, offset: usize, len: usize) -> Result<()>
Available on crate feature memmap and non-target_family="wasm" only.
unsafe fn munlock(&self, offset: usize, len: usize) -> Result<()>
munlock(ptr, len) — Unlock memory.
§Safety
This function operates on raw pointers, but it should only be used on
memory which the caller owns, to avoid compromising the mlock invariants
of other unrelated code in the process.
Some implementations implicitly round the memory region out to the nearest page boundaries, so this function may unlock more memory than explicitly requested if the memory isn’t page-aligned.
§References
Source
fn read_only(&self) -> bool
fn read_only(&self) -> bool
Returns true if the arena is read-only.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let read_only = arena.read_only();
Source
fn remove_on_drop(&self, remove_on_drop: bool)
Available on crate feature memmap and non-target_family="wasm" only.
fn remove_on_drop(&self, remove_on_drop: bool)
Sets remove on drop, only works on mmap with a file backend.
Default is false.
WARNING: Once set to `true`, the backing file will be removed when the allocator is dropped, even if the file is opened in read-only mode.
§Example
arena.remove_on_drop(true);
Source
fn try_lock_exclusive(&self) -> Result<()>
Available on crate feature memmap and non-target_family="wasm" only.
fn try_lock_exclusive(&self) -> Result<()>
Try to lock the underlying file for exclusive access, only works on mmap with a file backend.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.try_lock_exclusive().unwrap();
Source
fn try_lock_shared(&self) -> Result<()>
Available on crate feature memmap and non-target_family="wasm" only.
Try to lock the underlying file for shared access, only works on mmap with a file backend.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.try_lock_shared().unwrap();
Source
fn unlock(&self) -> Result<()>
Available on crate feature memmap and non-target_family="wasm" only.
fn unlock(&self) -> Result<()>
Unlocks the underlying file, only works on mmap with a file backend.
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};
let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.lock_exclusive().unwrap();
// do some thing
arena.unlock().unwrap();
§Dyn Compatibility
This trait is not dyn compatible.
In older versions of Rust, dyn compatibility was called "object safety", so this trait is not object safe.