Trait Allocator

Source
pub trait Allocator: Sealed {
    type Path;

Show 106 methods // Required methods fn reserved_bytes(&self) -> usize; fn reserved_slice(&self) -> &[u8] ; unsafe fn reserved_slice_mut(&self) -> &mut [u8] ; unsafe fn alloc<T>(&self) -> Result<RefMut<'_, T, Self>, Error>; fn alloc_aligned_bytes<T>( &self, size: u32, ) -> Result<BytesRefMut<'_, Self>, Error>; fn alloc_bytes(&self, size: u32) -> Result<BytesRefMut<'_, Self>, Error>; fn allocated(&self) -> usize; fn raw_mut_ptr(&self) -> *mut u8; fn raw_ptr(&self) -> *const u8; unsafe fn clear(&self) -> Result<(), Error>; unsafe fn dealloc(&self, offset: u32, size: u32) -> bool; fn discard_freelist(&self) -> Result<u32, Error>; fn discarded(&self) -> u32; fn increase_discarded(&self, size: u32); fn magic_version(&self) -> u16; fn minimum_segment_size(&self) -> u32; fn set_minimum_segment_size(&self, size: u32); fn path(&self) -> Option<&Self::Path>; unsafe fn offset(&self, ptr: *const u8) -> usize; fn page_size(&self) -> usize; fn refs(&self) -> usize; fn remaining(&self) -> usize; unsafe fn rewind(&self, pos: ArenaPosition); fn version(&self) -> u16; // Provided methods fn alloc_aligned_bytes_owned<T>( &self, size: u32, ) -> Result<BytesMut<Self>, Error> { ... } fn alloc_bytes_owned(&self, size: u32) -> Result<BytesMut<Self>, Error> { ... } unsafe fn alloc_owned<T>(&self) -> Result<Owned<T, Self>, Error> { ... } fn allocated_memory(&self) -> &[u8] { ... } fn capacity(&self) -> usize { ... } fn data_offset(&self) -> usize { ... } fn data(&self) -> &[u8] { ... } fn flush(&self) -> Result<()> { ... } fn flush_async(&self) -> Result<()> { ... } fn flush_range(&self, offset: usize, len: usize) -> Result<()> { ... } fn flush_async_range(&self, offset: usize, len: usize) -> Result<()> { ... } fn flush_header(&self) -> Result<()> { ... } fn flush_async_header(&self) -> Result<()> { ... } fn flush_header_and_range(&self, offset: usize, len: usize) -> Result<()> { ... } fn flush_async_header_and_range( &self, offset: usize, len: usize, ) -> Result<()> { ... 
} unsafe fn get_pointer(&self, offset: usize) -> *const u8 { ... } unsafe fn get_pointer_mut(&self, offset: usize) -> *mut u8 { ... } unsafe fn get_aligned_pointer<T>(&self, offset: usize) -> *const T { ... } unsafe fn get_aligned_pointer_mut<T>(&self, offset: usize) -> NonNull<T> { ... } unsafe fn get_bytes(&self, offset: usize, size: usize) -> &[u8] { ... } fn get_u8(&self, offset: usize) -> Result<u8, Error> { ... } fn get_i8(&self, offset: usize) -> Result<i8, Error> { ... } unsafe fn get_u8_unchecked(&self, offset: usize) -> u8 { ... } unsafe fn get_i8_unchecked(&self, offset: usize) -> i8 { ... } fn get_u16_be(&self, offset: usize) -> Result<u16, Error> { ... } unsafe fn get_u16_be_unchecked(&self, offset: usize) -> u16 { ... } fn get_u16_le(&self, offset: usize) -> Result<u16, Error> { ... } unsafe fn get_u16_le_unchecked(&self, offset: usize) -> u16 { ... } fn get_u32_be(&self, offset: usize) -> Result<u32, Error> { ... } unsafe fn get_u32_be_unchecked(&self, offset: usize) -> u32 { ... } fn get_u32_le(&self, offset: usize) -> Result<u32, Error> { ... } unsafe fn get_u32_le_unchecked(&self, offset: usize) -> u32 { ... } fn get_u64_be(&self, offset: usize) -> Result<u64, Error> { ... } unsafe fn get_u64_be_unchecked(&self, offset: usize) -> u64 { ... } fn get_u64_le(&self, offset: usize) -> Result<u64, Error> { ... } unsafe fn get_u64_le_unchecked(&self, offset: usize) -> u64 { ... } fn get_u128_be(&self, offset: usize) -> Result<u128, Error> { ... } unsafe fn get_u128_be_unchecked(&self, offset: usize) -> u128 { ... } fn get_u128_le(&self, offset: usize) -> Result<u128, Error> { ... } unsafe fn get_u128_le_unchecked(&self, offset: usize) -> u128 { ... } fn get_i16_be(&self, offset: usize) -> Result<i16, Error> { ... } unsafe fn get_i16_be_unchecked(&self, offset: usize) -> i16 { ... } fn get_i16_le(&self, offset: usize) -> Result<i16, Error> { ... } unsafe fn get_i16_le_unchecked(&self, offset: usize) -> i16 { ... 
} fn get_i32_be(&self, offset: usize) -> Result<i32, Error> { ... } unsafe fn get_i32_be_unchecked(&self, offset: usize) -> i32 { ... } fn get_i32_le(&self, offset: usize) -> Result<i32, Error> { ... } unsafe fn get_i32_le_unchecked(&self, offset: usize) -> i32 { ... } fn get_i64_be(&self, offset: usize) -> Result<i64, Error> { ... } unsafe fn get_i64_be_unchecked(&self, offset: usize) -> i64 { ... } fn get_i64_le(&self, offset: usize) -> Result<i64, Error> { ... } unsafe fn get_i64_le_unchecked(&self, offset: usize) -> i64 { ... } fn get_i128_be(&self, offset: usize) -> Result<i128, Error> { ... } unsafe fn get_i128_be_unchecked(&self, offset: usize) -> i128 { ... } fn get_i128_le(&self, offset: usize) -> Result<i128, Error> { ... } unsafe fn get_i128_le_unchecked(&self, offset: usize) -> i128 { ... } fn get_i16_varint(&self, offset: usize) -> Result<(usize, i16), Error> { ... } fn get_i32_varint(&self, offset: usize) -> Result<(usize, i32), Error> { ... } fn get_i64_varint(&self, offset: usize) -> Result<(usize, i64), Error> { ... } fn get_i128_varint(&self, offset: usize) -> Result<(usize, i128), Error> { ... } fn get_u16_varint(&self, offset: usize) -> Result<(usize, u16), Error> { ... } fn get_u32_varint(&self, offset: usize) -> Result<(usize, u32), Error> { ... } fn get_u64_varint(&self, offset: usize) -> Result<(usize, u64), Error> { ... } fn get_u128_varint(&self, offset: usize) -> Result<(usize, u128), Error> { ... } unsafe fn get_bytes_mut(&self, offset: usize, size: usize) -> &mut [u8] { ... } fn is_map(&self) -> bool { ... } fn is_ondisk(&self) -> bool { ... } fn is_inmemory(&self) -> bool { ... } fn is_map_anon(&self) -> bool { ... } fn is_map_file(&self) -> bool { ... } fn lock_exclusive(&self) -> Result<()> { ... } fn lock_shared(&self) -> Result<()> { ... } fn memory(&self) -> &[u8] { ... } fn checksum<S: BuildChecksumer>(&self, cks: &S) -> u64 { ... } fn unify(&self) -> bool { ... 
} unsafe fn mlock(&self, offset: usize, len: usize) -> Result<()> { ... } unsafe fn munlock(&self, offset: usize, len: usize) -> Result<()> { ... } fn read_only(&self) -> bool { ... } fn remove_on_drop(&self, remove_on_drop: bool) { ... } fn try_lock_exclusive(&self) -> Result<()> { ... } fn try_lock_shared(&self) -> Result<()> { ... } fn unlock(&self) -> Result<()> { ... }
}
Expand description

A trait for easily interacting with the sync and unsync allocators.

Required Associated Types§

Source

type Path

Available on crate feature memmap and non-target_family="wasm" only.

The path type of the allocator.

Required Methods§

Source

fn reserved_bytes(&self) -> usize

Returns the number of bytes that are reserved by the allocator.

Source

fn reserved_slice(&self) -> &[u8]

Returns the reserved bytes of the allocator specified in the Options::with_reserved.

Source

unsafe fn reserved_slice_mut(&self) -> &mut [u8]

Returns the mutable reserved bytes of the allocator specified in the Options::with_reserved.

§Safety
  • The caller needs to make sure there is no data race
§Panic
  • If in read-only mode, and the number of reserved bytes is greater than 0, this method will panic.
Source

unsafe fn alloc<T>(&self) -> Result<RefMut<'_, T, Self>, Error>

Allocates a T in the allocator.

§Safety
  • If T needs to be dropped and callers invoke RefMut::detach, then the caller must ensure that the T is dropped before the allocator is dropped. Otherwise, it will lead to memory leaks.

  • If this is file backed allocator, then T must be recoverable from bytes.

    1. Types require allocation are not recoverable.
    2. Pointers are not recoverable, like *const T, *mut T, NonNull and any structs containing pointers; although those values live on the stack, they cannot be recovered when the file is reopened.
§Examples
§Memory leak

The following example demonstrates the memory leak when the T is a heap allocated type and detached.


let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();

{
  let mut data = arena.alloc::<Vec<u8>>().unwrap();
  data.detach();
  data.write(vec![1, 2, 3]);
}

drop(arena); // memory leak, the `Vec<u8>` is not dropped.
§Undefined behavior

The following example demonstrates the undefined behavior when the T is not recoverable.


struct TypeOnHeap {
  data: Vec<u8>,
}

let arena = Options::new().with_create_new(1000).with_read(true).with_write(true).map_mut::<Arena, _>("path/to/file").unwrap();

let mut data = arena.alloc::<TypeOnHeap>().unwrap();
data.detach();
data.write(TypeOnHeap { data: vec![1, 2, 3] });
let offset = data.offset();
drop(arena);

// reopen the file
let arena = Options::new().with_read(true).map::<Arena, _>("path/to/file").unwrap();

let foo = &*arena.get_aligned_pointer::<TypeOnHeap>(offset as usize);
let b = foo.data[1]; // undefined behavior, the `data`'s pointer stored in the file is not valid anymore.
§Good practice

Some examples about how to use this method correctly.

§Heap allocated type with careful memory management
let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();

// Do not invoke detach, so when the data is dropped, the drop logic will be handled by the allocator.
// automatically.
{
  let mut data = arena.alloc::<Vec<u8>>().unwrap();
  data.write(vec![1, 2, 3]);
}


let mut detached_data = arena.alloc::<Vec<u8>>().unwrap();
detached_data.detach();
detached_data.write(vec![4, 5, 6]);

// some other logic

core::ptr::drop_in_place(detached_data.as_mut()); // drop the `Vec` manually.

drop(arena); // it is safe, the `Vec` is already dropped.
§Recoverable type with file backed allocator

struct Recoverable {
  field1: u64,
  field2: AtomicU32,
}

let arena = Options::new().with_create_new(1000).with_read(true).with_write(true).map_mut::<Arena, _>("path/to/file").unwrap();

let mut data = arena.alloc::<Recoverable>().unwrap();
data.write(Recoverable { field1: 10, field2: AtomicU32::new(20) });
data.detach();
let offset = data.offset();
drop(arena);

// reopen the file
let arena = Options::new().with_read(true).map::<Arena, _>("path/to/file").unwrap();

let foo = &*arena.get_aligned_pointer::<Recoverable>(offset as usize);

assert_eq!(foo.field1, 10);
assert_eq!(foo.field2.load(Ordering::Acquire), 20);
Source

fn alloc_aligned_bytes<T>( &self, size: u32, ) -> Result<BytesRefMut<'_, Self>, Error>

Allocates a byte slice that can hold a well-aligned T and extra size bytes.

The layout of the allocated memory is:

| T | [u8; size] |
§Example
let mut bytes = arena.alloc_aligned_bytes::<T>(extra).unwrap();
bytes.put(val).unwrap(); // write `T` to the byte slice.
Source

fn alloc_bytes(&self, size: u32) -> Result<BytesRefMut<'_, Self>, Error>

Allocates a slice of memory in the allocator.

The BytesRefMut is zeroed out.

If you want a BytesMut, see alloc_bytes_owned.

Source

fn allocated(&self) -> usize

Returns the number of bytes allocated by the allocator.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let allocated = arena.allocated();
Source

fn raw_mut_ptr(&self) -> *mut u8

Returns the start pointer of the main memory of the allocator.

Source

fn raw_ptr(&self) -> *const u8

Returns the start pointer of the main memory of the allocator.

Source

unsafe fn clear(&self) -> Result<(), Error>

Clear the allocator.

§Safety
  • The current pointers get from the allocator cannot be used anymore after calling this method.
  • This method is not thread-safe.
§Examples

Undefined behavior:

let mut data = arena.alloc::<Vec<u8>>().unwrap();

arena.clear();

data.write(vec![1, 2, 3]); // undefined behavior

Good practice:

use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();

unsafe {
  let mut data = arena.alloc::<Vec<u8>>().unwrap();
  data.write(vec![1, 2, 3]);

  arena.clear().unwrap();
}
Source

unsafe fn dealloc(&self, offset: u32, size: u32) -> bool

Deallocates the memory at the given offset and size; the range offset..offset + size will be made into a segment. Returns true if the deallocation is successful.

§Safety
Source

fn discard_freelist(&self) -> Result<u32, Error>

Discards all freelist nodes in the allocator.

Returns the number of bytes discarded.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
arena.discard_freelist();
Source

fn discarded(&self) -> u32

Returns the number of bytes discarded by the allocator.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let discarded = arena.discarded();
Source

fn increase_discarded(&self, size: u32)

Forcibly increases the discarded bytes.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
arena.increase_discarded(100);
Source

fn magic_version(&self) -> u16

Returns the magic version of the allocator. This value can be used to check the compatibility for application using Allocator.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let magic_version = arena.magic_version();
Source

fn minimum_segment_size(&self) -> u32

Returns the minimum segment size of the allocator.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let min_segment_size = arena.minimum_segment_size();
Source

fn set_minimum_segment_size(&self, size: u32)

Sets the minimum segment size of the allocator.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
arena.set_minimum_segment_size(100);
Source

fn path(&self) -> Option<&Self::Path>

Available on crate feature memmap and non-target_family="wasm" only.

Returns the path of the mmap file, only returns Some when the ARENA is backed by a mmap file.

§Example

let path = arena.path();
Source

unsafe fn offset(&self, ptr: *const u8) -> usize

Returns the offset to the start of the allocator.

§Safety
  • ptr must be allocated by this allocator.
Source

fn page_size(&self) -> usize

Returns the page size.

If in no-std environment, then this method will return 4096. Otherwise, it will return the system’s page size.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let page_size = arena.page_size();
Source

fn refs(&self) -> usize

Returns the number of references to the allocator.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let refs = arena.refs();
Source

fn remaining(&self) -> usize

Returns the number of remaining bytes that can be allocated by the allocator.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let remaining = arena.remaining();
Source

unsafe fn rewind(&self, pos: ArenaPosition)

Set back the allocator’s main memory cursor to the given position.

§Safety
  • If the current position is larger than the given position, then the memory between the current position and the given position will be reclaimed, so must ensure the memory chunk between the current position and the given position will not be accessed anymore.
  • This method is not thread safe.
Source

fn version(&self) -> u16

Returns the version of the allocator.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let version = arena.version();

Provided Methods§

Source

fn alloc_aligned_bytes_owned<T>( &self, size: u32, ) -> Result<BytesMut<Self>, Error>

Allocates an owned byte slice that can hold a well-aligned T and extra size bytes.

The layout of the allocated memory is:

| T | [u8; size] |
§Example
let mut bytes = arena.alloc_aligned_bytes_owned::<T>(extra).unwrap();
bytes.put(val).unwrap(); // write `T` to the byte slice.
Source

fn alloc_bytes_owned(&self, size: u32) -> Result<BytesMut<Self>, Error>

Allocates an owned slice of memory in the allocator.

The cost of this method is an extra atomic operation, compared to alloc_bytes.

Source

unsafe fn alloc_owned<T>(&self) -> Result<Owned<T, Self>, Error>

Allocates a T in the allocator. Like alloc, but returns an Owned.

The cost is one more atomic operation than alloc.

§Safety
§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();

unsafe {
  let mut data = arena.alloc_owned::<u64>().unwrap();
  data.write(10);

  assert_eq!(*data.as_ref(), 10);
}
Source

fn allocated_memory(&self) -> &[u8]

Returns the whole main memory of the allocator as a byte slice.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let memory = arena.allocated_memory();
Source

fn capacity(&self) -> usize

Returns the capacity of the allocator.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let capacity = arena.capacity();
Source

fn data_offset(&self) -> usize

Returns the data offset of the allocator. The offset is the end of the reserved bytes of the allocator.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let data_offset = arena.data_offset();
Source

fn data(&self) -> &[u8]

Returns the data section of the allocator as a byte slice, header is not included.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let data = arena.data();
Source

fn flush(&self) -> Result<()>

Available on crate feature memmap and non-target_family="wasm" only.

Flushes the memory-mapped file to disk.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};



let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.flush().unwrap();
Source

fn flush_async(&self) -> Result<()>

Available on crate feature memmap and non-target_family="wasm" only.

Flushes the memory-mapped file to disk asynchronously.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};



let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };

arena.flush_async().unwrap();
Source

fn flush_range(&self, offset: usize, len: usize) -> Result<()>

Available on crate feature memmap and non-target_family="wasm" only.

Flushes outstanding memory map modifications in the range to disk.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};



let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.flush_range(0, 100).unwrap();
Source

fn flush_async_range(&self, offset: usize, len: usize) -> Result<()>

Available on crate feature memmap and non-target_family="wasm" only.

Asynchronously flushes outstanding memory map modifications in the range to disk.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};



let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };

arena.flush_async_range(0, 100).unwrap();
Source

fn flush_header(&self) -> Result<()>

Available on crate feature memmap and non-target_family="wasm" only.

Flushes outstanding memory map modifications in Allocator’s header to disk.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};



let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.flush_header().unwrap();
Source

fn flush_async_header(&self) -> Result<()>

Available on crate feature memmap and non-target_family="wasm" only.

Asynchronously flushes outstanding memory map modifications in the Allocator’s header to disk.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};



let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };

arena.flush_async_header().unwrap();
Source

fn flush_header_and_range(&self, offset: usize, len: usize) -> Result<()>

Available on crate feature memmap and non-target_family="wasm" only.

Flushes outstanding memory map modifications in the range and Allocator’s header to disk.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};



let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.flush_header_and_range(0, 100).unwrap();
Source

fn flush_async_header_and_range(&self, offset: usize, len: usize) -> Result<()>

Available on crate feature memmap and non-target_family="wasm" only.

Asynchronously flushes outstanding memory map modifications in the range and Allocator’s header to disk.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};



let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };

arena.flush_async_header_and_range(0, 100).unwrap();
Source

unsafe fn get_pointer(&self, offset: usize) -> *const u8

Returns a pointer to the memory at the given offset.

§Safety
  • offset must be less than the capacity of the allocator.
Source

unsafe fn get_pointer_mut(&self, offset: usize) -> *mut u8

Returns a pointer to the memory at the given offset. If the allocator is read-only, then this method will return a null pointer.

§Safety
  • offset must be less than the capacity of the allocator.
§Panic
  • If the allocator is read-only, then this method will panic.
Source

unsafe fn get_aligned_pointer<T>(&self, offset: usize) -> *const T

Returns an aligned pointer to the memory at the given offset.

§Safety
  • offset..offset + mem::size_of::<T>() + padding must be allocated memory.
  • offset must be less than the capacity of the allocator.
Source

unsafe fn get_aligned_pointer_mut<T>(&self, offset: usize) -> NonNull<T>

Returns an aligned pointer to the memory at the given offset. If the allocator is read-only, then this method will return a null pointer.

§Safety
  • offset..offset + mem::size_of::<T>() + padding must be allocated memory.
  • offset must be less than the capacity of the allocator.
§Panic
  • If the allocator is read-only, then this method will panic.
Source

unsafe fn get_bytes(&self, offset: usize, size: usize) -> &[u8]

Returns a bytes slice from the allocator.

§Safety
  • offset..offset + size must be allocated memory.
  • offset must be less than the capacity of the allocator.
  • size must be less than the capacity of the allocator.
  • offset + size must be less than the capacity of the allocator.
Source

fn get_u8(&self, offset: usize) -> Result<u8, Error>

Returns a u8 from the allocator.

Source

fn get_i8(&self, offset: usize) -> Result<i8, Error>

Returns a i8 from the allocator.

Source

unsafe fn get_u8_unchecked(&self, offset: usize) -> u8

Returns a u8 from the allocator without bounds checking.

§Safety
  • offset + size must be within the allocated memory of the allocator.
Source

unsafe fn get_i8_unchecked(&self, offset: usize) -> i8

Returns a i8 from the allocator without bounds checking.

§Safety
  • offset + size must be within the allocated memory of the allocator.
Source

fn get_u16_be(&self, offset: usize) -> Result<u16, Error>

Returns a u16 from the allocator.

Source

unsafe fn get_u16_be_unchecked(&self, offset: usize) -> u16

Returns a u16 from the allocator without bounds checking.

§Safety
  • offset..offset + size must be within allocated memory.
Source

fn get_u16_le(&self, offset: usize) -> Result<u16, Error>

Returns a u16 from the allocator.

Source

unsafe fn get_u16_le_unchecked(&self, offset: usize) -> u16

Returns a u16 from the allocator without bounds checking.

§Safety
  • offset..offset + size must be within allocated memory.
Source

fn get_u32_be(&self, offset: usize) -> Result<u32, Error>

Returns a u32 from the allocator.

Source

unsafe fn get_u32_be_unchecked(&self, offset: usize) -> u32

Returns a u32 from the allocator without bounds checking.

§Safety
  • offset..offset + size must be within allocated memory.
Source

fn get_u32_le(&self, offset: usize) -> Result<u32, Error>

Returns a u32 from the allocator.

Source

unsafe fn get_u32_le_unchecked(&self, offset: usize) -> u32

Returns a u32 from the allocator without bounds checking.

§Safety
  • offset..offset + size must be within allocated memory.
Source

fn get_u64_be(&self, offset: usize) -> Result<u64, Error>

Returns a u64 from the allocator.

Source

unsafe fn get_u64_be_unchecked(&self, offset: usize) -> u64

Returns a u64 from the allocator without bounds checking.

§Safety
  • offset..offset + size must be within allocated memory.
Source

fn get_u64_le(&self, offset: usize) -> Result<u64, Error>

Returns a u64 from the allocator.

Source

unsafe fn get_u64_le_unchecked(&self, offset: usize) -> u64

Returns a u64 from the allocator without bounds checking.

§Safety
  • offset..offset + size must be within allocated memory.
Source

fn get_u128_be(&self, offset: usize) -> Result<u128, Error>

Returns a u128 from the allocator.

Source

unsafe fn get_u128_be_unchecked(&self, offset: usize) -> u128

Returns a u128 from the allocator without bounds checking.

§Safety
  • offset..offset + size must be within allocated memory.
Source

fn get_u128_le(&self, offset: usize) -> Result<u128, Error>

Returns a u128 from the allocator.

Source

unsafe fn get_u128_le_unchecked(&self, offset: usize) -> u128

Returns a u128 from the allocator without bounds checking.

§Safety
  • offset..offset + size must be within allocated memory.
Source

fn get_i16_be(&self, offset: usize) -> Result<i16, Error>

Returns a i16 from the allocator.

Source

unsafe fn get_i16_be_unchecked(&self, offset: usize) -> i16

Returns a i16 from the allocator without bounds checking.

§Safety
  • offset..offset + size must be within allocated memory.
Source

fn get_i16_le(&self, offset: usize) -> Result<i16, Error>

Returns a i16 from the allocator.

Source

unsafe fn get_i16_le_unchecked(&self, offset: usize) -> i16

Returns a i16 from the allocator without bounds checking.

§Safety
  • offset..offset + size must be within allocated memory.
Source

fn get_i32_be(&self, offset: usize) -> Result<i32, Error>

Returns a i32 from the allocator.

Source

unsafe fn get_i32_be_unchecked(&self, offset: usize) -> i32

Returns a i32 from the allocator without bounds checking.

§Safety
  • offset..offset + size must be within allocated memory.
Source

fn get_i32_le(&self, offset: usize) -> Result<i32, Error>

Returns a i32 from the allocator.

Source

unsafe fn get_i32_le_unchecked(&self, offset: usize) -> i32

Returns a i32 from the allocator without bounds checking.

§Safety
  • offset..offset + size must be within allocated memory.
Source

fn get_i64_be(&self, offset: usize) -> Result<i64, Error>

Returns a i64 from the allocator.

Source

unsafe fn get_i64_be_unchecked(&self, offset: usize) -> i64

Returns a i64 from the allocator without bounds checking.

§Safety
  • offset..offset + size must be within allocated memory.
Source

fn get_i64_le(&self, offset: usize) -> Result<i64, Error>

Returns a i64 from the allocator.

Source

unsafe fn get_i64_le_unchecked(&self, offset: usize) -> i64

Returns a i64 from the allocator without bounds checking.

§Safety
  • offset..offset + size must be within allocated memory.
Source

fn get_i128_be(&self, offset: usize) -> Result<i128, Error>

Returns a i128 from the allocator.

Source

unsafe fn get_i128_be_unchecked(&self, offset: usize) -> i128

Returns a i128 from the allocator without bounds checking.

§Safety
  • offset..offset + size must be within allocated memory.
Source

fn get_i128_le(&self, offset: usize) -> Result<i128, Error>

Returns a i128 from the allocator.

Source

unsafe fn get_i128_le_unchecked(&self, offset: usize) -> i128

Returns a i128 from the allocator without bounds checking.

§Safety
  • offset..offset + size must be within allocated memory.
Source

fn get_i16_varint(&self, offset: usize) -> Result<(usize, i16), Error>

Returns a i16 in LEB128 format from the allocator at the given offset.

§Safety
  • offset must be within the allocated memory of the allocator.
Source

fn get_i32_varint(&self, offset: usize) -> Result<(usize, i32), Error>

Returns a i32 in LEB128 format from the allocator at the given offset.

§Safety
  • offset must be within the allocated memory of the allocator.
Source

fn get_i64_varint(&self, offset: usize) -> Result<(usize, i64), Error>

Returns a i64 in LEB128 format from the allocator at the given offset.

§Safety
  • offset must be within the allocated memory of the allocator.
Source

fn get_i128_varint(&self, offset: usize) -> Result<(usize, i128), Error>

Returns a i128 in LEB128 format from the allocator at the given offset.

§Safety
  • offset must be within the allocated memory of the allocator.
Source

fn get_u16_varint(&self, offset: usize) -> Result<(usize, u16), Error>

Returns a u16 in LEB128 format from the allocator at the given offset.

§Safety
  • offset must be within the allocated memory of the allocator.
Source

fn get_u32_varint(&self, offset: usize) -> Result<(usize, u32), Error>

Returns a u32 in LEB128 format from the allocator at the given offset.

§Safety
  • offset must be within the allocated memory of the allocator.
Source

fn get_u64_varint(&self, offset: usize) -> Result<(usize, u64), Error>

Returns a u64 in LEB128 format from the allocator at the given offset.

§Safety
  • offset must be within the allocated memory of the allocator.
Source

fn get_u128_varint(&self, offset: usize) -> Result<(usize, u128), Error>

Returns a u128 in LEB128 format from the allocator at the given offset.

§Safety
  • offset must be within the allocated memory of the allocator.
Source

unsafe fn get_bytes_mut(&self, offset: usize, size: usize) -> &mut [u8]

Returns a mutable bytes slice from the allocator. If the allocator is read-only, then this method will return an empty slice.

§Safety
  • offset..offset + size must be allocated memory.
  • offset must be less than the capacity of the allocator.
  • size must be less than the capacity of the allocator.
  • offset + size must be less than the capacity of the allocator.
§Panic
  • If the allocator is read-only, then this method will panic.
Source

fn is_map(&self) -> bool

Available on crate feature memmap and non-target_family="wasm" only.

Returns true if the allocator is created through memory map.

§Example
use rarena_allocator::{sync::Arena, Allocator, Options};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let is_map = arena.is_map();
assert_eq!(is_map, false);
Source

fn is_ondisk(&self) -> bool

Returns true if the allocator is on disk.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let is_ondisk = arena.is_ondisk();
assert_eq!(is_ondisk, false);
Source

fn is_inmemory(&self) -> bool

Returns true if the allocator is in memory.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let is_inmemory = arena.is_inmemory();
assert_eq!(is_inmemory, true);
Source

fn is_map_anon(&self) -> bool

Available on crate feature memmap and non-target_family="wasm" only.

Returns true if the allocator is backed by an anonymous (in-memory) memory map.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).map_anon::<Arena>().unwrap();
let is_map_anon = arena.is_map_anon();
assert_eq!(is_map_anon, true);
Source

fn is_map_file(&self) -> bool

Available on crate feature memmap and non-target_family="wasm" only.

Returns true if the allocator is on-disk and created through memory map.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let is_map_file = arena.is_map_file();
assert_eq!(is_map_file, false);
Source

fn lock_exclusive(&self) -> Result<()>

Available on crate feature memmap and non-target_family="wasm" only.

Locks the underlying file for exclusive access, only works on mmap with a file backend.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};



let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.lock_exclusive().unwrap();
Source

fn lock_shared(&self) -> Result<()>

Available on crate feature memmap and non-target_family="wasm" only.

Locks the underlying file for shared access, only works on mmap with a file backend.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};



let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.lock_shared().unwrap();
Source

fn memory(&self) -> &[u8]

Returns the whole main memory of the allocator as a byte slice.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let memory = arena.memory();
Source

fn checksum<S: BuildChecksumer>(&self, cks: &S) -> u64

Calculates the checksum of the allocated memory (excluding the reserved memory specified by users through Options::with_reserved) of the allocator.

Source

fn unify(&self) -> bool

Returns true if the allocator uses the unified memory layout (see Options::with_unify).

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
assert_eq!(arena.unify(), false);

let arena = Options::new().with_capacity(100).with_unify(true).alloc::<Arena>().unwrap();
assert_eq!(arena.unify(), true);
Source

unsafe fn mlock(&self, offset: usize, len: usize) -> Result<()>

Available on crate feature memmap and non-target_family="wasm" only.

mlock(ptr, len)—Lock memory into RAM.

§Safety

This function operates on raw pointers, but it should only be used on memory which the caller owns. Technically, locking memory shouldn’t violate any invariants, but since unlocking it can violate invariants, this function is also unsafe for symmetry.

Some implementations implicitly round the memory region out to the nearest page boundaries, so this function may lock more memory than explicitly requested if the memory isn’t page-aligned. Other implementations fail if the memory isn’t page-aligned.

§References
Source

unsafe fn munlock(&self, offset: usize, len: usize) -> Result<()>

Available on crate feature memmap and non-target_family="wasm" only.

munlock(ptr, len)—Unlock memory.

§Safety

This function operates on raw pointers, but it should only be used on memory which the caller owns, to avoid compromising the mlock invariants of other unrelated code in the process.

Some implementations implicitly round the memory region out to the nearest page boundaries, so this function may unlock more memory than explicitly requested if the memory isn’t page-aligned.

§References
Source

fn read_only(&self) -> bool

Returns true if the allocator is read-only.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};

let arena = Options::new().with_capacity(100).alloc::<Arena>().unwrap();
let read_only = arena.read_only();
Source

fn remove_on_drop(&self, remove_on_drop: bool)

Available on crate feature memmap and non-target_family="wasm" only.

Sets remove on drop, only works on mmap with a file backend.

Default is false.

WARNING: Once set to true, the backed file will be removed when the allocator is dropped, even though the file is opened in read-only mode.

§Example

arena.remove_on_drop(true);
Source

fn try_lock_exclusive(&self) -> Result<()>

Available on crate feature memmap and non-target_family="wasm" only.

Try to lock the underlying file for exclusive access, only works on mmap with a file backend.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};



let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.try_lock_exclusive().unwrap();
Source

fn try_lock_shared(&self) -> Result<()>

Available on crate feature memmap and non-target_family="wasm" only.

Try to lock the underlying file for shared access, only works on mmap with a file backend.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};



let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.try_lock_shared().unwrap();
Source

fn unlock(&self) -> Result<()>

Available on crate feature memmap and non-target_family="wasm" only.

Unlocks the underlying file, only works on mmap with a file backend.

§Example
use rarena_allocator::{sync::Arena, Options, Allocator};



let mut arena = unsafe { Options::new().with_create_new(true).with_read(true).with_write(true).with_capacity(100).map_mut::<Arena, _>(&path).unwrap() };
arena.lock_exclusive().unwrap();

// do some thing
arena.unlock().unwrap();

Dyn Compatibility§

This trait is not dyn compatible.

In older versions of Rust, dyn compatibility was called "object safety", so this trait is not object safe.

Implementors§

Source§

impl Allocator for rarena_allocator::sync::Arena

Source§

impl Allocator for rarena_allocator::unsync::Arena