/// A borrowed view of a sub-range of a `MappedMemory`.
///
/// Pairs a reference to the owning mapping with a `RelativeMemoryRange` describing the
/// subrange relative to that mapping's start; absolute addresses are resolved lazily via
/// `MappedMemory::sub_range_inner()` (see the `AbsoluteMemoryRange` implementations below).
#[derive(Debug)]
pub struct MappedMemorySubrange<'a, Subrange: RelativeMemoryRange>(&'a MappedMemory, Subrange);
impl<'a, Subrange: RelativeMemoryRange> From<(&'a MappedMemory, Subrange)> for MappedMemorySubrange<'a, Subrange>
{
	/// Builds a subrange view directly from a `(mapping, relative range)` pair.
	#[inline(always)]
	fn from((mapped_memory, sub_range): (&'a MappedMemory, Subrange)) -> Self
	{
		Self(mapped_memory, sub_range)
	}
}
impl<'a, Subrange: RelativeMemoryRange> AbsoluteMemoryRange for MappedMemorySubrange<'a, Subrange>
{
#[inline(always)]
fn inclusive_absolute_start_and_length(self) -> (VirtualAddress, usize)
{
self.0.sub_range_inner(&self.1)
}
#[inline(always)]
fn inclusive_absolute_start(self) -> VirtualAddress
{
self.0.sub_range_inner(&self.1).0
}
#[inline(always)]
fn length(self) -> usize
{
self.0.sub_range_inner(&self.1).1
}
}
impl<'a: 'b, 'b, Subrange: RelativeMemoryRange> AbsoluteMemoryRange for &'b MappedMemorySubrange<'a, Subrange>
{
#[inline(always)]
fn inclusive_absolute_start_and_length(self) -> (VirtualAddress, usize)
{
self.0.sub_range_inner(&self.1)
}
#[inline(always)]
fn inclusive_absolute_start(self) -> VirtualAddress
{
self.0.sub_range_inner(&self.1).0
}
#[inline(always)]
fn length(self) -> usize
{
self.0.sub_range_inner(&self.1).1
}
}
impl<'a, Subrange: RelativeMemoryRange + Copy> MappedMemorySubrange<'a, Subrange>
{
	/// Locks this subrange into physical memory.
	///
	/// Delegates to `MappedMemory::lock_range()`; see it for the meaning of the `bool` result.
	#[inline(always)]
	pub fn lock(&self, memory_lock_settings: MemoryLockSettings) -> io::Result<bool>
	{
		self.0.lock_range(memory_lock_settings, self.1)
	}

	/// Unlocks this subrange.
	///
	/// Delegates to `MappedMemory::unlock_range()`; see it for the meaning of the `bool` result.
	#[inline(always)]
	pub fn unlock(&self) -> io::Result<bool>
	{
		self.0.unlock_range(self.1)
	}

	/// Applies memory `advice` to this subrange (delegates to `MappedMemory::advise_range()`).
	#[inline(always)]
	pub fn advise(&self, advice: MemoryAdvice) -> Result<bool, MemoryAdviceError>
	{
		self.0.advise_range(advice, self.1)
	}

	/// Changes the memory protection of this subrange (delegates to `MappedMemory::change_protection_range()`).
	#[inline(always)]
	pub fn change_protection(&self, protection: ExtendedProtection) -> io::Result<()>
	{
		self.0.change_protection_range(protection, self.1)
	}

	/// Synchronizes this subrange with its backing file, if any (delegates to `MappedMemory::synchronize_with_backing_file_range()`).
	#[inline(always)]
	pub fn synchronize_with_backing_file(&self, synchronize: SynchronizeFlags) -> Result<(), ()>
	{
		self.0.synchronize_with_backing_file_range(synchronize, self.1)
	}

	/// Zeroes this subrange (delegates to `MappedMemory::zero_range()`).
	#[inline(always)]
	pub fn zero(&self)
	{
		self.0.zero_range(self.1)
	}

	/// Returns `true` if the referenced value lies within this subrange.
	#[inline(always)]
	pub fn owns_reference<E>(&self, reference: &E) -> bool
	{
		self.owns_non_null(new_non_null(reference as *const E as *mut E as *mut u8))
	}

	/// Returns `true` if the pointed-to value lies within this subrange.
	#[inline(always)]
	pub fn owns_non_null<E>(&self, non_null: NonNull<E>) -> bool
	{
		self.owns_pointer(non_null.as_ptr() as *const E)
	}

	/// Returns `true` if `pointer` lies within this subrange, ie in `[start, start + length)`.
	///
	/// The start is inclusive and the end (`start + length`) is exclusive.
	///
	/// Fix: removed the statement `pointer as *const u8 as usize;`, which computed a value and
	/// immediately discarded it — a no-op left over from an earlier revision.
	#[inline(always)]
	pub fn owns_pointer<E>(&self, pointer: *const E) -> bool
	{
		let (start, length) = self.inclusive_absolute_start_and_length();
		let start: *const E = start.into();
		if unlikely!(start > pointer)
		{
			return false
		}
		let end = self.virtual_address().offset_in_bytes(length).into();
		pointer < end
	}

	/// The absolute virtual address of the start of this subrange.
	#[inline(always)]
	pub fn virtual_address(&self) -> VirtualAddress
	{
		self.inclusive_absolute_start()
	}

	/// The page size of the underlying mapping.
	#[inline(always)]
	pub fn page_size(&self) -> PageSizeOrHugePageSize
	{
		self.0.page_size()
	}

	/// The size of this subrange in bytes.
	#[inline(always)]
	pub fn mapped_size_in_bytes(&self) -> usize
	{
		self.length()
	}

	/// The number of pages spanned by this subrange.
	///
	/// NOTE(review): integer division — assumes the subrange length is a whole multiple of the
	/// page size, otherwise a trailing partial page is not counted; confirm against callers.
	#[inline(always)]
	pub fn number_of_pages(&self) -> usize
	{
		self.mapped_size_in_bytes() / (self.0.page_size_in_bytes().get() as usize)
	}
}