pub struct MemoryMap<M> { /* private fields */ }
Expand description
The MemoryMap
struct provides a mechanism to map addresses from the linear address space
that memflow uses internally to hardware specific memory regions.
All memory addresses will be bounds checked.
Examples
use memflow::prelude::{MemoryMap, CTup2, umem};
let mut map = MemoryMap::new();
map.push_remap(0x1000.into(), 0x1000, 0.into()); // push region from 0x1000 - 0x1FFF
map.push_remap(0x3000.into(), 0x1000, 0x2000.into()); // push region from 0x3000 - 0x3FFF
println!("{:?}", map);
// handle unmapped memory regions
let failed = &mut |CTup2(a, b)| {
println!("Unmapped: {} {}", a, b);
true
};
let hw_addr = map.map(0x10ff.into(), 8 as umem, Some(failed));
Implementations§
source§impl<M: SplitAtIndex> MemoryMap<M>
impl<M: SplitAtIndex> MemoryMap<M>
sourcepub fn new() -> Self
pub fn new() -> Self
Constructs a new memory map.
This function is identical to MemoryMap::default()
.
pub fn is_empty(&self) -> bool
sourcepub fn iter(&self) -> impl Iterator<Item = &MemoryMapping<M>>
pub fn iter(&self) -> impl Iterator<Item = &MemoryMapping<M>>
Iterator over memory mappings
sourcepub fn map<'a, T: 'a + SplitAtIndex, V: Callbackable<CTup2<Address, T>>>(
&'a self,
addr: Address,
buf: T,
out_fail: Option<&'a mut V>
) -> impl Iterator<Item = CTup3<M, Address, T>> + 'a
pub fn map<'a, T: 'a + SplitAtIndex, V: Callbackable<CTup2<Address, T>>>( &'a self, addr: Address, buf: T, out_fail: Option<&'a mut V> ) -> impl Iterator<Item = CTup3<M, Address, T>> + 'a
Maps a linear address range to a hardware address range.
Output element lengths will both match, so there is no need to do additional clipping (for buf-to-buf copies).
Invalid regions get pushed to the out_fail
parameter. This function requires self
sourcepub fn map_base_iter<'a, T: 'a + SplitAtIndex, I: 'a + Iterator<Item = CTup3<Address, Address, T>>, V: Callbackable<CTup2<Address, T>>>(
&'a self,
iter: I,
out_fail: Option<&'a mut V>
) -> MemoryMapIterator<'a, I, M, T, V> ⓘ
pub fn map_base_iter<'a, T: 'a + SplitAtIndex, I: 'a + Iterator<Item = CTup3<Address, Address, T>>, V: Callbackable<CTup2<Address, T>>>( &'a self, iter: I, out_fail: Option<&'a mut V> ) -> MemoryMapIterator<'a, I, M, T, V> ⓘ
Maps an address range iterator to an address range.
Output element lengths will both match, so there is no need to do additional clipping (for buf-to-buf copies).
Invalid regions get pushed to the out_fail
parameter
sourcepub fn map_iter<'a, T: 'a + SplitAtIndex, I: 'a + Iterator<Item = CTup3<PhysicalAddress, Address, T>>, V: Callbackable<CTup2<Address, T>>>(
&'a self,
iter: I,
out_fail: Option<&'a mut V>
) -> MemoryMapIterator<'a, impl Iterator<Item = CTup3<Address, Address, T>> + 'a, M, T, V> ⓘ
pub fn map_iter<'a, T: 'a + SplitAtIndex, I: 'a + Iterator<Item = CTup3<PhysicalAddress, Address, T>>, V: Callbackable<CTup2<Address, T>>>( &'a self, iter: I, out_fail: Option<&'a mut V> ) -> MemoryMapIterator<'a, impl Iterator<Item = CTup3<Address, Address, T>> + 'a, M, T, V> ⓘ
Maps an address range iterator to a hardware address range.
Output element lengths will both match, so there is no need to do additional clipping (for buf-to-buf copies).
Invalid regions get pushed to the out_fail
parameter
source§impl MemoryMap<(Address, umem)>
impl MemoryMap<(Address, umem)>
sourcepub fn open<P: AsRef<Path>>(path: P) -> Result<Self>
pub fn open<P: AsRef<Path>>(path: P) -> Result<Self>
Constructs a new memory map by parsing the mapping table from a TOML file.
The file must contain a mapping table in the following format:
[[range]]
base=0x1000
length=0x1000
[[range]]
base=0x2000
length=0x1000
real_base=0x3000
The real_base
parameter is optional. If it is not set there will be no re-mapping.
sourcepub fn max_address(&self) -> Address
pub fn max_address(&self) -> Address
Returns the highest memory address that can be read.
pub fn real_size(&self) -> umem
sourcepub fn push_remap(
&mut self,
base: Address,
size: umem,
real_base: Address
) -> &mut Self
pub fn push_remap( &mut self, base: Address, size: umem, real_base: Address ) -> &mut Self
Adds a new memory mapping to this memory map by specifying base address and size of the mapping.
When adding overlapping memory regions this function will panic!
sourcepub fn push_range(
&mut self,
base: Address,
end: Address,
real_base: Address
) -> &mut Self
pub fn push_range( &mut self, base: Address, end: Address, real_base: Address ) -> &mut Self
Adds a new memory mapping to this memory map by specifying a range (base address and end addresses) of the mapping.
When adding overlapping memory regions this function will panic!
If end < base, the function will do nothing
sourcepub unsafe fn into_bufmap_mut<'a>(self) -> MemoryMap<&'a mut [u8]>
pub unsafe fn into_bufmap_mut<'a>(self) -> MemoryMap<&'a mut [u8]>
Transform address mapping into mutable buffer mapping
It will take the output address-size pair, and create mutable slice references to them.
Safety
The address mappings must be valid for the given lifetime 'a
, and should not
be aliased by any other memory references for fully defined behaviour.
However, aliasing should be fine for volatile memory cases such as analyzing running VM, since there are no safety guarantees anyways.
sourcepub unsafe fn into_bufmap<'a>(self) -> MemoryMap<&'a [u8]>
pub unsafe fn into_bufmap<'a>(self) -> MemoryMap<&'a [u8]>
Transform address mapping into buffer mapping
It will take the output address-size pair, and create slice references to them.
Safety
The address mappings must be valid for the given lifetime 'a
.
pub fn into_vec(self) -> Vec<PhysicalMemoryMapping>
pub fn from_vec(mem_map: Vec<PhysicalMemoryMapping>) -> Self
Trait Implementations§
source§impl<M: SplitAtIndex> Default for MemoryMap<M>
impl<M: SplitAtIndex> Default for MemoryMap<M>
source§impl<M: SplitAtIndex> IntoIterator for MemoryMap<M>
impl<M: SplitAtIndex> IntoIterator for MemoryMap<M>
§type IntoIter = Map<IntoIter<MemoryMapping<M>>, fn(_: MemoryMapping<M>) -> <MemoryMap<M> as IntoIterator>::Item>
type IntoIter = Map<IntoIter<MemoryMapping<M>>, fn(_: MemoryMapping<M>) -> <MemoryMap<M> as IntoIterator>::Item>
Auto Trait Implementations§
impl<M> !RefUnwindSafe for MemoryMap<M>
impl<M> Send for MemoryMap<M>where
M: Send,
impl<M> !Sync for MemoryMap<M>
impl<M> Unpin for MemoryMap<M>where
M: Unpin,
impl<M> UnwindSafe for MemoryMap<M>where
M: UnwindSafe,
Blanket Implementations§
source§impl<T> BorrowMut<T> for Twhere
T: ?Sized,
impl<T> BorrowMut<T> for Twhere
T: ?Sized,
source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
source§impl<'a, T> BorrowOwned<'a> for Twhere
T: 'a + Clone,
impl<'a, T> BorrowOwned<'a> for Twhere
T: 'a + Clone,
fn r_borrow( this: &'a <T as BorrowOwned<'a>>::ROwned ) -> <T as BorrowOwned<'a>>::RBorrowed
fn r_to_owned( this: <T as BorrowOwned<'a>>::RBorrowed ) -> <T as BorrowOwned<'a>>::ROwned
fn deref_borrowed(this: &<T as BorrowOwned<'a>>::RBorrowed) -> &T
fn deref_owned(this: &<T as BorrowOwned<'a>>::ROwned) -> &T
fn from_cow_borrow(this: &'a T) -> <T as BorrowOwned<'a>>::RBorrowed
fn from_cow_owned(this: <T as ToOwned>::Owned) -> <T as BorrowOwned<'a>>::ROwned
fn into_cow_borrow(this: <T as BorrowOwned<'a>>::RBorrowed) -> &'a T
fn into_cow_owned(this: <T as BorrowOwned<'a>>::ROwned) -> <T as ToOwned>::Owned
source§impl<I, T> FeedCallback<T> for Iwhere
I: IntoIterator<Item = T>,
impl<I, T> FeedCallback<T> for Iwhere
I: IntoIterator<Item = T>,
fn feed_into_mut(self, callback: &mut OpaqueCallback<'_, T>) -> usize
fn feed_into(self, callback: OpaqueCallback<'_, T>) -> usizewhere
Self: Sized,
source§impl<T> GetWithMetadata for T
impl<T> GetWithMetadata for T
§type ForSelf = WithMetadata_<T, T>
type ForSelf = WithMetadata_<T, T>
WithMetadata_<Self, Self>
§impl<S> ROExtAcc for S
impl<S> ROExtAcc for S
§fn f_get<F>(&self, offset: FieldOffset<S, F, Aligned>) -> &F
fn f_get<F>(&self, offset: FieldOffset<S, F, Aligned>) -> &F
offset
. Read more§fn f_get_mut<F>(&mut self, offset: FieldOffset<S, F, Aligned>) -> &mut F
fn f_get_mut<F>(&mut self, offset: FieldOffset<S, F, Aligned>) -> &mut F
offset
. Read more§fn f_get_ptr<F, A>(&self, offset: FieldOffset<S, F, A>) -> *const F
fn f_get_ptr<F, A>(&self, offset: FieldOffset<S, F, A>) -> *const F
offset
. Read more§fn f_get_mut_ptr<F, A>(&mut self, offset: FieldOffset<S, F, A>) -> *mut F
fn f_get_mut_ptr<F, A>(&mut self, offset: FieldOffset<S, F, A>) -> *mut F
offset
. Read more§impl<S> ROExtOps<Aligned> for S
impl<S> ROExtOps<Aligned> for S
§fn f_replace<F>(&mut self, offset: FieldOffset<S, F, Aligned>, value: F) -> F
fn f_replace<F>(&mut self, offset: FieldOffset<S, F, Aligned>, value: F) -> F
offset
) with value
,
returning the previous value of the field. Read more§fn f_get_copy<F>(&self, offset: FieldOffset<S, F, Aligned>) -> Fwhere
F: Copy,
fn f_get_copy<F>(&self, offset: FieldOffset<S, F, Aligned>) -> Fwhere
F: Copy,
§impl<S> ROExtOps<Unaligned> for S
impl<S> ROExtOps<Unaligned> for S
§fn f_replace<F>(&mut self, offset: FieldOffset<S, F, Unaligned>, value: F) -> F
fn f_replace<F>(&mut self, offset: FieldOffset<S, F, Unaligned>, value: F) -> F
offset
) with value
,
returning the previous value of the field. Read more§fn f_get_copy<F>(&self, offset: FieldOffset<S, F, Unaligned>) -> Fwhere
F: Copy,
fn f_get_copy<F>(&self, offset: FieldOffset<S, F, Unaligned>) -> Fwhere
F: Copy,
§impl<T> SelfOps for Twhere
T: ?Sized,
impl<T> SelfOps for Twhere
T: ?Sized,
§fn piped<F, U>(self, f: F) -> U
fn piped<F, U>(self, f: F) -> U
§fn piped_ref<'a, F, U>(&'a self, f: F) -> U
fn piped_ref<'a, F, U>(&'a self, f: F) -> U
piped
except that the function takes &Self
Useful for functions that take &Self
instead of Self
. Read more§fn piped_mut<'a, F, U>(&'a mut self, f: F) -> Uwhere
F: FnOnce(&'a mut Self) -> U,
fn piped_mut<'a, F, U>(&'a mut self, f: F) -> Uwhere
F: FnOnce(&'a mut Self) -> U,
piped
, except that the function takes &mut Self
.
Useful for functions that take &mut Self
instead of Self
.§fn mutated<F>(self, f: F) -> Self
fn mutated<F>(self, f: F) -> Self
§fn observe<F>(self, f: F) -> Self
fn observe<F>(self, f: F) -> Self
§fn as_ref_<T>(&self) -> &T
fn as_ref_<T>(&self) -> &T
AsRef
,
using the turbofish .as_ref_::<_>()
syntax. Read more