swage_core/memory/mod.rs

//! The `memory` module provides abstractions for memory management, initialization, and checking for bitflips.
//!
//! The `memory` module provides the following abstractions:
//! - `Memory`: A managed memory region that is allocated using the `HugepageAllocator`.
//! - `VictimMemory`: A trait that combines the `BytePointer`, `Initializable`, and `Checkable` traits.
//! - `BytePointer`: A trait for accessing memory as a byte pointer.
//! - `Initializable`: A trait for initializing memory with (random) values.
//! - `Checkable`: A trait for checking memory for bitflips.
//! - `PfnResolver`: A trait for resolving the physical frame number (PFN) of `self`.
//! - `LinuxPageMap`: A struct that provides a mapping from virtual to physical addresses.
//! - `VirtToPhysResolver`: A trait for resolving the physical address of a provided virtual address.
//!
//! The `memory` module also provides the following helper structs:
//! - `ConsecBlocks`: A struct that represents a collection of consecutive memory blocks.
//! - `MemBlock`: A struct that represents a memory block.
//! - `PfnOffset`: A struct that represents a physical frame number (PFN) offset.
//! - `PfnOffsetResolver`: A struct that resolves the physical frame number (PFN) offset of a provided virtual address.
//! - `MemoryTupleTimer`: A struct that provides a timer for measuring memory access times.
//!
//! The `memory` module also provides the following helper functions:
//! - `construct_memory_tuple_timer`: A function that constructs a `MemoryTupleTimer`.
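//!
//! A minimal usage sketch (marked `ignore` because it assumes a caller-provided
//! `mem` value implementing `VictimMemory`; how that region is allocated is up to
//! the caller):
//!
//! ```ignore
//! // Fill the region with zeros, hammer aggressor rows elsewhere, then scan for bit flips.
//! mem.initialize(DataPattern::Zero);
//! // ... hammering happens here ...
//! let flips = mem.check(DataPattern::Zero);
//! println!("found {} bit flips", flips.len());
//! ```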
mod consec_blocks;
mod dram_addr;
mod flippy_page;
mod keyed_cache;
mod mem_configuration;
mod memblock;
mod pagemap_info;
mod pfn_offset;
mod pfn_offset_resolver;
mod pfn_resolver;
mod timer;
mod virt_to_phys;

pub use self::consec_blocks::ConsecBlocks;
pub use self::dram_addr::DRAMAddr;
pub use self::flippy_page::{FlippyPage, find_flippy_page};
pub use self::mem_configuration::{MTX_SIZE, MemConfiguration};
pub use self::memblock::{Error as ConsecPfnsError, FormatPfns, GetConsecPfns, Memory};
pub use self::pfn_offset::PfnOffset;
pub use self::pfn_offset_resolver::PfnOffsetResolver;
pub use self::pfn_resolver::PfnResolver;
pub use self::timer::{MemoryTupleTimer, TimerError, construct_memory_tuple_timer};
pub use self::virt_to_phys::PhysAddr;
pub use self::virt_to_phys::{LinuxPageMap, LinuxPageMapError, VirtToPhysResolver};
use rand::Rng as _;
use serde::Serialize;
use std::arch::x86_64::_mm_clflush;
use std::fmt::Debug;
use std::io::BufWriter;

use crate::util::{CL_SIZE, PAGE_MASK, PAGE_SIZE, ROW_MASK, ROW_SIZE, Rng};

use libc::{c_void, memcmp};
use log::{debug, info, trace};
use std::{arch::x86_64::_mm_mfence, fmt};

/// Pointer type for aggressor row addresses.
///
/// Used to identify memory rows that are hammered to induce bit flips
/// in adjacent victim rows.
pub type AggressorPtr = *const u8;

/// Errors that can occur during memory operations.
#[derive(Debug)]
pub enum MemoryError {
    /// Memory allocation failed
    AllocFailed,
    /// Attempted to create a zero-size memory layout
    ZeroSizeLayout,
}

impl std::error::Error for MemoryError {}

impl fmt::Display for MemoryError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            MemoryError::AllocFailed => write!(f, "Allocation failed"),
            MemoryError::ZeroSizeLayout => write!(f, "Zero size layout"),
        }
    }
}

/// Combined trait for victim memory regions.
///
/// This trait combines [`BytePointer`], [`Initializable`], and [`Checkable`] to provide
/// a complete interface for managing victim memory in Rowhammer attacks.
pub trait VictimMemory: BytePointer + Initializable + Checkable {}

/// Trait for accessing memory as a byte pointer.
///
/// Provides low-level access to memory regions with byte-level addressing.
#[allow(clippy::len_without_is_empty)]
pub trait BytePointer {
    /// Returns a mutable pointer to the byte at the given offset.
    ///
    /// # Safety
    ///
    /// The returned pointer is valid only while the memory region exists.
    /// Dereferencing requires unsafe code and proper synchronization.
    fn addr(&self, offset: usize) -> *mut u8;

    /// Returns a mutable pointer to the start of the memory region.
    fn ptr(&self) -> *mut u8;

    /// Returns the total length of the memory region in bytes.
    fn len(&self) -> usize;

    /// Dumps memory contents to a file in hexadecimal format.
    ///
    /// Writes each row (`ROW_SIZE` bytes) as a line of hexadecimal bytes.
    ///
    /// # Errors
    ///
    /// Returns an error if file creation or writing fails.
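    ///
    /// # Example
    ///
    /// A minimal sketch, assuming `mem` is a value implementing this trait:
    ///
    /// ```ignore
    /// mem.dump("memory_dump.hex")?;
    /// ```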
    fn dump(&self, file: &str) -> std::io::Result<()> {
        use std::fs::File;
        use std::io::Write;
        let file = File::create(file)?;
        let mut writer = BufWriter::new(file);
        for offset in (0..self.len()).step_by(ROW_SIZE) {
            for byte_offset in 0..ROW_SIZE {
                write!(writer, "{:02x}", unsafe {
                    *self.addr(offset + byte_offset)
                })?;
            }
            writer.write_all(b"\n")?;
        }
        writer.flush()?;
        Ok(())
    }
}

/// Memory initialization patterns for Rowhammer attacks.
///
/// Different patterns can be used to maximize the probability of inducing bit flips.
/// Stripe patterns alternate between aggressor rows (ones/zeros) and victim rows
/// (opposite values) to create charge transfer between adjacent DRAM rows.
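///
/// # Example
///
/// A sketch of constructing a stripe pattern; `aggr1` and `aggr2` stand in for
/// row-aligned [`AggressorPtr`] values obtained elsewhere:
///
/// ```ignore
/// // Aggressor rows are filled with 0xFF, all other rows with 0x00.
/// let pattern = DataPattern::StripeOne { ones: vec![aggr1, aggr2] };
/// ```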
#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
pub enum DataPattern {
    /// Random data pattern using a seeded RNG
    Random(Box<Rng>),
    /// Stripe pattern with zeros at aggressor rows, ones elsewhere
    StripeZero {
        /// The rows to contain 0x00
        #[serde(skip_serializing)]
        zeroes: Vec<AggressorPtr>,
    },
    /// All zeros (0x00)
    Zero,
    /// Stripe pattern with ones at aggressor rows, zeros elsewhere
    StripeOne {
        /// The rows to contain 0xFF
        #[serde(skip_serializing)]
        ones: Vec<AggressorPtr>,
    },
    /// All ones (0xFF)
    One,
}

impl DataPattern {
    fn get(&mut self, addr: *const u8) -> [u8; PAGE_SIZE] {
        match self {
            DataPattern::Random(rng) => {
                let mut arr = [0u8; PAGE_SIZE];
                for byte in arr.iter_mut() {
                    *byte = rng.random();
                }
                arr
            }
            DataPattern::StripeZero { zeroes } => {
                // Addresses are compared at row granularity: pages inside an aggressor row get 0x00.
                for &row in zeroes.iter() {
                    if (row as usize) == addr as usize & !ROW_MASK {
                        trace!("setting aggressor page to 0x00 at addr {:p}", addr);
                        return [0x00; PAGE_SIZE];
                    }
                }
                [0xFF; PAGE_SIZE]
            }
            DataPattern::Zero => [0x00; PAGE_SIZE],
            DataPattern::StripeOne { ones } => {
                for &row in ones.iter() {
                    if (row as usize) == addr as usize & !ROW_MASK {
                        trace!("setting aggressor page to 0xFF at addr {:p}", addr);
                        return [0xFF; PAGE_SIZE];
                    }
                }
                [0x00; PAGE_SIZE]
            }
            DataPattern::One => [0xFF; PAGE_SIZE],
        }
    }
}

/// Trait for initializing memory with specific patterns.
///
/// Provides methods to write data patterns to memory, either for all pages
/// or excluding specific pages.
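///
/// # Example
///
/// A minimal sketch, assuming `mem` is a value implementing [`VictimMemory`]:
///
/// ```ignore
/// mem.initialize(DataPattern::One);
/// ```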
pub trait Initializable {
    /// Initializes memory with the given data pattern.
    fn initialize(&self, pattern: DataPattern);

    /// Initializes memory excluding specific pages.
    fn initialize_excluding(&self, pattern: DataPattern, pages: &[*const u8]);

    /// Initializes memory using a callback function.
    ///
    /// The callback receives the byte offset of a page and returns the page contents
    /// to write, or `None` to skip that page.
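    ///
    /// # Example
    ///
    /// A minimal sketch, assuming `mem` implements [`VictimMemory`] and `PAGE_SIZE`
    /// divides `mem.len()`: tag each page with its page index.
    ///
    /// ```ignore
    /// mem.initialize_cb(&mut |offset| Some([(offset / PAGE_SIZE) as u8; PAGE_SIZE]));
    /// ```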
    fn initialize_cb(&self, f: &mut dyn FnMut(usize) -> Option<[u8; PAGE_SIZE]>);
}

/// Represents a bit flip detected in memory.
///
/// A bit flip is a change in memory where one or more bits differ from their
/// expected value. This is the primary indicator of a successful Rowhammer attack.
#[derive(Clone, Copy, Serialize, PartialEq, Eq, Hash)]
pub struct BitFlip {
    /// Virtual address where the bit flip occurred
    pub addr: usize,
    /// Bitmask indicating which bits flipped (1 = bit flipped)
    pub bitmask: u8,
    /// The expected data value (before the flip)
    pub data: u8,
}

/// Direction of bit flip transitions.
///
/// Indicates whether bits flipped from 0→1, from 1→0, or in multiple directions.
#[derive(Clone, Debug, Serialize, Eq, PartialEq)]
pub enum FlipDirection {
    /// Bit flipped from 0 to 1
    ZeroToOne,
    /// Bit flipped from 1 to 0
    OneToZero,
    /// Multiple bits flipped in (potentially) different directions
    Multiple(Vec<FlipDirection>),
    /// No bit flip occurred
    None,
    /// Any flip direction is acceptable
    Any,
}

impl core::fmt::Debug for BitFlip {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("BitFlip")
            .field("addr", &format_args!("{:#x}", self.addr))
            .field("bitmask", &format_args!("{:#x}", self.bitmask))
            .field("data", &format_args!("{:#x}", self.data))
            .finish()
    }
}

impl BitFlip {
    /// Creates a new `BitFlip` from an address, a bitmask of flipped bits, and the expected data byte.
    pub fn new(addr: *const u8, bitmask: u8, data: u8) -> Self {
        BitFlip {
            addr: addr as usize,
            bitmask,
            data,
        }
    }
}

impl BitFlip {
    /// Calculates the [`FlipDirection`] (1→0, 0→1, or `Multiple`) observed in this `BitFlip`.
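    ///
    /// # Example
    ///
    /// A sketch (mirroring the unit test at the bottom of this module): the bitmask is the
    /// XOR of observed and expected data, so a single set bit whose expected value was 1
    /// indicates a 1→0 flip.
    ///
    /// ```ignore
    /// // Expected 0b0000_0001, observed 0b0000_0000: bit 0 flipped from 1 to 0.
    /// let flip = BitFlip::new(std::ptr::null(), 0b0000_0001, 0b0000_0001);
    /// assert_eq!(flip.flip_direction(), FlipDirection::OneToZero);
    /// ```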
    pub fn flip_direction(&self) -> FlipDirection {
        match self.bitmask.count_ones() {
            0 => FlipDirection::None,
            1 => {
                let flipped = self.bitmask & self.data;
                match flipped {
                    0 => FlipDirection::ZeroToOne,
                    _ => FlipDirection::OneToZero,
                }
            }
            2.. => FlipDirection::Multiple(
                (0..8)
                    .filter_map(|i| {
                        if self.bitmask & (1 << i) != 0 {
                            Some(if self.data & (1 << i) != 0 {
                                FlipDirection::OneToZero
                            } else {
                                FlipDirection::ZeroToOne
                            })
                        } else {
                            None
                        }
                    })
                    .collect(),
            ),
        }
    }
}

/// Trait for checking memory regions for bit flips.
///
/// Implementors provide methods to compare memory contents against expected patterns
/// and identify locations where bit flips have occurred.
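///
/// # Example
///
/// A minimal sketch, assuming `mem` is a value implementing [`VictimMemory`] that was
/// previously initialized with the same pattern:
///
/// ```ignore
/// let flips = mem.check(DataPattern::One);
/// for flip in &flips {
///     println!("bit flip at {:#x}: mask {:#x}", flip.addr, flip.bitmask);
/// }
/// ```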
pub trait Checkable {
    /// Checks memory against a pattern and returns detected bit flips.
    fn check(&self, pattern: DataPattern) -> Vec<BitFlip>;

    /// Checks memory excluding specific pages.
    fn check_excluding(&self, pattern: DataPattern, pages: &[*const u8]) -> Vec<BitFlip>;

    /// Checks memory using a callback function to generate expected values.
    fn check_cb(&self, f: &mut dyn FnMut(usize) -> Option<[u8; PAGE_SIZE]>) -> Vec<BitFlip>;
}

/// Blanket implementation of the `Initializable` trait for `VictimMemory` types
impl<T> Initializable for T
where
    T: VictimMemory,
{
    fn initialize(&self, pattern: DataPattern) {
        self.initialize_excluding(pattern, &[]);
    }

    fn initialize_excluding(&self, mut pattern: DataPattern, pages: &[*const u8]) {
        info!(
            "initialize buffer with pattern {}",
            match &pattern {
                DataPattern::Random(rng) => format!("random ({:?})", rng),
                DataPattern::StripeZero { .. } => "stripe zero".into(),
                DataPattern::Zero => "zero".into(),
                DataPattern::StripeOne { .. } => "stripe one".into(),
                DataPattern::One => "one".into(),
            }
        );
        self.initialize_cb(&mut |offset: usize| {
            let addr = self.addr(offset);
            let val = pattern.get(addr); // `get` must be called for every page, even excluded ones, because the pattern RNG is stateful
            if pages
                .iter()
                .any(|&page| page as usize & !PAGE_MASK == addr as usize & !PAGE_MASK)
            {
                return None;
            }
            Some(val)
        });
    }

    fn initialize_cb(&self, f: &mut dyn FnMut(usize) -> Option<[u8; PAGE_SIZE]>) {
        let len = self.len();
        if !len.is_multiple_of(8) {
            panic!("memory len must be divisible by 8");
        }
        if !len.is_multiple_of(PAGE_SIZE) {
            panic!(
                "memory len ({}) must be divisible by PAGE_SIZE ({})",
                len, PAGE_SIZE
            );
        }

        debug!("initialize {} bytes", len);

        for offset in (0..len).step_by(PAGE_SIZE) {
            if let Some(value) = f(offset) {
                unsafe {
                    std::ptr::write_volatile(self.addr(offset) as *mut [u8; PAGE_SIZE], value);
                }
            }
        }
        debug!("memory init done");
    }
}

/// Blanket implementation of the `PfnResolver` trait for `BytePointer` types
impl<T: BytePointer> PfnResolver for T {
    fn pfn(&self) -> Result<PhysAddr, LinuxPageMapError> {
        let mut resolver = LinuxPageMap::new()?;
        resolver.get_phys(self.ptr() as u64)
    }
}

/// Blanket implementation of the `Checkable` trait for `VictimMemory` types
impl<T> Checkable for T
where
    T: VictimMemory,
{
    fn check(&self, pattern: DataPattern) -> Vec<BitFlip> {
        self.check_excluding(pattern, &[])
    }

    fn check_excluding(&self, mut pattern: DataPattern, pages: &[*const u8]) -> Vec<BitFlip> {
        self.check_cb(&mut |offset: usize| {
            let addr = self.addr(offset);
            let val = pattern.get(addr); // `get` must be called for every page, even excluded ones, because the pattern RNG is stateful
            if pages
                .iter()
                .any(|&page| page as usize & !PAGE_MASK == addr as usize & !PAGE_MASK)
            {
                return None;
            }
            Some(val)
        })
    }

    fn check_cb(&self, f: &mut dyn FnMut(usize) -> Option<[u8; PAGE_SIZE]>) -> Vec<BitFlip> {
        let len = self.len();
        if !len.is_multiple_of(PAGE_SIZE) {
            panic!(
                "memory len ({}) must be divisible by PAGE_SIZE ({})",
                len, PAGE_SIZE
            );
        }

        let mut ret = vec![];
        for offset in (0..len).step_by(PAGE_SIZE) {
            if let Some(expected) = f(offset) {
                unsafe {
                    // Flush every cache line of the page and fence so the comparison reads from DRAM rather than the cache.
                    for byte_offset in (0..PAGE_SIZE).step_by(CL_SIZE) {
                        _mm_clflush(self.addr(offset + byte_offset));
                    }
                    _mm_mfence();
                    let cmp = memcmp(
                        self.addr(offset) as *const c_void,
                        expected.as_ptr() as *const c_void,
                        PAGE_SIZE,
                    );
                    if cmp == 0 {
                        continue;
                    }
                    debug!(
                        "Found bitflip in page {}. Determining exact flip position",
                        offset
                    );
                    // The page differs: re-read byte by byte to locate the exact flipped bits.
                    for (i, &expected) in expected.iter().enumerate() {
                        let addr = self.addr(offset + i);
                        _mm_clflush(addr);
                        _mm_mfence();
                        if *addr != expected {
                            ret.push(BitFlip::new(addr, *addr ^ expected, expected));
                        }
                    }
                }
            } else {
                debug!("skipping page {} due to exclusion", offset);
            }
        }
        ret
    }
}

#[test]
fn test_pattern_random_clone() {
    let pattern = DataPattern::Random(Box::new(Rng::from_seed(rand::random())));
    let a = pattern.clone().get(std::ptr::null());
    let b = pattern.clone().get(std::ptr::null());
    assert_eq!(a, b);
}

#[test]
fn test_bitflip_direction() {
    let flip = BitFlip::new(std::ptr::null(), 0b0000_0000, 0xFF);
    assert_eq!(flip.flip_direction(), FlipDirection::None);
    let flip = BitFlip::new(std::ptr::null(), 0b0000_0001, 0b0000_0001);
    assert_eq!(flip.flip_direction(), FlipDirection::OneToZero);

    let flip = BitFlip::new(std::ptr::null(), 0b0000_0001, 0b1111_1110);
    assert_eq!(flip.flip_direction(), FlipDirection::ZeroToOne);

    let flip = BitFlip::new(std::ptr::null(), 0b0000_0011, 0b0000_0010);
    assert_eq!(
        flip.flip_direction(),
        FlipDirection::Multiple(vec![FlipDirection::ZeroToOne, FlipDirection::OneToZero])
    );

    let flip = BitFlip::new(std::ptr::null(), 0b0000_0011, 0b0000_0000);
    assert_eq!(
        flip.flip_direction(),
        FlipDirection::Multiple(vec![FlipDirection::ZeroToOne, FlipDirection::ZeroToOne])
    );

    let flip = BitFlip::new(std::ptr::null(), 0b0000_0011, 0b0000_0011);
    assert_eq!(
        flip.flip_direction(),
        FlipDirection::Multiple(vec![FlipDirection::OneToZero, FlipDirection::OneToZero])
    );
}