1use std::alloc::{GlobalAlloc, Layout};
2use std::sync::atomic::{AtomicBool, AtomicIsize, AtomicUsize, Ordering};
3use std::sync::Mutex;
4use nanorand::{Rng, WyRand};
5
// Memory ordering used for every atomic access in this allocator.
// NOTE(review): Relaxed publishes only the pointer/flag values themselves;
// Release/Acquire pairs would be needed to order the one-time init writes
// with readers — presumably acceptable for this joke allocator, but confirm.
const ORD: Ordering = Ordering::Relaxed;
7
/// A deliberately terrible `GlobalAlloc` that hands out pointers at random
/// offsets inside a single `malloc`ed region of `2^SIZE` bytes.
///
/// All state is initialized lazily by the first call to `alloc`.
pub struct BogoAlloc<const SIZE: usize> {
    // Base address of the backing region, stored as `isize` so random
    // offsets can be added atomically; 0 until the first `alloc` call
    // has published the region.
    start: AtomicIsize,
    // Size of the backing region in bytes (`2^SIZE`), set during init.
    size: AtomicUsize,
    // `true` until one `alloc` call claims the one-time initialization.
    uninit: AtomicBool,
    // RNG used to pick offsets; `None` until initialized in `alloc`.
    rand: Mutex<Option<WyRand>>,
}
15
16impl<const SIZE: usize> BogoAlloc<SIZE> {
17 pub const fn new() -> Self {
19 Self {
20 start: AtomicIsize::new(0),
21 size: AtomicUsize::new(0),
22 uninit: AtomicBool::new(true),
23 rand: Mutex::new(None),
24 }
25 }
26 unsafe fn rand(&self) -> isize {
28 isize::from_le_bytes(self.rand.lock().unwrap().as_mut().unwrap_unchecked().rand())
29 }
30 unsafe fn rand_addr(&self) -> isize {
32 self.rand().abs() % self.size.load(ORD) as isize
33 }
34}
35
36unsafe impl<const SIZE: usize> GlobalAlloc for BogoAlloc<SIZE> {
37 unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
38 if self.uninit.swap(false, ORD) {
40 let size: usize = 2_usize.pow(SIZE as u32);
41 self.size.store(size, ORD);
42 *self.rand.lock().unwrap() = Some(WyRand::new());
43 self.start.store(libc::malloc(size.into()) as isize, ORD);
44 } else {
45 while self.start.load(ORD) == 0 {}
46 }
47 let align = layout.align() as isize;
48 let mut offset = self.rand_addr().saturating_sub(layout.size() as isize); offset = (offset + align - 1) & !(align - 1); (self.start.load(ORD) + offset) as *mut u8
51 }
52
53 unsafe fn dealloc(&self, _: *mut u8, _: Layout) {
54 }
56}
57
58
#[cfg(test)]
mod tests {
    use crate::BogoAlloc;

    // Route every heap allocation in the test binary through BogoAlloc.
    #[global_allocator]
    static A: BogoAlloc<32> = BogoAlloc::new();

    #[test]
    fn funny_values_everywhere() {
        // Build a batch of vectors; with a random-offset allocator the
        // printed contents are anyone's guess.
        let vecs: Vec<Vec<_>> = (1..42).map(|n| vec![n; 1500]).collect();
        println!("{:?}", vecs);
    }
}