1#![deny(missing_docs)]
2use std::alloc::{GlobalAlloc, Layout};
131use std::cell::{Cell, RefCell};
132use std::thread_local;
133
/// Optional detailed allocation tracing, enabled by the `tracing` feature.
#[cfg(feature = "tracing")]
pub mod tracing;
138
/// Mixes a pointer-sized value into a well-distributed 128-bit hash.
///
/// The hashes are folded into order-independent accumulators: each
/// allocation adds a contribution and the matching free subtracts the
/// identical one, so any nonzero accumulator at the end of a session
/// signals a mismatch.
fn hash_fn(p: usize) -> u128 {
    // Two large primes drive three multiply / xor-shift avalanche rounds.
    const PRIME1: u128 = 257343791756393576901679996513787191589;
    const PRIME2: u128 = 271053192961985756828288246809453504189;
    let mut h = (p as u128).wrapping_add(PRIME2);
    for (prime, shift) in [(PRIME1, 64u32), (PRIME2, 42), (PRIME1, 25)] {
        h = h.wrapping_mul(prime);
        h ^= h >> shift;
    }
    h
}
152
/// Per-thread accumulator for allocation statistics.
///
/// The three `*_accum` fields are order-independent checksums: each
/// allocation adds a hash contribution and the matching free subtracts
/// the same value, so after a balanced session they are all zero.
#[derive(Default)]
struct LocalState {
    // Sum of hash(ptr) over live allocations.
    ptr_accum: u128,
    // Sum of hash(ptr) * hash(size) over live allocations.
    ptr_size_accum: u128,
    // Sum of hash(ptr) * hash(align) over live allocations.
    ptr_align_accum: u128,
    num_allocs: u64,
    num_frees: u64,
    // Running byte totals; their difference is the currently-live memory.
    mem_allocated: u64,
    mem_freed: u64,
    // High-water marks captured in `record_alloc`.
    peak_mem: u64,
    peak_mem_allocs: u64,
    #[cfg(feature = "tracing")]
    tracing: tracing::TracingState,
}
167
impl LocalState {
    /// Records a successful allocation of `layout` at `ptr`.
    fn record_alloc(&mut self, ptr: *const u8, layout: Layout) {
        // A null pointer means the allocation failed; it must not affect stats.
        if ptr.is_null() {
            return;
        }
        let ptr_hash = hash_fn(ptr as usize);
        let size_hash = hash_fn(layout.size());
        let align_hash = hash_fn(layout.align());
        // Each accumulator is advanced here and reversed exactly in
        // `record_free`; a balanced session leaves them all at zero.
        self.ptr_accum = self.ptr_accum.wrapping_add(ptr_hash);
        self.ptr_size_accum = self
            .ptr_size_accum
            .wrapping_add(ptr_hash.wrapping_mul(size_hash));
        self.ptr_align_accum = self
            .ptr_align_accum
            .wrapping_add(ptr_hash.wrapping_mul(align_hash));
        self.num_allocs += 1;
        self.mem_allocated += layout.size() as u64;

        // Track the high-water mark of live memory, and the number of
        // live allocations at the moment it was reached.
        if self.mem_allocated > self.mem_freed {
            let mem_usage = self.mem_allocated - self.mem_freed;
            if mem_usage > self.peak_mem {
                self.peak_mem = mem_usage;
                self.peak_mem_allocs = self.num_allocs.saturating_sub(self.num_frees);
            }
        }

        #[cfg(feature = "tracing")]
        self.tracing.record_alloc(ptr, layout);
    }
    /// Records a deallocation of `layout` at `ptr`, reversing the
    /// contribution made by the matching `record_alloc`.
    fn record_free(&mut self, ptr: *const u8, layout: Layout) {
        let ptr_hash = hash_fn(ptr as usize);
        let size_hash = hash_fn(layout.size());
        let align_hash = hash_fn(layout.align());
        self.ptr_accum = self.ptr_accum.wrapping_sub(ptr_hash);
        self.ptr_size_accum = self
            .ptr_size_accum
            .wrapping_sub(ptr_hash.wrapping_mul(size_hash));
        self.ptr_align_accum = self
            .ptr_align_accum
            .wrapping_sub(ptr_hash.wrapping_mul(align_hash));
        self.num_frees += 1;
        self.mem_freed += layout.size() as u64;

        #[cfg(feature = "tracing")]
        self.tracing.record_free(ptr, layout);
    }
    /// Resets all counters, ready for a new recording session.
    fn start(&mut self) {
        *self = Default::default();
        #[cfg(feature = "tracing")]
        self.tracing.start();
    }

    /// Analyses the accumulated counters and produces the final report.
    fn finish(&mut self) -> AllocInfo {
        // Checks are ordered from coarse to fine: a count mismatch makes
        // the hash accumulators meaningless, and the pointer accumulator
        // is checked before the size/alignment ones.
        let result = if self.num_allocs > self.num_frees {
            Err(AllocError::Leak)
        } else if self.num_allocs < self.num_frees {
            Err(AllocError::DoubleFree)
        } else if self.num_allocs == 0 {
            Err(AllocError::NoData)
        } else if self.ptr_accum != 0 {
            Err(AllocError::BadPtr)
        } else {
            // Size and alignment accumulators distinguish which part of
            // the layout disagreed between alloc and free.
            match (self.ptr_size_accum != 0, self.ptr_align_accum != 0) {
                (true, true) => Err(AllocError::BadLayout),
                (true, false) => Err(AllocError::BadSize),
                (false, true) => Err(AllocError::BadAlignment),
                (false, false) => Ok(()),
            }
        };
        AllocInfo {
            result,
            num_allocs: self.num_allocs,
            num_frees: self.num_frees,
            mem_allocated: self.mem_allocated,
            mem_freed: self.mem_freed,
            peak_mem: self.peak_mem,
            peak_mem_allocs: self.peak_mem_allocs,
            #[cfg(feature = "tracing")]
            tracing: self.tracing.finish(),
        }
    }
}
250
thread_local! {
    // Recording / re-entrancy flag: true only between `AllocChecker::new`
    // and `finish`, and temporarily cleared inside `with_local_state` so
    // allocations made while updating the stats are not themselves recorded.
    static ENABLED: Cell<bool> = Cell::new(false);
    // Per-thread accumulator state; reset by `LocalState::start`.
    static LOCAL_STATE: RefCell<LocalState> = RefCell::new(LocalState::default());
}
255
/// A wrapper around another [`GlobalAlloc`] which records per-thread
/// allocation statistics while recording is enabled.
pub struct Mockalloc<T: GlobalAlloc>(pub T);
260
261unsafe impl<T: GlobalAlloc> GlobalAlloc for Mockalloc<T> {
262 unsafe fn alloc(&self, layout: std::alloc::Layout) -> *mut u8 {
263 let ptr = self.0.alloc(layout);
264 with_local_state(|state| {
265 state.record_alloc(ptr, layout);
266 });
267 ptr
268 }
269
270 unsafe fn dealloc(&self, ptr: *mut u8, layout: std::alloc::Layout) {
271 with_local_state(|state| {
272 state.record_free(ptr, layout);
273 });
274 self.0.dealloc(ptr, layout);
275 }
276
277 unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
278 let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
281 let new_ptr = self.0.realloc(ptr, layout, new_size);
282 with_local_state(|state| {
283 state.record_free(ptr, layout);
284 state.record_alloc(new_ptr, new_layout);
285 });
286 new_ptr
287 }
288
289 unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
290 let ptr = self.0.alloc_zeroed(layout);
291 with_local_state(|state| {
292 state.record_alloc(ptr, layout);
293 });
294 ptr
295 }
296}
297
/// Kinds of allocation problems a recording session can detect.
#[derive(Debug, Clone, PartialEq)]
#[non_exhaustive]
pub enum AllocError {
    /// No allocations at all were recorded during the session.
    NoData,
    /// More allocations than frees were recorded.
    Leak,
    /// More frees than allocations were recorded.
    DoubleFree,
    /// A freed pointer never matched an allocated one (or vice versa).
    BadPtr,
    /// A free used a different size than the matching allocation.
    BadSize,
    /// A free used a different alignment than the matching allocation.
    BadAlignment,
    /// A free used both a different size and alignment than the matching
    /// allocation.
    BadLayout,
}
322
/// Statistics and verdict collected from one recording session.
///
/// Produced by [`record_allocs`]; accessors expose the individual fields.
#[derive(Debug, Clone)]
pub struct AllocInfo {
    // Total numbers of allocations and frees observed.
    num_allocs: u64,
    num_frees: u64,
    // Total bytes allocated / freed over the whole session.
    mem_allocated: u64,
    mem_freed: u64,
    // High-water mark of live bytes, and the live allocation count then.
    peak_mem: u64,
    peak_mem_allocs: u64,
    // Overall verdict for the session.
    result: Result<(), AllocError>,
    #[cfg(feature = "tracing")]
    tracing: tracing::TracingInfo,
}
337
338impl AllocInfo {
339 pub fn num_allocs(&self) -> u64 {
341 self.num_allocs
342 }
343 pub fn num_frees(&self) -> u64 {
345 self.num_frees
346 }
347 pub fn num_leaks(&self) -> u64 {
349 self.num_allocs - self.num_frees
350 }
351 pub fn mem_allocated(&self) -> u64 {
353 self.mem_allocated
354 }
355 pub fn mem_leaked(&self) -> u64 {
357 self.mem_allocated - self.mem_freed
358 }
359 pub fn mem_freed(&self) -> u64 {
361 self.mem_freed
362 }
363 pub fn peak_mem(&self) -> u64 {
365 self.peak_mem
366 }
367 pub fn peak_mem_allocs(&self) -> u64 {
369 self.peak_mem_allocs
370 }
371 pub fn result(&self) -> Result<(), AllocError> {
373 self.result.clone()
374 }
375 #[cfg(feature = "tracing")]
377 pub fn tracing(&self) -> &tracing::TracingInfo {
378 &self.tracing
379 }
380}
381
382struct AllocChecker(bool);
383
384impl AllocChecker {
385 fn new() -> Self {
386 LOCAL_STATE.with(|rc| rc.borrow_mut().start());
387 ENABLED.with(|c| {
388 assert!(!c.get(), "Mockalloc already recording");
389 c.set(true);
390 });
391 Self(true)
392 }
393 fn finish(mut self) -> AllocInfo {
394 self.0 = false;
395 ENABLED.with(|c| c.set(false));
396 LOCAL_STATE.with(|rc| rc.borrow_mut().finish())
397 }
398}
399
400impl Drop for AllocChecker {
401 fn drop(&mut self) {
402 if self.0 {
403 ENABLED.with(|c| c.set(false));
404 LOCAL_STATE.with(|rc| rc.borrow_mut().finish());
405 }
406 }
407}
408
/// Runs `f` while recording allocations on the current thread and
/// returns the collected [`AllocInfo`].
///
/// # Panics
///
/// Panics if a recording is already in progress on this thread.
pub fn record_allocs(f: impl FnOnce()) -> AllocInfo {
    let checker = AllocChecker::new();
    f();
    // If `f` panics, `checker`'s Drop impl still stops recording.
    checker.finish()
}
415
416pub fn assert_allocs(f: impl FnOnce()) {
426 if cfg!(miri) {
427 f();
428 } else {
429 let info = record_allocs(f);
430 #[cfg(feature = "tracing")]
431 if info.result.is_err() {
432 eprintln!("# Mockalloc trace:\n\n{:#?}", info.tracing);
433 }
434 info.result.unwrap();
435 }
436}
437
/// Returns `true` if allocations are currently being recorded on the
/// calling thread.
pub fn is_recording() -> bool {
    ENABLED.with(|c| c.get())
}
443
// Runs `f` against this thread's recording state, but only while
// recording is enabled.  Recording is temporarily switched off around
// the callback so that any allocations performed while updating the
// statistics are not themselves recorded (which would both skew the
// numbers and re-borrow LOCAL_STATE, panicking).
fn with_local_state(f: impl FnOnce(&mut LocalState)) {
    if !is_recording() {
        return;
    }
    ENABLED.with(|c| c.set(false));
    LOCAL_STATE.with(|rc| f(&mut rc.borrow_mut()));
    ENABLED.with(|c| c.set(true));
}
452
/// Attribute macro re-exported from `mockalloc_macros`.
/// NOTE(review): presumably wraps a `#[test]` body in `assert_allocs` —
/// confirm against the macro crate.
pub use mockalloc_macros::test;
454
#[cfg(test)]
mod tests {
    use super::{is_recording, record_allocs, AllocError, Mockalloc};
    use std::alloc::{GlobalAlloc, Layout, System};
    use std::{cmp, mem, ptr};

    // An allocator whose `dealloc` is a no-op while recording, so freed
    // addresses are never handed out again during a session.  That keeps
    // the pointer hashes of logically distinct allocations distinct
    // (NOTE(review): presumably to make the double-free / bad-pointer
    // tests deterministic — confirm).
    struct LeakingAllocator(System);

    unsafe impl GlobalAlloc for LeakingAllocator {
        unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
            self.0.alloc_zeroed(layout)
        }

        unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
            if is_recording() {
                // While recording, emulate realloc as alloc + copy + (leaked)
                // dealloc so the old address is never reused.
                let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
                let new_ptr = self.alloc(new_layout);
                if !new_ptr.is_null() {
                    ptr::copy_nonoverlapping(ptr, new_ptr, cmp::min(layout.size(), new_size));
                    self.dealloc(ptr, layout);
                }
                new_ptr
            } else {
                self.0.realloc(ptr, layout, new_size)
            }
        }

        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
            self.0.alloc(layout)
        }

        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
            // Deliberately leak while recording; free normally otherwise.
            if !is_recording() {
                self.0.dealloc(ptr, layout);
            }
        }
    }

    // Install the instrumented allocator for the whole test binary.
    #[global_allocator]
    static A: Mockalloc<LeakingAllocator> = Mockalloc(LeakingAllocator(System));

    // Performs a batch of balanced allocations (half returned for the
    // caller to drop, half dropped here) to add background noise around
    // the allocation under test in the `*_amongst_many` tests.
    fn do_some_allocations() -> Vec<Box<i32>> {
        let mut a = Vec::new();
        let mut b = Vec::new();
        for i in 0..32 {
            let p = Box::new(i);
            if i % 2 == 0 {
                a.push(p);
            } else {
                b.push(p);
            }
        }
        a
    }

    // A single balanced Box alloc/free is reported as clean.
    #[test]
    fn it_works() {
        let alloc_info = record_allocs(|| {
            let _p = Box::new(42);
        });
        alloc_info.result().unwrap();
        assert_eq!(alloc_info.num_allocs(), 1);
        assert_eq!(alloc_info.num_frees(), 1);
        assert_eq!(alloc_info.peak_mem(), 4);
        assert_eq!(alloc_info.peak_mem_allocs(), 1);
    }

    // A forgotten Box is reported as a leak.
    #[test]
    fn it_detects_leak() {
        let alloc_info = record_allocs(|| {
            mem::forget(Box::new(42));
        });
        assert_eq!(alloc_info.result().unwrap_err(), AllocError::Leak);
        assert_eq!(alloc_info.num_allocs(), 1);
        assert_eq!(alloc_info.num_frees(), 0);
    }

    // Freeing with both the wrong size and alignment (u32 -> f64).
    #[test]
    fn it_detects_bad_layout() {
        let alloc_info = record_allocs(|| unsafe {
            mem::transmute::<_, Box<f64>>(Box::new(42u32));
        });
        assert_eq!(alloc_info.result().unwrap_err(), AllocError::BadLayout);
        assert_eq!(alloc_info.num_allocs(), 1);
        assert_eq!(alloc_info.num_frees(), 1);
    }

    // A session with no allocations at all is itself an error.
    #[test]
    fn it_detects_no_data() {
        let alloc_info = record_allocs(|| ());
        assert_eq!(alloc_info.result().unwrap_err(), AllocError::NoData);
        assert_eq!(alloc_info.num_allocs(), 0);
        assert_eq!(alloc_info.num_frees(), 0);
    }

    // Same size, different alignment (u32 -> [u8; 4]).
    #[test]
    fn it_detects_bad_alignment() {
        let alloc_info = record_allocs(|| unsafe {
            mem::transmute::<_, Box<[u8; 4]>>(Box::new(42u32));
        });
        assert_eq!(alloc_info.result().unwrap_err(), AllocError::BadAlignment);
        assert_eq!(alloc_info.num_allocs(), 1);
        assert_eq!(alloc_info.num_frees(), 1);
    }

    // Same alignment, different size (u32 -> [u32; 2]).
    #[test]
    fn it_detects_bad_size() {
        let alloc_info = record_allocs(|| unsafe {
            mem::transmute::<_, Box<[u32; 2]>>(Box::new(42u32));
        });
        assert_eq!(alloc_info.result().unwrap_err(), AllocError::BadSize);
        assert_eq!(alloc_info.num_allocs(), 1);
        assert_eq!(alloc_info.num_frees(), 1);
    }

    // Dropping the same Box twice via ManuallyDrop.
    #[test]
    fn it_detects_double_free() {
        let alloc_info = record_allocs(|| unsafe {
            let mut x = mem::ManuallyDrop::new(Box::new(42));
            mem::ManuallyDrop::drop(&mut x);
            mem::ManuallyDrop::drop(&mut x);
        });
        assert_eq!(alloc_info.result().unwrap_err(), AllocError::DoubleFree);
        assert_eq!(alloc_info.num_allocs(), 1);
        assert_eq!(alloc_info.num_frees(), 2);
    }

    // Corrupting the Box's pointer so the free address differs from the
    // allocated one.
    #[test]
    fn it_detects_bad_ptr() {
        let alloc_info = record_allocs(|| unsafe {
            let mut x = Box::new(42);
            *mem::transmute::<_, &mut usize>(&mut x) += 1;
        });
        assert_eq!(alloc_info.result().unwrap_err(), AllocError::BadPtr);
        assert_eq!(alloc_info.num_allocs(), 1);
        assert_eq!(alloc_info.num_frees(), 1);
    }

    // Peak-memory tracking across a noisy but balanced workload.
    // NOTE(review): 580 / 52 are tied to this exact workload (Vec growth
    // plus live boxes) and to allocator-independent bookkeeping.
    #[test]
    fn it_works_amongst_many() {
        let alloc_info = record_allocs(|| {
            let _unused = do_some_allocations();
            let _p = Box::new(42);
            let _unused = do_some_allocations();
        });
        alloc_info.result().unwrap();
        assert_eq!(alloc_info.peak_mem(), 580);
        assert_eq!(alloc_info.peak_mem_allocs(), 52);
    }

    // The remaining tests repeat the error scenarios above, each buried
    // in background allocation noise, to check the accumulators still
    // isolate the single bad allocation.
    #[test]
    fn it_detects_leak_amongst_many() {
        let alloc_info = record_allocs(|| {
            let _unused = do_some_allocations();
            let p = Box::new(42);
            let _unused = do_some_allocations();
            mem::forget(p);
            let _unused = do_some_allocations();
        });
        assert_eq!(alloc_info.result().unwrap_err(), AllocError::Leak);
    }

    #[test]
    fn it_detects_bad_layout_amongst_many() {
        let alloc_info = record_allocs(|| unsafe {
            let _unused = do_some_allocations();
            let p = Box::new(42u32);
            let _unused = do_some_allocations();
            mem::transmute::<_, Box<f64>>(p);
            let _unused = do_some_allocations();
        });
        assert_eq!(alloc_info.result().unwrap_err(), AllocError::BadLayout);
    }

    #[test]
    fn it_detects_bad_alignment_amongst_many() {
        let alloc_info = record_allocs(|| unsafe {
            let _unused = do_some_allocations();
            let p = Box::new(42u32);
            let _unused = do_some_allocations();
            mem::transmute::<_, Box<[u8; 4]>>(p);
            let _unused = do_some_allocations();
        });
        assert_eq!(alloc_info.result().unwrap_err(), AllocError::BadAlignment);
    }

    #[test]
    fn it_detects_bad_size_amongst_many() {
        let alloc_info = record_allocs(|| unsafe {
            let _unused = do_some_allocations();
            let p = Box::new(42u32);
            let _unused = do_some_allocations();
            mem::transmute::<_, Box<[u32; 2]>>(p);
            let _unused = do_some_allocations();
        });
        assert_eq!(alloc_info.result().unwrap_err(), AllocError::BadSize);
    }

    #[test]
    fn it_detects_double_free_amongst_many() {
        let alloc_info = record_allocs(|| unsafe {
            let _unused = do_some_allocations();
            let mut x = mem::ManuallyDrop::new(Box::new(42));
            let _unused = do_some_allocations();
            mem::ManuallyDrop::drop(&mut x);
            let _unused = do_some_allocations();
            mem::ManuallyDrop::drop(&mut x);
            let _unused = do_some_allocations();
        });
        assert_eq!(alloc_info.result().unwrap_err(), AllocError::DoubleFree);
    }

    #[test]
    fn it_detects_bad_ptr_amongst_many() {
        let alloc_info = record_allocs(|| unsafe {
            let _unused = do_some_allocations();
            let mut x = Box::new(42);
            let _unused = do_some_allocations();
            *mem::transmute::<_, &mut usize>(&mut x) += 1;
            let _unused = do_some_allocations();
        });
        assert_eq!(alloc_info.result().unwrap_err(), AllocError::BadPtr);
    }
}