//! portable_dlmalloc — `lib.rs`

1#![no_std]
2#![feature(allocator_api)]
3
4use core::{alloc::*, ffi::c_void, ptr::null_mut, sync::atomic::*};
5
6/// This module defines C FFI definitions of dlmalloc library.
7/// Use this library only if you understand the safety.
8pub mod raw;
9use raw::*;
10
11/// This module contains the alternate allocator API.
12#[cfg(feature="alt-alloc")] pub mod alt_alloc;
13
// FFI binding to the C library `memcpy`.
// Used by the `realloc` fallback paths below to move the payload when a
// reallocation cannot be performed in place.
unsafe extern "C"
{
	fn memcpy(dest:*mut c_void,src:*const c_void,cb:usize)->*mut c_void;
}
18
/// ## DLMalloc allocator
/// This is the default allocator.
/// A zero-sized handle whose `GlobalAlloc` implementation forwards every call
/// to the global dlmalloc heap exposed by the [`raw`] module.
pub struct DLMalloc;
22
23unsafe impl GlobalAlloc for DLMalloc
24{
25	unsafe fn alloc(&self, layout: Layout) -> *mut u8
26	{
27		unsafe{dlmemalign(layout.align(),layout.size()).cast()}
28	}
29
30	unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout)
31	{
32		unsafe{dlfree(ptr.cast())};
33	}
34
35	unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8
36	{
37		// The `dlrealloc` function does not guarantee the original alignment!
38		let p=unsafe{dlrealloc_in_place(ptr.cast(),new_size)};
39		if p==ptr.cast()
40		{
41			// In-place allocation is successful. Just return the pointer.
42			ptr
43		}
44		else
45		{
46			// In-place allocation failed. Allocate a new pointer.
47			assert!(p.is_null(),"dlrealloc_in_place returned non-null pointer on return!");
48			let p=unsafe{dlmemalign(layout.align(),new_size)};
49			unsafe
50			{
51				memcpy(p,ptr.cast(),layout.size());
52				dlfree(ptr.cast());
53			}
54			p.cast()
55		}
56	}
57}
58
/// ## MspaceAlloc allocator
/// This allocator allows you to use an initial capacity bigger than the default granularity.
/// The backing mspace is created lazily on the first allocation.
pub struct MspaceAlloc
{
	// Handle to the underlying dlmalloc mspace; null until lazily created.
	mspace:AtomicPtr<c_void>,
	// Set to true by the first `alloc` call that wins the initialization race.
	init:AtomicBool,
	// Initial capacity forwarded to `create_mspace` on first allocation.
	capacity:usize
}
67
68unsafe impl GlobalAlloc for MspaceAlloc
69{
70	unsafe fn alloc(&self, layout: Layout) -> *mut u8
71	{
72		// Lazily initialize mspace.
73		if self.init.compare_exchange(false,true,Ordering::Acquire,Ordering::Relaxed).is_ok()
74		{
75			self.mspace.store(unsafe{create_mspace(self.capacity,1)},Ordering::Release);
76		}
77		unsafe{mspace_memalign(self.mspace.load(Ordering::Acquire),layout.align(),layout.size()).cast()}
78	}
79
80	unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout)
81	{
82		unsafe{mspace_free(self.mspace.load(Ordering::Acquire),ptr.cast())}
83	}
84
85	unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8
86	{
87		let p=unsafe{mspace_realloc_in_place(self.mspace.load(Ordering::Acquire),ptr.cast(),new_size)};
88		if p==ptr.cast()
89		{
90			// In-place allocation is successful. Just return the pointer.
91			ptr
92		}
93		else
94		{
95			// In-place allocation failed. Allocate a new pointer.
96			assert!(p.is_null(),"mspace_realloc_in_place returned non-null pointer on return!");
97			let p=unsafe{mspace_memalign(self.mspace.load(Ordering::Acquire),layout.align(),new_size)};
98			unsafe
99			{
100				memcpy(p,ptr.cast(),layout.size());
101				mspace_free(self.mspace.load(Ordering::Acquire),ptr.cast());
102			}
103			p.cast()
104		}
105	}
106}
107
108impl MspaceAlloc
109{
110	/// ## `new` method
111	/// Initialize `MspaceAlloc` object. Use this method to initialize a static variable annotated as the global allocator.
112	pub const fn new(capacity:usize)->Self
113	{
114		Self
115		{
116			// The `mspace` will be lazily initialized.
117			mspace:AtomicPtr::new(null_mut()),
118			init:AtomicBool::new(false),
119			capacity
120		}
121	}
122
123	/// ## `destroy` method
124	/// Destroys `MspaceAlloc` object. All `mmap`ed pages will be released.
125	/// 
126	/// ## Safety
127	/// You must ensure all allocated objects (Vec, Box, etc.) are dropped before destroying the allocator!
128	pub unsafe fn destroy(&self)
129	{
130		if self.init.compare_exchange(true,false,Ordering::Acquire,Ordering::Relaxed).is_ok()
131		{
132			unsafe{destroy_mspace(self.mspace.load(Ordering::Acquire))};
133		}
134	}
135}