portable_dlmalloc/
lib.rs

1#![no_std]
2
3use core::{alloc::*, ffi::c_void, ptr::null_mut, sync::atomic::*};
4
5/// This module defines C FFI definitions of dlmalloc library.
6/// Use this library only if you understand the safety.
7pub mod raw;
8use raw::*;
9
// Bind the C library's `memcpy` directly: in a `#![no_std]` crate there is no std
// copy helper with C semantics, and the dlmalloc C library already links against
// libc-style symbols anyway.
unsafe extern "C"
{
	// Standard C `memcpy`: copies `cb` bytes from `src` to `dest` and returns `dest`.
	// The regions must not overlap.
	fn memcpy(dest:*mut c_void,src:*const c_void,cb:usize)->*mut c_void;
}
14
/// ## DLMalloc allocator
/// This is the default allocator.
/// 
/// A zero-sized handle over the global dlmalloc heap; all allocator state lives
/// inside the C library, so this type carries no fields.
pub struct DLMalloc;
18
19unsafe impl GlobalAlloc for DLMalloc
20{
21	unsafe fn alloc(&self, layout: Layout) -> *mut u8
22	{
23		unsafe
24		{
25			dlmemalign(layout.align(),layout.size()).cast()
26		}
27	}
28
29	unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout)
30	{
31		unsafe
32		{
33			dlfree(ptr.cast())
34		}
35	}
36
37	unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8
38	{
39		// We can optimize the `realloc` method by trying realloc-in-place.
40		let p=unsafe{dlrealloc_in_place(ptr.cast(),new_size)};
41		if p==ptr.cast()
42		{
43			// In-place reallocation is successful. Just return the original pointer.
44			ptr
45		}
46		else
47		{
48			// Failed to reallocate in-place! Try to allocate a new chunk.
49			assert!(p.is_null(),"dlrealloc_in_place returned Non-Null pointer on failure!");
50			let p=unsafe{dlmemalign(layout.align(),new_size)};
51			if !p.is_null()
52			{
53				unsafe
54				{
55					// Copy and free the old chunk.
56					memcpy(p,ptr.cast(),layout.size());
57					dlfree(ptr.cast());
58				}
59			}
60			p.cast()
61		}
62	}
63}
64
/// ## MspaceAlloc allocator
/// This allocator allows you to use an initial capacity bigger than the default granularity.
pub struct MspaceAlloc
{
	// Handle to the underlying dlmalloc mspace; null until lazily created on first `alloc`.
	mspace:AtomicPtr<c_void>,
	// Set by the thread that wins the right to create the mspace (see `alloc`).
	init:AtomicBool,
	// Initial capacity forwarded to `create_mspace` during lazy initialization.
	capacity:usize
}
73
74unsafe impl GlobalAlloc for MspaceAlloc
75{
76	unsafe fn alloc(&self, layout: Layout) -> *mut u8
77	{
78		// Lazily initialize mspace.
79		if self.init.compare_exchange(false,true,Ordering::Acquire,Ordering::Relaxed).is_ok()
80		{
81			self.mspace.store(unsafe{create_mspace(self.capacity,1)},Ordering::Release);
82		}
83		unsafe{mspace_memalign(self.mspace.load(Ordering::Acquire),layout.align(),layout.size()).cast()}
84	}
85
86	unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout)
87	{
88		unsafe{mspace_free(self.mspace.load(Ordering::Acquire),ptr.cast())}
89	}
90
91	unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8
92	{
93		// We can optimize the `realloc` method by trying realloc-in-place.
94		let p=unsafe{mspace_realloc_in_place(self.mspace.load(Ordering::Acquire),ptr.cast(),new_size)};
95		if p==ptr.cast()
96		{
97			// In-place reallocation is successful. Just return the original pointer.
98			ptr
99		}
100		else
101		{
102			// Failed to reallocate in-place! Try to allocate a new chunk.
103			assert!(p.is_null(),"mspace_realloc_in_place returned Non-Null pointer on failure!");
104			let p=unsafe{mspace_memalign(self.mspace.load(Ordering::Acquire),layout.size(),new_size)};
105			if !p.is_null()
106			{
107				unsafe
108				{
109					// Copy and free the old chunk.
110					memcpy(p,ptr.cast(),layout.size());
111					mspace_free(self.mspace.load(Ordering::Acquire),ptr.cast());
112				}
113			}
114			p.cast()
115		}
116	}
117}
118
impl MspaceAlloc
{
	/// ## `new` method
	/// Initialize `MspaceAlloc` object. Use this method to initialize a static variable annotated as the global allocator.
	///
	/// `capacity` is the initial capacity requested from `create_mspace` when the
	/// mspace is lazily created by the first call to `alloc`.
	pub const fn new(capacity:usize)->Self
	{
		Self
		{
			// The `mspace` will be lazily initialized.
			mspace:AtomicPtr::new(null_mut()),
			init:AtomicBool::new(false),
			capacity
		}
	}

	/// ## `destroy` method
	/// Destroys `MspaceAlloc` object.
	/// 
	/// ## Safety
	/// You must ensure all allocated objects are dropped before destroying the allocator!
	pub unsafe fn destroy(&self)
	{
		// The CAS flips `init` back to false so only one caller destroys the mspace,
		// and so a later `alloc` can lazily re-create it.
		// NOTE(review): `mspace` still holds the stale pointer after destruction, and a
		// concurrent `alloc` racing with `destroy` could observe it — presumably the
		// Safety contract above forbids concurrent use; confirm with callers.
		if self.init.compare_exchange(true,false,Ordering::Acquire,Ordering::Relaxed).is_ok()
		{
			unsafe
			{
				destroy_mspace(self.mspace.load(Ordering::Acquire));
			}
		}
	}
}
149}