safe_allocator_api/raw_alloc.rs
//! A safe wrapper around low-level allocation primitives from `alloc::alloc`.
//!
//! This crate provides a safe interface for working with raw allocations while maintaining
//! the same error handling semantics as the underlying allocation APIs.

use core::ptr::NonNull;
use core::{alloc::Layout, fmt};

use allocator_api2::alloc::{AllocError, Allocator, Global};

/// A safe wrapper around a raw allocation with known layout.
///
/// # Guarantees
///
/// This type ensures that:
/// - The wrapped pointer is always non-null and properly aligned
/// - Memory is automatically deallocated when dropped
/// - Reallocation maintains proper alignment and size constraints
///
/// # Example
///
/// ```rust
/// # use core::alloc::Layout;
/// use safe_allocator_api::RawAlloc;
///
/// // Create a new allocation of 1024 bytes
/// let layout = Layout::array::<u8>(1024).unwrap();
/// let mut alloc = RawAlloc::new(layout).expect("allocation failed");
///
/// // Write some data
/// unsafe {
///     core::ptr::write(alloc.as_mut_ptr(), 42u8);
/// }
///
/// // Automatically deallocated when dropped
/// ```
pub struct RawAlloc<A: Allocator = Global> {
    ptr: NonNull<[u8]>,
    layout: Layout,
    allocator: A,
}

impl<A: Allocator> RawAlloc<A> {
    /// Creates a new allocation with the given layout using the provided allocator.
    ///
    /// This is equivalent to calling [`Allocator::allocate`] but provides automatic
    /// cleanup when the allocation is no longer needed.
    ///
    /// # Arguments
    ///
    /// * `layout` - The desired memory layout
    /// * `allocator` - The allocator to use
    ///
    /// # Errors
    ///
    /// Returns [`AllocError`] if the allocator reports an error or if the layout
    /// has a size of 0.
    ///
    /// # Example
    ///
    /// ```rust
    /// use allocator_api2::alloc::*;
    /// use safe_allocator_api::RawAlloc;
    ///
    /// let layout = Layout::new::<u64>();
    /// let alloc = RawAlloc::new_in(layout, Global)?;
    /// # Ok::<_, AllocError>(())
    /// ```
    pub fn new_in(layout: Layout, allocator: A) -> Result<Self, AllocError> {
        if layout.size() == 0 {
            return Err(AllocError);
        }

        let ptr = allocator.allocate(layout)?;

        Ok(Self {
            ptr,
            layout,
            allocator,
        })
    }

    /// Creates a new zeroed allocation with the given layout using the provided allocator.
    ///
    /// This is equivalent to calling [`Allocator::allocate_zeroed`] but provides automatic
    /// cleanup when the allocation is no longer needed.
    ///
    /// # Errors
    ///
    /// Returns [`AllocError`] if the allocator reports an error or if the layout
    /// has a size of 0.
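    ///
    /// # Example
    ///
    /// A minimal sketch using the [`Global`] allocator; like the other examples
    /// it relies on the re-exports from `allocator_api2::alloc`:
    ///
    /// ```rust
    /// use allocator_api2::alloc::*;
    /// use safe_allocator_api::RawAlloc;
    ///
    /// let layout = Layout::array::<u8>(64).unwrap();
    /// let alloc = RawAlloc::new_zeroed_in(layout, Global)?;
    /// assert!(alloc.as_slice().iter().all(|&b| b == 0));
    /// # Ok::<_, AllocError>(())
    /// ```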
    pub fn new_zeroed_in(layout: Layout, allocator: A) -> Result<Self, AllocError> {
        if layout.size() == 0 {
            return Err(AllocError);
        }

        let ptr = allocator.allocate_zeroed(layout)?;

        Ok(Self {
            ptr,
            layout,
            allocator,
        })
    }

    /// Attempts to grow the allocation to the new layout.
    ///
    /// # Errors
    ///
    /// Returns [`AllocError`] if:
    /// - The allocator reports an error
    /// - The new layout has a size of 0
    /// - The new size is not larger than the current size (use [`Self::shrink`] instead)
    ///
    /// # Example
    ///
    /// ```rust
    /// use allocator_api2::alloc::*;
    /// use safe_allocator_api::RawAlloc;
    ///
    /// let layout = Layout::array::<u8>(100).unwrap();
    /// let mut alloc = RawAlloc::new(layout)?;
    ///
    /// // Grow the allocation
    /// let new_layout = Layout::array::<u8>(200).unwrap();
    /// alloc.grow(new_layout)?;
    /// # Ok::<_, AllocError>(())
    /// ```
    pub fn grow(&mut self, new_layout: Layout) -> Result<(), AllocError> {
        if new_layout.size() == 0 {
            return Err(AllocError);
        }
        if new_layout.size() <= self.layout.size() {
            return Err(AllocError);
        }

        let new_ptr = unsafe {
            self.allocator.grow(self.ptr.cast(), self.layout, new_layout)?
        };

        self.ptr = new_ptr;
        self.layout = new_layout;
        Ok(())
    }

    /// Attempts to grow the allocation to the new layout, zeroing the additional memory.
    ///
    /// This is equivalent to [`Self::grow`] but ensures any additional memory is zeroed.
    ///
    /// # Errors
    ///
    /// Returns [`AllocError`] if:
    /// - The allocator reports an error
    /// - The new layout has a size of 0
    /// - The new size is not larger than the current size (use [`Self::shrink`] instead)
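    ///
    /// # Example
    ///
    /// A minimal sketch; starting from a zeroed allocation so the whole buffer can
    /// be read back safely, the grown tail is zeroed as well:
    ///
    /// ```rust
    /// use allocator_api2::alloc::*;
    /// use safe_allocator_api::RawAlloc;
    ///
    /// let layout = Layout::array::<u8>(100).unwrap();
    /// let mut alloc = RawAlloc::new_zeroed(layout)?;
    ///
    /// let new_layout = Layout::array::<u8>(200).unwrap();
    /// alloc.grow_zeroed(new_layout)?;
    /// assert!(alloc.as_slice().iter().all(|&b| b == 0));
    /// # Ok::<_, AllocError>(())
    /// ```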
    pub fn grow_zeroed(&mut self, new_layout: Layout) -> Result<(), AllocError> {
        if new_layout.size() == 0 {
            return Err(AllocError);
        }
        if new_layout.size() <= self.layout.size() {
            return Err(AllocError);
        }

        let new_ptr = unsafe {
            self.allocator.grow_zeroed(self.ptr.cast(), self.layout, new_layout)?
        };

        self.ptr = new_ptr;
        self.layout = new_layout;
        Ok(())
    }

    /// Attempts to shrink the allocation to the new layout.
    ///
    /// # Errors
    ///
    /// Returns [`AllocError`] if:
    /// - The allocator reports an error
    /// - The new layout has a size of 0
    /// - The new size is not smaller than the current size (use [`Self::grow`] instead)
    ///
    /// # Example
    ///
    /// ```rust
    /// use allocator_api2::alloc::*;
    /// use safe_allocator_api::RawAlloc;
    ///
    /// let layout = Layout::array::<u8>(200).unwrap();
    /// let mut alloc = RawAlloc::new(layout)?;
    ///
    /// // Shrink the allocation
    /// let new_layout = Layout::array::<u8>(100).unwrap();
    /// alloc.shrink(new_layout)?;
    /// # Ok::<_, AllocError>(())
    /// ```
    pub fn shrink(&mut self, new_layout: Layout) -> Result<(), AllocError> {
        if new_layout.size() == 0 {
            return Err(AllocError);
        }
        if new_layout.size() >= self.layout.size() {
            return Err(AllocError);
        }

        let new_ptr = unsafe {
            self.allocator.shrink(self.ptr.cast(), self.layout, new_layout)?
        };

        self.ptr = new_ptr;
        self.layout = new_layout;
        Ok(())
    }

    /// Returns a raw pointer to the allocated memory.
    ///
    /// # Safety
    ///
    /// The caller must ensure that the memory is accessed according to
    /// the original layout constraints.
    pub fn as_ptr(&self) -> *const u8 {
        self.ptr.as_ptr() as *const u8
    }

    /// Returns a raw mutable pointer to the allocated memory.
    ///
    /// # Safety
    ///
    /// The caller must ensure that the memory is accessed according to
    /// the original layout constraints.
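    ///
    /// # Example
    ///
    /// A small sketch of writing through the raw pointer; staying inside
    /// `layout.size()` bytes is the caller's responsibility:
    ///
    /// ```rust
    /// use allocator_api2::alloc::*;
    /// use safe_allocator_api::RawAlloc;
    ///
    /// let layout = Layout::array::<u8>(16).unwrap();
    /// let mut alloc = RawAlloc::new_zeroed(layout)?;
    /// unsafe { core::ptr::write(alloc.as_mut_ptr(), 0xAB) };
    /// assert_eq!(alloc.as_slice()[0], 0xAB);
    /// # Ok::<_, AllocError>(())
    /// ```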
    pub fn as_mut_ptr(&mut self) -> *mut u8 {
        self.ptr.as_ptr() as *mut u8
    }

    /// Returns a slice reference to the allocated memory.
    ///
    /// This provides a safe interface to access the allocated memory as a byte slice.
    ///
    /// # Example
    ///
    /// ```rust
    /// use allocator_api2::alloc::*;
    /// use safe_allocator_api::RawAlloc;
    ///
    /// let layout = Layout::array::<u8>(100).unwrap();
    /// let alloc = RawAlloc::new(layout)?;
    /// let slice = alloc.as_slice();
    /// assert_eq!(slice.len(), 100);
    /// # Ok::<_, AllocError>(())
    /// ```
    pub fn as_slice(&self) -> &[u8] {
        unsafe { core::slice::from_raw_parts(self.as_ptr(), self.layout.size()) }
    }

    /// Returns a mutable slice reference to the allocated memory.
    ///
    /// This provides a safe interface to access the allocated memory as a mutable byte slice.
    ///
    /// # Example
    ///
    /// ```rust
    /// use allocator_api2::alloc::*;
    /// use safe_allocator_api::RawAlloc;
    ///
    /// let layout = Layout::array::<u8>(100).unwrap();
    /// let mut alloc = RawAlloc::new(layout)?;
    /// let slice = alloc.as_mut_slice();
    /// slice[0] = 42;
    /// assert_eq!(slice[0], 42);
    /// # Ok::<_, AllocError>(())
    /// ```
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        unsafe { core::slice::from_raw_parts_mut(self.as_mut_ptr(), self.layout.size()) }
    }

    /// Returns the layout used for this allocation.
    pub fn layout(&self) -> Layout {
        self.layout
    }

    /// Returns the length of the allocation in bytes.
    ///
    /// # Remarks
    ///
    /// This is the size the allocation was created with, taken from its [`Layout`].
    /// Due to alignment, the allocator may have provided slightly more usable space,
    /// but you should not rely on that.
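    ///
    /// # Example
    ///
    /// A short illustration: `len` reports the byte size requested through the layout.
    ///
    /// ```rust
    /// use allocator_api2::alloc::*;
    /// use safe_allocator_api::RawAlloc;
    ///
    /// let layout = Layout::array::<u16>(8).unwrap();
    /// let alloc = RawAlloc::new(layout)?;
    /// assert_eq!(alloc.len(), 16); // 8 elements * 2 bytes
    /// # Ok::<_, AllocError>(())
    /// ```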
    pub fn len(&self) -> usize {
        self.layout.size()
    }

    /// Checks if the allocation is empty.
    ///
    /// Since zero-sized layouts are rejected by the constructors, this is always
    /// `false` for a live `RawAlloc`.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}

impl<A: Allocator> Drop for RawAlloc<A> {
    fn drop(&mut self) {
        unsafe {
            self.allocator.deallocate(self.ptr.cast(), self.layout);
        }
    }
}

impl<A: Allocator> fmt::Debug for RawAlloc<A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RawAlloc")
            .field("ptr", &self.ptr)
            .field("layout", &self.layout)
            .finish()
    }
}

// Convenience constructors using the Global allocator
impl RawAlloc {
    /// Creates a new allocation with the given layout using the global allocator.
    ///
    /// This is equivalent to calling [`Self::new_in`] with the global allocator.
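    ///
    /// # Example
    ///
    /// A minimal sketch mirroring [`Self::new_in`], but without naming an allocator:
    ///
    /// ```rust
    /// use allocator_api2::alloc::*;
    /// use safe_allocator_api::RawAlloc;
    ///
    /// let layout = Layout::new::<u64>();
    /// let alloc = RawAlloc::new(layout)?;
    /// assert_eq!(alloc.len(), core::mem::size_of::<u64>());
    /// # Ok::<_, AllocError>(())
    /// ```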
    pub fn new(layout: Layout) -> Result<Self, AllocError> {
        Self::new_in(layout, Global)
    }

    /// Creates a new zeroed allocation with the given layout using the global allocator.
    ///
    /// This is equivalent to calling [`Self::new_zeroed_in`] with the global allocator.
    pub fn new_zeroed(layout: Layout) -> Result<Self, AllocError> {
        Self::new_zeroed_in(layout, Global)
    }
}

// SAFETY: `NonNull` makes `RawAlloc` `!Send`/`!Sync` by default, but the allocation
// is uniquely owned, so it can be sent or shared across threads as long as the
// allocator itself is `Send`/`Sync`.
unsafe impl<A: Allocator + Send> Send for RawAlloc<A> {}
unsafe impl<A: Allocator + Sync> Sync for RawAlloc<A> {}

#[cfg(test)]
mod tests {
    use super::*;
    use core::mem::size_of;

    #[test]
    fn zero_sized_alloc_returns_error() {
        let layout = Layout::from_size_align(0, 1).unwrap();
        assert!(RawAlloc::new(layout).is_err());
    }

    #[test]
    fn basic_alloc_and_write() {
        let layout = Layout::new::<u32>();
        let mut alloc = RawAlloc::new(layout).unwrap();

        unsafe {
            core::ptr::write(alloc.as_mut_ptr() as *mut u32, 0xDEADBEEF);
            assert_eq!(core::ptr::read(alloc.as_ptr() as *const u32), 0xDEADBEEF);
        }
    }

    #[test]
    fn zeroed_allocation() {
        let size = 1024;
        let layout = Layout::array::<u8>(size).unwrap();
        let alloc = RawAlloc::new_zeroed(layout).unwrap();

        unsafe {
            let slice = core::slice::from_raw_parts(alloc.as_ptr(), size);
            assert!(slice.iter().all(|&x| x == 0));
        }
    }

    #[test]
    fn custom_allocator() {
        let layout = Layout::new::<i32>();
        let alloc = RawAlloc::new_in(layout, Global).unwrap();
        assert_eq!(alloc.layout().size(), size_of::<i32>());
    }

    #[test]
    fn array_allocation() {
        let elements = 100;
        let layout = Layout::array::<u64>(elements).unwrap();
        let mut alloc = RawAlloc::new(layout).unwrap();

        unsafe {
            let slice = core::slice::from_raw_parts_mut(alloc.as_mut_ptr() as *mut u64, elements);

            for (i, item) in slice.iter_mut().enumerate() {
                *item = i as u64;
            }

            assert_eq!(slice[42], 42);
        }
    }

    #[test]
    fn alignment_requirements() {
        let align = 64; // Test a large alignment
        let size = 128;
        let layout = Layout::from_size_align(size, align).unwrap();
        let alloc = RawAlloc::new(layout).unwrap();

        let addr = alloc.as_ptr() as usize;
        assert_eq!(addr % align, 0, "Allocation not properly aligned");
    }

    #[test]
    fn multiple_allocations() {
        let layout = Layout::new::<u8>();
        let mut allocations = Vec::new();

        // Create many allocations to stress the allocator
        for i in 0..100 {
            let mut alloc = RawAlloc::new(layout).unwrap();
            unsafe {
                core::ptr::write(alloc.as_mut_ptr(), i as u8);
            }
            allocations.push(alloc);
        }

        // Verify each allocation is independent
        for (i, alloc) in allocations.iter().enumerate() {
            unsafe {
                assert_eq!(core::ptr::read(alloc.as_ptr()), i as u8);
            }
        }
    }

    #[test]
    fn oversized_allocation() {
        // Try to allocate a very large size (but not so large it would definitely fail)
        let layout = Layout::array::<u8>(1024 * 1024).unwrap();
        let result = RawAlloc::new(layout);

        // We don't assert success or failure here, as it depends on the system,
        // but we verify it doesn't panic
        let _ = result.is_ok();
    }

    #[test]
    fn grow_allocation() {
        let initial_size = 100;
        let layout = Layout::array::<u8>(initial_size).unwrap();
        let mut alloc = RawAlloc::new(layout).unwrap();

        // Write some data
        unsafe {
            let slice = core::slice::from_raw_parts_mut(alloc.as_mut_ptr(), initial_size);
            slice[0] = 42;
        }

        // Grow the allocation
        let new_size = 200;
        let new_layout = Layout::array::<u8>(new_size).unwrap();
        alloc.grow(new_layout).unwrap();

        // Verify the data is preserved
        unsafe {
            let slice = core::slice::from_raw_parts(alloc.as_ptr(), new_size);
            assert_eq!(slice[0], 42);
        }
    }

    #[test]
    fn grow_zeroed_allocation() {
        let initial_size = 100;
        let layout = Layout::array::<u8>(initial_size).unwrap();
        let mut alloc = RawAlloc::new(layout).unwrap();

        // Write some data
        unsafe {
            let slice = core::slice::from_raw_parts_mut(alloc.as_mut_ptr(), initial_size);
            slice[0] = 42;
        }

        // Grow the allocation
        let new_size = 200;
        let new_layout = Layout::array::<u8>(new_size).unwrap();
        alloc.grow_zeroed(new_layout).unwrap();

        unsafe {
            let slice = core::slice::from_raw_parts(alloc.as_ptr(), new_size);
            // Verify original data is preserved
            assert_eq!(slice[0], 42);
            // Verify new memory is zeroed
            assert!(slice[initial_size..].iter().all(|&x| x == 0));
        }
    }

    #[test]
    fn shrink_allocation() {
        let initial_size = 200;
        let layout = Layout::array::<u8>(initial_size).unwrap();
        let mut alloc = RawAlloc::new(layout).unwrap();

        // Write some data
        unsafe {
            let slice = core::slice::from_raw_parts_mut(alloc.as_mut_ptr(), initial_size);
            slice[0] = 42;
        }

        // Shrink the allocation
        let new_size = 100;
        let new_layout = Layout::array::<u8>(new_size).unwrap();
        alloc.shrink(new_layout).unwrap();

        // Verify the data is preserved
        unsafe {
            let slice = core::slice::from_raw_parts(alloc.as_ptr(), new_size);
            assert_eq!(slice[0], 42);
        }
    }

    #[test]
    fn grow_zero_size_fails() {
        let layout = Layout::array::<u8>(100).unwrap();
        let mut alloc = RawAlloc::new(layout).unwrap();

        let new_layout = Layout::from_size_align(0, 1).unwrap();
        assert!(alloc.grow(new_layout).is_err());
    }

    #[test]
    fn shrink_zero_size_fails() {
        let layout = Layout::array::<u8>(100).unwrap();
        let mut alloc = RawAlloc::new(layout).unwrap();

        let new_layout = Layout::from_size_align(0, 1).unwrap();
        assert!(alloc.shrink(new_layout).is_err());
    }

    #[test]
    fn grow_smaller_size_fails() {
        let layout = Layout::array::<u8>(200).unwrap();
        let mut alloc = RawAlloc::new(layout).unwrap();

        let new_layout = Layout::array::<u8>(100).unwrap();
        assert!(alloc.grow(new_layout).is_err());
    }

    #[test]
    fn shrink_larger_size_fails() {
        let layout = Layout::array::<u8>(100).unwrap();
        let mut alloc = RawAlloc::new(layout).unwrap();

        let new_layout = Layout::array::<u8>(200).unwrap();
        assert!(alloc.shrink(new_layout).is_err());
    }
}
563}