// arena_alligator/arena.rs
1use std::alloc::Layout;
2use std::fmt;
3use std::num::NonZeroUsize;
4
5use crate::bitmap::AtomicBitmap;
6use crate::buffer::Buffer;
7use crate::error::{AllocError, BuildError};
8use crate::metrics::{FixedArenaMetrics, MetricsState};
9use crate::sync::Arc;
10
/// Page size used for prefaulting the arena backing allocation.
///
/// [`build()`](crate::FixedArenaBuilder::build) touches every page at
/// build time when the page size is known ([`Auto`](Self::Auto) or
/// [`Size`](Self::Size)). Use
/// [`build_unfaulted()`](crate::FixedArenaBuilder::build_unfaulted) to
/// defer faulting for explicit control (e.g. NUMA placement).
///
/// # NUMA placement
///
/// The kernel allocates physical pages on the node where the faulting
/// thread runs. Three approaches:
///
/// 1. **Pin the builder thread** and call `build()`. Pages fault on the
///    pinned node immediately.
/// 2. **`build_unfaulted()`** and call
///    [`fault_pages()`](crate::Unfaulted::fault_pages) from a thread
///    pinned to the target node.
/// 3. **`build_unfaulted().into_inner()`** and let the kernel
///    demand-fault pages as each thread touches them (first-touch policy).
///
/// # Huge pages
///
/// Transparent huge pages (THP) are handled by the kernel and work with
/// any page size here. For pre-allocated huge pages, pass the huge-page
/// size (e.g. 2 MiB) via [`Size`](Self::Size).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PageSize {
    /// Page size is not known. No prefaulting will occur.
    Unknown,
    /// Detect page size from the OS via `sysconf(_SC_PAGESIZE)`.
    ///
    /// Only available on Unix with the `libc` feature enabled.
    #[cfg(all(unix, feature = "libc"))]
    Auto,
    /// Caller-supplied page size.
    ///
    /// `NonZeroUsize` rules out a zero stride for the prefault walk.
    Size(NonZeroUsize),
}
49
50impl PageSize {
51    pub(crate) fn resolve(self) -> Option<usize> {
52        match self {
53            PageSize::Unknown => None,
54            #[cfg(all(unix, feature = "libc"))]
55            PageSize::Auto => Some(os_page_size()),
56            PageSize::Size(n) => Some(n.get()),
57        }
58    }
59}
60
#[cfg(all(unix, feature = "libc"))]
/// Query the OS page size via `sysconf(_SC_PAGESIZE)`.
///
/// # Panics
///
/// Panics if `sysconf` reports an error (returns a negative value).
fn os_page_size() -> usize {
    // SAFETY: sysconf(_SC_PAGESIZE) takes no pointers and is always safe to call.
    let ps = unsafe { libc::sysconf(libc::_SC_PAGESIZE) };
    // sysconf returns -1 on error. The previous `ps as usize` cast would
    // silently wrap a -1 into usize::MAX in release builds (the debug_assert
    // compiles out); fail loudly instead.
    usize::try_from(ps).expect("sysconf(_SC_PAGESIZE) failed")
}
68
/// Touch one byte per page to force physical backing.
///
/// Writes a zero byte at `page_size` intervals across `ptr..ptr+len` so the
/// kernel faults in a physical frame for every page of the region.
///
/// A `page_size` of zero is treated as a no-op: without the guard the loop
/// below would never advance `offset` and spin forever.
pub(crate) fn prefault_region(ptr: *mut u8, len: usize, page_size: usize) {
    if page_size == 0 {
        debug_assert!(false, "prefault_region called with page_size == 0");
        return;
    }
    let mut offset = 0;
    while offset < len {
        // SAFETY: ptr..ptr+len is a valid allocation and offset < len, so
        // each write is within bounds.
        unsafe { ptr.add(offset).write_volatile(0) };
        offset += page_size;
    }
}
78
/// Initialization policy applied to each buffer on allocate.
///
/// Controls whether arena memory is initialized before it is handed to the
/// caller. The default ([`Uninit`](Self::Uninit)) leaves memory as-is for
/// maximum throughput.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum InitPolicy {
    /// Leave memory uninitialized (default).
    ///
    /// Slots are reused without clearing, so a buffer may observe bytes
    /// left behind by a previous occupant of the same slot.
    #[default]
    Uninit,
    /// Zero-fill the allocation region before returning the buffer.
    Zero,
}
92
/// An arena whose backing pages have not yet been faulted.
///
/// Created by [`FixedArenaBuilder::build_unfaulted()`] or
/// [`BuddyArenaBuilder::build_unfaulted()`](crate::BuddyArenaBuilder::build_unfaulted).
///
/// - [`fault_pages()`](Self::fault_pages) walks every page explicitly, then
///   returns the arena. Intended for use from a NUMA-pinned thread.
/// - [`into_inner()`](Self::into_inner) skips the walk. The kernel
///   demand-faults pages on first access (first-touch policy).
/// - [`allocate()`](Unfaulted::<FixedArena>::allocate) unwraps into the arena
///   and allocates immediately.
pub struct Unfaulted<A> {
    // Base of the backing allocation; owned by `inner`, cached here so the
    // prefault walk does not need to know A's internals.
    ptr: *mut u8,
    // Total bytes in the backing allocation (slot_count * slot_capacity).
    total_size: usize,
    // Resolved page size for the prefault walk; `None` disables faulting.
    page_size: Option<usize>,
    // The fully built arena, returned by `fault_pages`/`into_inner`.
    inner: A,
}

// SAFETY: the inner arena is Send, and the raw ptr is anchored by it.
// (The raw `ptr` field is what suppresses the auto impl.)
unsafe impl<A: Send> Send for Unfaulted<A> {}
113
114impl<A> Unfaulted<A> {
115    pub(crate) fn new(ptr: *mut u8, total_size: usize, page_size: Option<usize>, inner: A) -> Self {
116        Self {
117            ptr,
118            total_size,
119            page_size,
120            inner,
121        }
122    }
123
124    /// Walk every page in the backing allocation to force physical backing,
125    /// then return the unwrapped arena.
126    ///
127    /// Sequential faulting (low-to-high) is friendlier to TLB prefetchers
128    /// and gives the kernel a better chance at physically contiguous frames.
129    pub fn fault_pages(self) -> A {
130        if let Some(ps) = self.page_size {
131            prefault_region(self.ptr, self.total_size, ps);
132        }
133        self.inner
134    }
135
136    /// Unwrap the arena without faulting. Pages will be demand-faulted by
137    /// the kernel on first access.
138    pub fn into_inner(self) -> A {
139        self.inner
140    }
141}
142
143impl Unfaulted<FixedArena> {
144    /// Unwrap without faulting and allocate immediately.
145    ///
146    /// Pages will be demand-faulted by the kernel as written.
147    pub fn allocate(self) -> Result<(FixedArena, Buffer), AllocError> {
148        let arena = self.into_inner();
149        let buf = arena.allocate()?;
150        Ok((arena, buf))
151    }
152}
153
/// Shared builder configuration for both arena types.
pub(crate) struct BuildConfig {
    // Power-of-two alignment for the backing allocation and slot capacity
    // (checked by `validate_alignment`).
    pub(crate) alignment: usize,
    // When true, overflow writes copy to heap, freeing the arena slot.
    pub(crate) auto_spill: bool,
    // Whether slots are zero-filled on allocate (see `InitPolicy`).
    pub(crate) init_policy: InitPolicy,
    // Prefault policy, resolved to a concrete size at build time.
    pub(crate) page_size: PageSize,
}
161
162impl BuildConfig {
163    pub(crate) fn new() -> Self {
164        Self {
165            alignment: 1,
166            auto_spill: false,
167            init_policy: InitPolicy::default(),
168            #[cfg(all(unix, feature = "libc"))]
169            page_size: PageSize::Auto,
170            #[cfg(not(all(unix, feature = "libc")))]
171            page_size: PageSize::Unknown,
172        }
173    }
174
175    pub(crate) fn validate_alignment(&self) -> Result<(), BuildError> {
176        if !self.alignment.is_power_of_two() {
177            return Err(BuildError::InvalidAlignment);
178        }
179        Ok(())
180    }
181}
182
/// Shared state behind every [`FixedArena`] clone and live [`Buffer`].
pub(crate) struct ArenaInner {
    // Base pointer of the single backing allocation (freed in Drop).
    pub(crate) ptr: *mut u8,
    // Layout used to allocate; required to deallocate correctly.
    layout: Layout,
    // Bytes per slot, already rounded up to the configured alignment.
    pub(crate) slot_capacity: usize,
    // Number of slots in the arena.
    pub(crate) slot_count: usize,
    // Lock-free slot ownership tracking, one claim per slot index.
    pub(crate) bitmap: AtomicBitmap,
    // When true, overflow writes copy to heap, freeing the arena slot.
    pub(crate) auto_spill: bool,
    // Whether slots are zero-filled on allocate.
    pub(crate) init_policy: InitPolicy,
    // Counters surfaced via `FixedArena::metrics()`.
    pub(crate) metrics: MetricsState,
    // Presumably wakes async waiters when a slot frees — confirm in async_alloc.
    #[cfg(feature = "async-alloc")]
    pub(crate) wake_handle: Option<crate::async_alloc::WakeHandle>,
}
195
// SAFETY: Buffer discipline enforces exclusive access per slot:
// - Writing: one Buffer per slot index (bitmap claim enforced)
// - Frozen: immutable access through Bytes (buffer consumed by freeze)
// - No overlap between slots (each slot is at a distinct offset)
// The raw `ptr` field is the only thing suppressing the auto impls.
unsafe impl Send for ArenaInner {}
unsafe impl Sync for ArenaInner {}
202
impl Drop for ArenaInner {
    // Runs when the last Arc<ArenaInner> is dropped, i.e. after every
    // FixedArena clone and outstanding Buffer referencing it is gone.
    fn drop(&mut self) {
        // SAFETY: ptr and layout were produced by std::alloc::alloc in build().
        unsafe {
            std::alloc::dealloc(self.ptr, self.layout);
        }
    }
}
211
/// Fixed-size slot arena allocator.
///
/// All slots have identical capacity. Allocation is lock-free via atomic
/// bitmap. Produces `bytes::Bytes` through [`Buffer::freeze()`].
///
/// Cheap to clone — clones share the same backing memory via `Arc`.
#[derive(Clone)]
pub struct FixedArena {
    // Shared state; also kept alive by every outstanding Buffer.
    pub(crate) inner: Arc<ArenaInner>,
}
222
223impl fmt::Debug for FixedArena {
224    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
225        f.debug_struct("FixedArena")
226            .field("slot_count", &self.inner.slot_count)
227            .field("slot_capacity", &self.inner.slot_capacity)
228            .finish()
229    }
230}
231
232impl FixedArena {
233    /// Create a builder with per-slot capacity in bytes.
234    pub fn with_slot_capacity(
235        slot_count: NonZeroUsize,
236        slot_capacity: NonZeroUsize,
237    ) -> FixedArenaBuilder {
238        FixedArenaBuilder {
239            slot_count,
240            slot_capacity,
241            config: BuildConfig::new(),
242        }
243    }
244
245    /// Create a builder with total arena capacity in bytes.
246    ///
247    /// Per-slot capacity is derived as `ceil(total / slot_count)`, then
248    /// rounded up to alignment at build time.
249    ///
250    /// ```
251    /// use std::num::NonZeroUsize;
252    /// use arena_alligator::FixedArena;
253    ///
254    /// let arena = FixedArena::with_arena_capacity(
255    ///     NonZeroUsize::new(4).unwrap(),
256    ///     NonZeroUsize::new(1000).unwrap(),
257    /// ).build().unwrap();
258    /// assert_eq!(arena.slot_count(), 4);
259    /// assert_eq!(arena.slot_capacity(), 250); // ceil(1000 / 4)
260    /// ```
261    pub fn with_arena_capacity(
262        slot_count: NonZeroUsize,
263        arena_capacity: NonZeroUsize,
264    ) -> FixedArenaBuilder {
265        let per_slot = arena_capacity.get().div_ceil(slot_count.get());
266        FixedArenaBuilder {
267            slot_count,
268            // per_slot >= 1 because arena_capacity >= 1 and slot_count >= 1
269            slot_capacity: NonZeroUsize::new(per_slot).unwrap(),
270            config: BuildConfig::new(),
271        }
272    }
273
274    /// Number of slots in this arena.
275    pub fn slot_count(&self) -> usize {
276        self.inner.slot_count
277    }
278
279    /// Capacity of each slot in bytes (aligned).
280    pub fn slot_capacity(&self) -> usize {
281        self.inner.slot_capacity
282    }
283
284    /// Snapshot current allocator metrics.
285    pub fn metrics(&self) -> FixedArenaMetrics {
286        self.inner.metrics.fixed_snapshot()
287    }
288
289    /// Allocate a buffer. Returns `Err(AllocError::ArenaFull)` if all slots are in use.
290    pub fn allocate(&self) -> Result<Buffer, AllocError> {
291        let Some(slot_idx) = self.inner.bitmap.try_alloc() else {
292            self.inner.metrics.record_alloc_failure();
293            return Err(AllocError::ArenaFull);
294        };
295
296        let offset = slot_idx * self.inner.slot_capacity;
297
298        match self.inner.init_policy {
299            InitPolicy::Zero => {
300                // SAFETY: ptr+offset..ptr+offset+slot_capacity is within the arena allocation
301                // and exclusively owned by this slot (bitmap claim enforced above).
302                unsafe {
303                    self.inner
304                        .ptr
305                        .add(offset)
306                        .write_bytes(0, self.inner.slot_capacity);
307                }
308            }
309            InitPolicy::Uninit => {}
310        }
311
312        self.inner
313            .metrics
314            .record_alloc_success(self.inner.slot_capacity);
315
316        Ok(Buffer::new_fixed(
317            Arc::clone(&self.inner),
318            slot_idx,
319            offset,
320            self.inner.slot_capacity,
321        ))
322    }
323}
324
325/// Builder for [`FixedArena`].
326///
327/// Created via [`FixedArena::with_slot_capacity()`] or
328/// [`FixedArena::with_arena_capacity()`].
329pub struct FixedArenaBuilder {
330    slot_count: NonZeroUsize,
331    slot_capacity: NonZeroUsize,
332    config: BuildConfig,
333}
334
335impl FixedArenaBuilder {
336    /// Alignment for arena backing, slot boundaries, and slot capacities.
337    ///
338    /// Must be a power of 2. Default: 1 (no alignment constraint).
339    /// Use 4096 for O_DIRECT / DMA compatibility.
340    pub fn alignment(mut self, n: usize) -> Self {
341        self.config.alignment = n;
342        self
343    }
344
345    /// Enable auto-spill: overflow writes copy to heap, freeing the arena slot.
346    pub fn auto_spill(mut self) -> Self {
347        self.config.auto_spill = true;
348        self
349    }
350
351    /// Set the initialization policy for allocated buffers.
352    ///
353    /// Default: [`InitPolicy::Uninit`]. When set to [`InitPolicy::Zero`],
354    /// every call to [`FixedArena::allocate()`] writes zeroes across the
355    /// slot before returning the buffer.
356    pub fn init_policy(mut self, policy: InitPolicy) -> Self {
357        self.config.init_policy = policy;
358        self
359    }
360
361    /// Set the page size used for prefaulting.
362    ///
363    /// Default: [`PageSize::Auto`] on Unix with the `libc` feature,
364    /// [`PageSize::Unknown`] otherwise.
365    ///
366    /// When set to [`PageSize::Auto`] or [`PageSize::Size`], [`build()`](Self::build)
367    /// touches every page at build time. Use [`build_unfaulted()`](Self::build_unfaulted)
368    /// to defer the walk (e.g. for NUMA placement).
369    pub fn page_size(mut self, policy: PageSize) -> Self {
370        self.config.page_size = policy;
371        self
372    }
373    /// Build the arena, prefaulting pages if a page size is configured.
374    pub fn build(self) -> Result<FixedArena, BuildError> {
375        let page_size = self.config.page_size.resolve();
376        let arena = self.build_inner(
377            #[cfg(feature = "async-alloc")]
378            None,
379        )?;
380        if let Some(ps) = page_size {
381            prefault_region(
382                arena.inner.ptr,
383                arena.inner.slot_count * arena.inner.slot_capacity,
384                ps,
385            );
386        }
387        Ok(arena)
388    }
389
390    /// Build the arena without prefaulting. Returns an [`Unfaulted`] wrapper.
391    ///
392    /// See [`Unfaulted`] for the three consumption paths: explicit fault,
393    /// demand-fault, or direct allocate.
394    pub fn build_unfaulted(self) -> Result<Unfaulted<FixedArena>, BuildError> {
395        let page_size = self.config.page_size.resolve();
396        let arena = self.build_inner(
397            #[cfg(feature = "async-alloc")]
398            None,
399        )?;
400        let total_size = arena.inner.slot_count * arena.inner.slot_capacity;
401        Ok(Unfaulted::new(
402            arena.inner.ptr,
403            total_size,
404            page_size,
405            arena,
406        ))
407    }
408
409    fn build_inner(
410        self,
411        #[cfg(feature = "async-alloc")] wake_handle: Option<crate::async_alloc::WakeHandle>,
412    ) -> Result<FixedArena, BuildError> {
413        self.config.validate_alignment()?;
414
415        let slot_count = self.slot_count.get();
416        let slot_capacity = self.slot_capacity.get();
417
418        let aligned_capacity =
419            align_up(slot_capacity, self.config.alignment).ok_or(BuildError::SizeOverflow)?;
420
421        let total_size = slot_count
422            .checked_mul(aligned_capacity)
423            .ok_or(BuildError::SizeOverflow)?;
424
425        let layout = Layout::from_size_align(total_size, self.config.alignment)
426            .map_err(|_| BuildError::SizeOverflow)?;
427
428        // SAFETY: layout has non-zero size (slot_count > 0, aligned_capacity > 0).
429        let ptr = unsafe { std::alloc::alloc(layout) };
430        if ptr.is_null() {
431            std::alloc::handle_alloc_error(layout);
432        }
433
434        let inner = ArenaInner {
435            ptr,
436            layout,
437            slot_capacity: aligned_capacity,
438            slot_count,
439            bitmap: AtomicBitmap::new(slot_count),
440            auto_spill: self.config.auto_spill,
441            init_policy: self.config.init_policy,
442            metrics: MetricsState::new(total_size),
443            #[cfg(feature = "async-alloc")]
444            wake_handle,
445        };
446
447        Ok(FixedArena {
448            inner: Arc::new(inner),
449        })
450    }
451}
452
#[cfg(feature = "async-alloc")]
impl FixedArenaBuilder {
    /// Build an async-capable arena using the default notify-based waiter.
    pub fn build_async(self) -> Result<crate::async_alloc::AsyncFixedArena, BuildError> {
        self.build_async_with(crate::async_alloc::NotifyWaiters::new(1))
    }

    /// Build an async-capable arena with a custom waiter policy.
    pub fn build_async_with<W>(
        self,
        waiters: W,
    ) -> Result<crate::async_alloc::AsyncFixedArena<W>, BuildError>
    where
        W: crate::async_alloc::Waiter,
    {
        // Resolve before `build_inner` consumes `self`.
        let page_size = self.config.page_size.resolve();
        let waiters = std::sync::Arc::new(waiters);
        let wake_handle = crate::async_alloc::WakeHandle::new(std::sync::Arc::clone(&waiters));
        let arena = self.build_inner(Some(wake_handle))?;

        if let Some(ps) = page_size {
            let total = arena.inner.slot_count * arena.inner.slot_capacity;
            prefault_region(arena.inner.ptr, total, ps);
        }

        Ok(crate::async_alloc::AsyncFixedArena::new(arena, waiters))
    }
}
485
/// Round `value` up to the next multiple of `alignment`.
///
/// Returns `None` if the rounding would overflow `usize`.
///
/// `alignment` must be a power of two (enforced upstream by
/// `BuildConfig::validate_alignment`); the bit-mask trick below is only
/// correct under that precondition, so it is asserted in debug builds.
fn align_up(value: usize, alignment: usize) -> Option<usize> {
    debug_assert!(
        alignment.is_power_of_two(),
        "align_up requires a power-of-two alignment"
    );
    let rounded = value.checked_add(alignment - 1)?;
    Some(rounded & !(alignment - 1))
}
490
#[cfg(test)]
mod tests {
    use super::*;
    use std::num::NonZeroUsize;

    // Shorthand: build a NonZeroUsize, panicking on zero (test-only inputs).
    fn nz(n: usize) -> NonZeroUsize {
        NonZeroUsize::new(n).unwrap()
    }

    #[test]
    fn build_basic_arena() {
        let arena = FixedArena::with_slot_capacity(nz(4), nz(64))
            .build()
            .unwrap();
        assert_eq!(arena.slot_count(), 4);
        assert_eq!(arena.slot_capacity(), 64);
    }

    #[test]
    fn build_invalid_alignment_fails() {
        // 3 is not a power of two.
        let err = FixedArena::with_slot_capacity(nz(4), nz(64))
            .alignment(3)
            .build()
            .unwrap_err();
        assert_eq!(err, BuildError::InvalidAlignment);
    }

    #[test]
    fn build_zero_alignment_fails() {
        // 0 is rejected too: is_power_of_two() is false for zero.
        let err = FixedArena::with_slot_capacity(nz(4), nz(64))
            .alignment(0)
            .build()
            .unwrap_err();
        assert_eq!(err, BuildError::InvalidAlignment);
    }

    #[test]
    fn metrics_track_allocate_free_and_failure() {
        // Single-slot arena so a second allocate is guaranteed to fail.
        let arena = FixedArena::with_slot_capacity(nz(1), nz(64))
            .build()
            .unwrap();

        let initial = arena.metrics();
        assert_eq!(initial.bytes_reserved, 64);
        assert_eq!(initial.bytes_live, 0);

        let buf = arena.allocate().unwrap();
        let after_alloc = arena.metrics();
        assert_eq!(after_alloc.allocations_ok, 1);
        assert_eq!(after_alloc.allocations_failed, 0);
        assert_eq!(after_alloc.bytes_live, 64);

        assert_eq!(arena.allocate().unwrap_err(), AllocError::ArenaFull);
        let after_fail = arena.metrics();
        assert_eq!(after_fail.allocations_failed, 1);
        assert_eq!(after_fail.bytes_live, 64);

        drop(buf);
        let after_free = arena.metrics();
        assert_eq!(after_free.frees, 1);
        assert_eq!(after_free.bytes_live, 0);
    }

    #[test]
    fn build_size_overflow_fails() {
        // usize::MAX slots * 2 bytes overflows the checked_mul in build.
        let err = FixedArena::with_slot_capacity(nz(usize::MAX), nz(2))
            .build()
            .unwrap_err();
        assert_eq!(err, BuildError::SizeOverflow);
    }

    #[test]
    fn alignment_rounding_overflow_fails() {
        // align_up(usize::MAX, 2) overflows the checked_add.
        let err = FixedArena::with_slot_capacity(nz(1), nz(usize::MAX))
            .alignment(2)
            .build()
            .unwrap_err();
        assert_eq!(err, BuildError::SizeOverflow);
    }

    #[test]
    fn alignment_rounds_capacity_up() {
        let arena = FixedArena::with_slot_capacity(nz(2), nz(100))
            .alignment(64)
            .build()
            .unwrap();
        assert_eq!(arena.slot_capacity(), 128);
    }

    #[test]
    fn alignment_4096_rounds_up() {
        let arena = FixedArena::with_slot_capacity(nz(4), nz(100))
            .alignment(4096)
            .build()
            .unwrap();
        assert_eq!(arena.slot_capacity(), 4096);
    }

    #[test]
    fn prefault_disabled_builds() {
        let arena = FixedArena::with_slot_capacity(nz(4), nz(64))
            .page_size(PageSize::Unknown)
            .build()
            .unwrap();
        assert_eq!(arena.slot_count(), 4);
    }

    #[test]
    fn prefault_explicit_page_size_builds() {
        let arena = FixedArena::with_slot_capacity(nz(4), nz(4096))
            .page_size(PageSize::Size(nz(4096)))
            .build()
            .unwrap();
        assert_eq!(arena.slot_count(), 4);
    }

    #[cfg(all(unix, feature = "libc"))]
    #[test]
    fn prefault_auto_builds() {
        let arena = FixedArena::with_slot_capacity(nz(4), nz(4096))
            .page_size(PageSize::Auto)
            .build()
            .unwrap();
        assert_eq!(arena.slot_count(), 4);
    }

    #[test]
    fn build_unfaulted_then_fault_pages() {
        let faultable = FixedArena::with_slot_capacity(nz(4), nz(4096))
            .page_size(PageSize::Size(nz(4096)))
            .build_unfaulted()
            .unwrap();
        let arena = faultable.fault_pages();
        assert_eq!(arena.slot_count(), 4);
        let _buf = arena.allocate().unwrap();
    }

    #[test]
    fn build_unfaulted_into_inner_skips_fault() {
        let faultable = FixedArena::with_slot_capacity(nz(4), nz(64))
            .page_size(PageSize::Unknown)
            .build_unfaulted()
            .unwrap();
        let arena = faultable.into_inner();
        assert_eq!(arena.slot_count(), 4);
        let _buf = arena.allocate().unwrap();
    }

    #[test]
    fn clone_shares_inner() {
        let arena = FixedArena::with_slot_capacity(nz(2), nz(64))
            .build()
            .unwrap();
        let arena2 = arena.clone();
        assert_eq!(arena.slot_count(), arena2.slot_count());
        assert_eq!(arena.slot_capacity(), arena2.slot_capacity());
    }

    #[test]
    fn allocate_and_drop() {
        let arena = FixedArena::with_slot_capacity(nz(2), nz(64))
            .build()
            .unwrap();

        let buf1 = arena.allocate().unwrap();
        let buf2 = arena.allocate().unwrap();
        assert!(arena.allocate().is_err(), "arena should be full");

        // Dropping a buffer returns its slot to the bitmap.
        drop(buf1);
        let _buf3 = arena.allocate().unwrap();
        drop(buf2);
    }

    #[test]
    fn allocate_full_returns_arena_full() {
        let arena = FixedArena::with_slot_capacity(nz(1), nz(32))
            .build()
            .unwrap();

        let _buf = arena.allocate().unwrap();
        let err = arena.allocate().unwrap_err();
        assert_eq!(err, crate::AllocError::ArenaFull);
    }

    #[test]
    fn drop_returns_slot() {
        let arena = FixedArena::with_slot_capacity(nz(1), nz(32))
            .build()
            .unwrap();

        let buf = arena.allocate().unwrap();
        drop(buf);
        assert!(
            arena.allocate().is_ok(),
            "slot should be available after drop"
        );
    }

    #[test]
    fn init_policy_zero_fills_slot() {
        use bytes::BufMut;

        let arena = FixedArena::with_slot_capacity(nz(1), nz(64))
            .init_policy(InitPolicy::Zero)
            .page_size(PageSize::Unknown)
            .build()
            .unwrap();

        // Write non-zero data, freeze, drop to return the slot.
        let mut buf = arena.allocate().unwrap();
        buf.put_slice(&[0xAB; 64]);
        let bytes = buf.freeze();
        drop(bytes);

        // Re-allocate the same slot; zero policy should have cleared it.
        let buf = arena.allocate().unwrap();
        let slot = unsafe { std::slice::from_raw_parts(buf.ptr.add(buf.offset), 64) };
        assert!(slot.iter().all(|&b| b == 0), "slot should be zeroed");
    }

    #[test]
    fn init_policy_default_is_uninit() {
        assert_eq!(InitPolicy::default(), InitPolicy::Uninit);
    }

    #[test]
    fn builder_with_arena_capacity() {
        let arena = FixedArena::with_arena_capacity(nz(4), nz(256))
            .build()
            .unwrap();
        assert_eq!(arena.slot_count(), 4);
        assert_eq!(arena.slot_capacity(), 64);
    }

    #[test]
    fn builder_arena_capacity_rounds_up() {
        // ceil(1000 / 3) = 334.
        let arena = FixedArena::with_arena_capacity(nz(3), nz(1000))
            .build()
            .unwrap();
        assert_eq!(arena.slot_capacity(), 334);
    }

    #[test]
    fn builder_arena_capacity_with_alignment() {
        // ceil(1000 / 3) = 334, rounded up to the next multiple of 64 = 384.
        let arena = FixedArena::with_arena_capacity(nz(3), nz(1000))
            .alignment(64)
            .build()
            .unwrap();
        assert_eq!(arena.slot_capacity(), 384);
    }
}
741}