pub mod arena;
pub mod bump;
pub mod config;
pub mod platform;
pub mod polynomial;
pub mod witness;
pub use arena::{ArenaManager, ArenaStats};
pub use bump::BumpAlloc;
pub use config::*;
pub use platform::sys;
#[cfg(feature = "guard-pages")]
pub use platform::GuardedAlloc;
#[cfg(feature = "huge-pages")]
pub use platform::HugePageSize;
pub use platform::{AllocErrorKind, AllocFailed};
pub use polynomial::PolynomialArena;
pub use witness::WitnessArena;
use std::alloc::{GlobalAlloc, Layout, System};
use std::ptr::{copy_nonoverlapping, null_mut};
use std::sync::atomic::{AtomicPtr, AtomicU8, Ordering};
/// Lifecycle of the lazily-initialized arena backend.
///
/// Stored in `NAlloc::init_state` as a raw `u8` so it can be driven with
/// atomic loads/stores and `compare_exchange`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
enum InitState {
/// Arenas not yet created; the first use triggers initialization.
Uninitialized = 0,
/// One thread won the init race and is currently building the arenas.
Initializing = 1,
/// Arenas are live; `NAlloc::arenas` holds a valid published pointer.
Initialized = 2,
/// Arena setup failed permanently; the system allocator is used instead.
Fallback = 3,
}
/// Lazily-initialized arena allocator front-end.
///
/// Implements `GlobalAlloc` (see the impl below), routing requests to
/// arena sub-allocators once initialized and degrading to the system
/// allocator when arena setup fails.
#[must_use]
pub struct NAlloc {
// Heap-allocated `ArenaManager`, published once by `init_arenas`;
// null until initialization completes.
arenas: AtomicPtr<ArenaManager>,
// One of the `InitState` discriminants, advanced atomically.
init_state: AtomicU8,
}
impl NAlloc {
/// Creates a new allocator in the `Uninitialized` state.
///
/// Arena setup is deferred until the first allocation or accessor call;
/// `const` so it can back a `static` global allocator.
pub const fn new() -> Self {
Self {
arenas: AtomicPtr::new(null_mut()),
init_state: AtomicU8::new(InitState::Uninitialized as u8),
}
}
/// Creates an allocator and eagerly initializes its arenas.
///
/// # Errors
/// Returns `AllocFailed` (kind `OutOfMemory`) if arena initialization fails.
pub fn try_new() -> Result<Self, AllocFailed> {
let nalloc = Self::new();
nalloc.try_init()?;
Ok(nalloc)
}
/// Drives initialization to completion and maps the outcome to a `Result`.
/// Both the `Fallback` state and a null pointer from `init_arenas` are
/// reported as `OutOfMemory`.
fn try_init(&self) -> Result<(), AllocFailed> {
let state = self.init_state.load(Ordering::Acquire);
match state {
s if s == InitState::Initialized as u8 => Ok(()),
s if s == InitState::Fallback as u8 => {
Err(AllocFailed::with_kind(0, AllocErrorKind::OutOfMemory))
}
// Uninitialized or Initializing: attempt (or wait for) initialization.
_ => {
let ptr = self.init_arenas();
if ptr.is_null() {
Err(AllocFailed::with_kind(0, AllocErrorKind::OutOfMemory))
} else {
Ok(())
}
}
}
}
/// One-time arena setup guarded by a CAS on `init_state`.
///
/// Exactly one thread wins the `Uninitialized -> Initializing` transition
/// and performs the setup; losers spin-wait (bounded by `MAX_CAS_RETRIES`)
/// for the winner to publish `Initialized` or `Fallback`. Returns the
/// arena pointer on success, or null when the system allocator should be
/// used. Note the spin-wait timeout returns null WITHOUT changing the
/// state, so a later call can still observe the winner's result.
#[cold]
#[inline(never)]
fn init_arenas(&self) -> *mut ArenaManager {
let state = self.init_state.load(Ordering::Acquire);
if state == InitState::Initialized as u8 {
return self.arenas.load(Ordering::Acquire);
}
if state == InitState::Fallback as u8 {
return null_mut();
}
// Race to become the initializing thread.
if self
.init_state
.compare_exchange(
InitState::Uninitialized as u8,
InitState::Initializing as u8,
Ordering::AcqRel,
Ordering::Relaxed,
)
.is_ok()
{
match ArenaManager::new() {
Ok(manager) => {
// Move the manager to a stable heap address so `arenas`
// can publish a plain pointer to it.
let layout = Layout::new::<ArenaManager>();
let raw = unsafe { System.alloc(layout) as *mut ArenaManager };
if raw.is_null() {
eprintln!("[nalloc] Warning: Failed to allocate ArenaManager struct, using system allocator");
self.init_state
.store(InitState::Fallback as u8, Ordering::Release);
return null_mut();
}
unsafe {
std::ptr::write(raw, manager);
}
// Publish the pointer before flipping the state; these
// Release stores pair with the Acquire loads in
// `get_arenas`/`init_arenas`.
self.arenas.store(raw, Ordering::Release);
self.init_state
.store(InitState::Initialized as u8, Ordering::Release);
return raw;
}
Err(e) => {
eprintln!(
"[nalloc] Warning: Arena initialization failed ({}), using system allocator",
e
);
self.init_state
.store(InitState::Fallback as u8, Ordering::Release);
return null_mut();
}
}
}
// Lost the race: bounded spin-wait for the winner's verdict.
for i in 0..MAX_CAS_RETRIES {
for _ in 0..SPIN_ITERATIONS {
std::hint::spin_loop();
}
// Yield occasionally so the initializing thread can make progress.
if i % 10 == 9 {
std::thread::yield_now();
}
let state = self.init_state.load(Ordering::Acquire);
match state {
s if s == InitState::Initialized as u8 => {
return self.arenas.load(Ordering::Acquire);
}
s if s == InitState::Fallback as u8 => {
return null_mut();
}
_ => continue,
}
}
// Timed out waiting; state is left as `Initializing` (see doc above).
#[cfg(debug_assertions)]
eprintln!("[nalloc] Warning: Arena initialization timed out, using system allocator");
null_mut()
}
/// Returns `true` if the allocator has permanently fallen back to the
/// system allocator.
#[must_use]
#[inline]
pub fn is_fallback_mode(&self) -> bool {
self.init_state.load(Ordering::Relaxed) == InitState::Fallback as u8
}
/// Returns `true` once the arenas have been successfully initialized.
#[must_use]
#[inline]
pub fn is_initialized(&self) -> bool {
self.init_state.load(Ordering::Relaxed) == InitState::Initialized as u8
}
/// Returns the arena manager, lazily initializing it on first use.
/// `None` means fallback mode or an initialization attempt that failed
/// or timed out.
#[inline(always)]
fn get_arenas(&self) -> Option<&ArenaManager> {
let state = self.init_state.load(Ordering::Acquire);
if state == InitState::Initialized as u8 {
let ptr = self.arenas.load(Ordering::Acquire);
if !ptr.is_null() {
// SAFETY: the pointer was published with Release ordering after
// `ptr::write`, and is only freed in `Drop` (which needs
// `&mut self`, so no shared borrow can outlive it).
return Some(unsafe { &*ptr });
}
}
if state == InitState::Uninitialized as u8 || state == InitState::Initializing as u8 {
let ptr = self.init_arenas();
if !ptr.is_null() {
// SAFETY: same invariant as above; `init_arenas` returns
// non-null only after publishing a valid `ArenaManager`.
return Some(unsafe { &*ptr });
}
}
None
}
/// Panicking accessor for the witness arena.
///
/// # Panics
/// Panics if arena initialization failed; use [`Self::try_witness`] instead.
#[inline]
pub fn witness(&self) -> WitnessArena {
self.try_witness()
.expect("Arena initialization failed - use try_witness() for fallible access")
}
/// Fallible accessor for the witness arena.
#[must_use]
#[inline]
pub fn try_witness(&self) -> Option<WitnessArena> {
self.get_arenas().map(|a| WitnessArena::new(a.witness()))
}
/// Panicking accessor for the polynomial arena.
///
/// # Panics
/// Panics if arena initialization failed; use [`Self::try_polynomial`] instead.
#[inline]
pub fn polynomial(&self) -> PolynomialArena {
self.try_polynomial()
.expect("Arena initialization failed - use try_polynomial() for fallible access")
}
/// Fallible accessor for the polynomial arena.
#[must_use]
#[inline]
pub fn try_polynomial(&self) -> Option<PolynomialArena> {
self.get_arenas()
.map(|a| PolynomialArena::new(a.polynomial()))
}
/// Panicking accessor for the scratch bump allocator.
///
/// # Panics
/// Panics if arena initialization failed; use [`Self::try_scratch`] instead.
#[inline]
pub fn scratch(&self) -> std::sync::Arc<BumpAlloc> {
self.try_scratch()
.expect("Arena initialization failed - use try_scratch() for fallible access")
}
/// Fallible accessor for the scratch bump allocator.
#[must_use]
#[inline]
pub fn try_scratch(&self) -> Option<std::sync::Arc<BumpAlloc>> {
self.get_arenas().map(|a| a.scratch())
}
/// Resets all arenas via `ArenaManager::reset_all`.
///
/// # Safety
/// The caller must guarantee no pointers into any arena are still live;
/// resetting invalidates outstanding arena allocations.
pub unsafe fn reset_all(&self) {
if let Some(arenas) = self.get_arenas() {
arenas.reset_all();
}
}
/// Returns usage statistics, or `None` if the arenas are unavailable.
#[must_use]
pub fn stats(&self) -> Option<ArenaStats> {
self.get_arenas().map(|a| a.stats())
}
/// Like [`Self::stats`], but substitutes zeroed statistics when the
/// arenas are unavailable (e.g. fallback mode).
#[must_use]
pub fn stats_or_default(&self) -> ArenaStats {
self.stats().unwrap_or(ArenaStats {
witness_used: 0,
witness_capacity: 0,
polynomial_used: 0,
polynomial_capacity: 0,
scratch_used: 0,
scratch_capacity: 0,
#[cfg(feature = "fallback")]
witness_fallback_bytes: 0,
#[cfg(feature = "fallback")]
polynomial_fallback_bytes: 0,
#[cfg(feature = "fallback")]
scratch_fallback_bytes: 0,
})
}
}
impl Default for NAlloc {
fn default() -> Self {
Self::new()
}
}
impl Drop for NAlloc {
/// Tears down the heap-allocated `ArenaManager` created in `init_arenas`.
/// In `Fallback` or never-initialized states there is nothing to free.
fn drop(&mut self) {
// `&mut self` grants exclusive access, so the non-atomic `get_mut`
// reads are race-free here.
if *self.init_state.get_mut() == InitState::Initialized as u8 {
let ptr = *self.arenas.get_mut();
if !ptr.is_null() {
// SAFETY: `ptr` was produced in `init_arenas` via `System.alloc`
// with this exact layout followed by `ptr::write`, and is
// dropped and freed only here.
unsafe {
std::ptr::drop_in_place(ptr);
let layout = Layout::new::<ArenaManager>();
System.dealloc(ptr as *mut u8, layout);
}
}
}
}
}
// SAFETY: both fields (`arenas`, `init_state`) are accessed only through
// atomic operations, and the pointed-to `ArenaManager` is published once
// and then shared read-only (assuming `ArenaManager`'s own methods are
// thread-safe — TODO confirm).
// NOTE(review): `AtomicPtr<T>` and `AtomicU8` are themselves unconditionally
// `Send + Sync`, so these manual impls appear redundant — `NAlloc` should
// derive both automatically. Kept for explicitness; confirm before removing.
unsafe impl Send for NAlloc {}
unsafe impl Sync for NAlloc {}
/// `GlobalAlloc` front-end: small requests go to the scratch bump arena,
/// requests above `LARGE_ALLOC_THRESHOLD` go to the polynomial arena, and
/// everything degrades to the system allocator when arenas are unavailable.
unsafe impl GlobalAlloc for NAlloc {
    #[inline(always)]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        debug_assert!(layout.size() > 0);
        debug_assert!(layout.align() > 0);
        debug_assert!(layout.align().is_power_of_two());
        if let Some(arenas) = self.get_arenas() {
            // Route by request size: large blocks to polynomial, rest to scratch.
            if layout.size() > LARGE_ALLOC_THRESHOLD {
                arenas.polynomial().alloc(layout.size(), layout.align())
            } else {
                arenas.scratch().alloc(layout.size(), layout.align())
            }
        } else {
            // Arenas unavailable (fallback mode, failed, or in-flight init).
            System.alloc(layout)
        }
    }

    /// Frees `ptr`. Arena-owned memory is intentionally a no-op here (it is
    /// reclaimed in bulk by `reset_all`); everything else was served by
    /// `System` and is returned to it.
    #[inline(always)]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        if self.is_fallback_mode() {
            // Fallback mode: every allocation came from the system allocator.
            System.dealloc(ptr, layout);
            return;
        }
        if let Some(arenas) = self.get_arenas() {
            let ptr_addr = ptr as usize;
            if !arenas.contains_address(ptr_addr) {
                // Outside the arenas -> it was served by `System` in `alloc`.
                System.dealloc(ptr, layout);
            }
            // else: arena memory, reclaimed via `reset_all`, not per-pointer.
        } else {
            // BUG FIX: arenas unavailable and not in fallback mode (init
            // still in flight or timed out). A pointer can only be
            // arena-backed if `alloc` observed the permanent `Initialized`
            // state, in which case `get_arenas()` returns `Some` above —
            // so `ptr` must have come from `System.alloc`. The previous
            // code silently leaked it in this branch.
            System.dealloc(ptr, layout);
        }
    }

    /// Grows an allocation; shrink requests return the original pointer
    /// unchanged. Growing allocates, copies the old contents, and releases
    /// the old block via `dealloc`.
    #[inline(always)]
    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        debug_assert!(!ptr.is_null());
        debug_assert!(layout.size() > 0);
        debug_assert!(new_size > 0);
        let old_size = layout.size();
        if new_size <= old_size {
            // Existing block is already large enough.
            return ptr;
        }
        // SAFETY: `layout.align()` is a valid power of two per the
        // `GlobalAlloc` contract, and the caller guarantees `new_size` does
        // not overflow `isize` when rounded up to that alignment.
        let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
        let new_ptr = self.alloc(new_layout);
        if new_ptr.is_null() {
            // Per the `GlobalAlloc` contract, the old block remains valid.
            return null_mut();
        }
        // SAFETY: distinct blocks, both valid for at least `old_size` bytes.
        copy_nonoverlapping(ptr, new_ptr, old_size);
        self.dealloc(ptr, layout);
        new_ptr
    }

    /// `alloc` followed by zero-filling the returned block.
    #[inline(always)]
    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        let ptr = self.alloc(layout);
        if !ptr.is_null() {
            std::ptr::write_bytes(ptr, 0, layout.size());
        }
        ptr
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::alloc::GlobalAlloc;

    /// A fresh allocator serves a basic alloc/write/read round trip.
    #[test]
    fn test_global_alloc_api() {
        let allocator = NAlloc::new();
        let layout = Layout::from_size_align(1024, 8).unwrap();
        unsafe {
            let block = allocator.alloc(layout);
            assert!(!block.is_null());
            block.write(42);
            assert_eq!(block.read(), 42);
        }
    }

    /// `try_new` eagerly initializes and reports a healthy, non-fallback state.
    #[test]
    fn test_try_new() {
        let built = NAlloc::try_new();
        assert!(built.is_ok());
        let allocator = built.unwrap();
        assert!(allocator.is_initialized());
        assert!(!allocator.is_fallback_mode());
    }

    /// Touching the allocator settles it into initialized or fallback.
    #[test]
    fn test_fallback_mode_detection() {
        let allocator = NAlloc::new();
        let _ = allocator.stats();
        assert!(allocator.is_initialized() || allocator.is_fallback_mode());
    }

    /// All three fallible accessors succeed on a healthy allocator.
    #[test]
    fn test_try_accessors() {
        let allocator = NAlloc::new();
        assert!(allocator.try_witness().is_some());
        assert!(allocator.try_polynomial().is_some());
        assert!(allocator.try_scratch().is_some());
    }

    /// Growing an allocation preserves the bytes written to the old block.
    #[test]
    fn test_realloc() {
        let allocator = NAlloc::new();
        let layout = Layout::from_size_align(64, 8).unwrap();
        unsafe {
            let original = allocator.alloc(layout);
            assert!(!original.is_null());
            for offset in 0..64 {
                original.add(offset).write(offset as u8);
            }
            let grown = allocator.realloc(original, layout, 128);
            assert!(!grown.is_null());
            for offset in 0..64 {
                assert_eq!(grown.add(offset).read(), offset as u8);
            }
        }
    }

    /// `alloc_zeroed` hands back fully zero-filled memory.
    #[test]
    fn test_alloc_zeroed() {
        let allocator = NAlloc::new();
        let layout = Layout::from_size_align(1024, 8).unwrap();
        unsafe {
            let block = allocator.alloc_zeroed(layout);
            assert!(!block.is_null());
            for offset in 0..1024 {
                assert_eq!(*block.add(offset), 0);
            }
        }
    }

    /// A small allocation is visible in the scratch usage statistics.
    #[test]
    fn test_stats() {
        let allocator = NAlloc::new();
        let layout = Layout::from_size_align(1024, 8).unwrap();
        unsafe {
            let _ = allocator.alloc(layout);
        }
        let maybe_stats = allocator.stats();
        assert!(maybe_stats.is_some());
        let snapshot = maybe_stats.unwrap();
        assert!(snapshot.scratch_used >= 1024);
        assert!(snapshot.total_capacity() > 0);
    }

    /// `stats_or_default` always yields a usable (possibly zeroed) snapshot.
    #[test]
    fn test_stats_or_default() {
        let allocator = NAlloc::new();
        let snapshot = allocator.stats_or_default();
        let _ = snapshot.total_capacity();
    }

    /// Small requests land in scratch; oversized ones in the polynomial arena.
    #[test]
    fn test_large_allocation_routing() {
        let allocator = NAlloc::new();
        let small = Layout::from_size_align(1024, 8).unwrap();
        unsafe {
            let _ = allocator.alloc(small);
        }
        let after_small = allocator.stats().unwrap();
        assert!(after_small.scratch_used >= 1024);
        let large = Layout::from_size_align(2 * 1024 * 1024, 64).unwrap();
        unsafe {
            let _ = allocator.alloc(large);
        }
        let after_large = allocator.stats().unwrap();
        assert!(after_large.polynomial_used >= 2 * 1024 * 1024);
    }

    /// Dropping an initialized allocator must leave the heap reusable.
    #[test]
    fn test_drop_deallocates_arena_manager() {
        {
            let first = NAlloc::try_new().expect("NAlloc::try_new should succeed");
            assert!(first.is_initialized());
        }
        let second = NAlloc::try_new().expect("heap still healthy after previous drop");
        assert!(second.is_initialized());
    }

    /// Eight threads racing through lazy init all receive valid memory.
    #[test]
    fn test_concurrent_init() {
        use std::sync::Arc;
        use std::thread;
        let shared = Arc::new(NAlloc::new());
        let workers: Vec<_> = (0..8)
            .map(|_| {
                let shared = Arc::clone(&shared);
                thread::spawn(move || {
                    let layout = Layout::from_size_align(64, 8).unwrap();
                    unsafe {
                        let block = shared.alloc(layout);
                        assert!(!block.is_null());
                    }
                })
            })
            .collect();
        for worker in workers {
            worker.join().unwrap();
        }
        assert!(shared.is_initialized() || shared.is_fallback_mode());
    }
}