use core::alloc::Layout;
use core::ffi::c_void;
use core::mem;
use core::ptr::{self, NonNull};
use nginx_sys::{
ngx_buf_t, ngx_create_temp_buf, ngx_palloc, ngx_pcalloc, ngx_pfree, ngx_pmemalign, ngx_pnalloc,
ngx_pool_cleanup_add, ngx_pool_t, NGX_ALIGNMENT,
};
use crate::allocator::{dangling_for_layout, AllocError, Allocator};
use crate::core::buffer::{Buffer, MemoryBuffer, TemporaryBuffer};
/// Non-owning handle to an nginx memory pool (`ngx_pool_t`).
///
/// The pool itself is created and destroyed by nginx; this wrapper only
/// forwards allocation requests to it. `#[repr(transparent)]` guarantees the
/// wrapper has the exact layout of the raw pointer, so it can cross the FFI
/// boundary wherever an `*mut ngx_pool_t` is expected.
#[derive(Clone, Debug)]
#[repr(transparent)]
pub struct Pool(NonNull<ngx_pool_t>);
// SAFETY: all allocation requests are delegated to the wrapped nginx pool;
// memory handed out stays valid until the pool itself is destroyed, and
// `deallocate`/`resize` only touch pointers previously produced by this pool.
unsafe impl Allocator for Pool {
    /// Allocates `layout.size()` bytes from the pool, choosing the nginx
    /// entry point that matches the requested alignment.
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let ptr = if layout.size() == 0 {
            // Zero-sized request: do not touch the pool, return a dangling
            // but well-aligned pointer as the Allocator contract permits.
            return Ok(NonNull::slice_from_raw_parts(
                dangling_for_layout(&layout),
                layout.size(),
            ));
        } else if layout.align() == 1 {
            // Byte-aligned data: ngx_pnalloc skips alignment padding.
            unsafe { ngx_pnalloc(self.0.as_ptr(), layout.size()) }
        } else if layout.align() <= NGX_ALIGNMENT {
            // The pool's default alignment already satisfies the request.
            unsafe { ngx_palloc(self.0.as_ptr(), layout.size()) }
        } else if cfg!(any(
            ngx_feature = "have_posix_memalign",
            ngx_feature = "have_memalign"
        )) {
            // Over-aligned request: only supported when nginx was built with
            // an aligned allocator (posix_memalign/memalign).
            unsafe { ngx_pmemalign(self.0.as_ptr(), layout.size(), layout.align()) }
        } else {
            // No way to satisfy alignment > NGX_ALIGNMENT on this build.
            return Err(AllocError);
        };
        debug_assert_eq!(ptr.align_offset(layout.align()), 0);
        // A null return from any ngx_* allocator means pool exhaustion.
        let ptr = NonNull::new(ptr.cast()).ok_or(AllocError)?;
        Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
    }
    /// Releases an allocation where nginx supports it.
    ///
    /// Only "large" allocations (size above the pool's `max`) or over-aligned
    /// ones are individually freeable via `ngx_pfree`; everything else is a
    /// no-op here and is reclaimed when the pool is destroyed.
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() > 0 && (layout.size() > self.as_ref().max || layout.align() > NGX_ALIGNMENT)
        {
            ngx_pfree(self.0.as_ptr(), ptr.as_ptr().cast());
        }
    }
    /// Grows an allocation; delegates to [`Pool::resize`].
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );
        self.resize(ptr, old_layout, new_layout)
    }
    /// Grows an allocation and zero-fills the newly added tail bytes.
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );
        #[allow(clippy::manual_inspect)]
        self.resize(ptr, old_layout, new_layout).map(|new_ptr| {
            // Zero only the bytes beyond the old length; resize already
            // preserved the original contents.
            unsafe {
                new_ptr
                    .cast::<u8>()
                    .byte_add(old_layout.size())
                    .write_bytes(0, new_layout.size() - old_layout.size())
            };
            new_ptr
        })
    }
    /// Shrinks an allocation; delegates to [`Pool::resize`].
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );
        self.resize(ptr, old_layout, new_layout)
    }
}
impl AsRef<ngx_pool_t> for Pool {
    /// Borrows the underlying nginx pool structure immutably.
    #[inline]
    fn as_ref(&self) -> &ngx_pool_t {
        // SAFETY: the handle always holds a valid, aligned pool pointer
        // (enforced at construction in `from_ngx_pool`).
        unsafe { &*self.0.as_ptr() }
    }
}
impl AsMut<ngx_pool_t> for Pool {
    /// Borrows the underlying nginx pool structure mutably.
    #[inline]
    fn as_mut(&mut self) -> &mut ngx_pool_t {
        // SAFETY: the handle always holds a valid, aligned pool pointer
        // (enforced at construction in `from_ngx_pool`).
        unsafe { &mut *self.0.as_ptr() }
    }
}
impl Pool {
    /// Wraps a raw nginx pool pointer.
    ///
    /// # Safety
    /// `pool` must be a valid, aligned pointer to a live `ngx_pool_t` that
    /// outlives the returned handle and every allocation made through it.
    pub unsafe fn from_ngx_pool(pool: *mut ngx_pool_t) -> Pool {
        debug_assert!(!pool.is_null());
        debug_assert!(pool.is_aligned());
        Pool(NonNull::new_unchecked(pool))
    }

    /// Returns the raw pointer to the wrapped `ngx_pool_t`.
    pub fn as_ptr(&self) -> *mut ngx_pool_t {
        self.0.as_ptr()
    }

    /// Creates a writable temporary buffer of `size` bytes from the pool.
    ///
    /// Returns `None` if the pool allocation fails.
    pub fn create_buffer(&self, size: usize) -> Option<TemporaryBuffer> {
        let buf = unsafe { ngx_create_temp_buf(self.0.as_ptr(), size) };
        if buf.is_null() {
            return None;
        }
        Some(TemporaryBuffer::from_ngx_buf(buf))
    }

    /// Creates a temporary buffer pre-filled with the bytes of `str`.
    ///
    /// Returns `None` if the pool allocation fails.
    pub fn create_buffer_from_str(&self, str: &str) -> Option<TemporaryBuffer> {
        let mut buffer = self.create_buffer(str.len())?;
        unsafe {
            let buf = buffer.as_ngx_buf_mut();
            ptr::copy_nonoverlapping(str.as_ptr(), (*buf).pos, str.len());
            (*buf).last = (*buf).pos.add(str.len());
        }
        Some(buffer)
    }

    /// Creates a read-only buffer referencing a `'static` string.
    ///
    /// Only the `ngx_buf_t` header is allocated from the pool; the string
    /// bytes themselves stay in static storage. Returns `None` if the header
    /// allocation fails.
    pub fn create_buffer_from_static_str(&self, str: &'static str) -> Option<MemoryBuffer> {
        let buf = self.calloc_type::<ngx_buf_t>();
        if buf.is_null() {
            return None;
        }
        // Casting away const is acceptable here because the buffer is marked
        // as read-only memory (`set_memory(1)`) below.
        let start = str.as_ptr() as *mut u8;
        let end = unsafe { start.add(str.len()) };
        unsafe {
            (*buf).start = start;
            (*buf).pos = start;
            (*buf).last = end;
            (*buf).end = end;
            (*buf).set_memory(1);
        }
        Some(MemoryBuffer::from_ngx_buf(buf))
    }

    /// Registers a pool cleanup handler that drops `*value` when the pool is
    /// destroyed.
    ///
    /// # Safety
    /// `value` must point to a valid, initialized `T` that remains valid until
    /// the pool runs its cleanup handlers.
    unsafe fn add_cleanup_for_value<T>(&self, value: *mut T) -> Result<(), ()> {
        let cln = ngx_pool_cleanup_add(self.0.as_ptr(), 0);
        if cln.is_null() {
            return Err(());
        }
        (*cln).handler = Some(cleanup_type::<T>);
        (*cln).data = value as *mut c_void;
        Ok(())
    }

    /// Allocates `size` bytes aligned to `NGX_ALIGNMENT`; null on failure.
    pub fn alloc(&self, size: usize) -> *mut c_void {
        unsafe { ngx_palloc(self.0.as_ptr(), size) }
    }

    /// Allocates uninitialized, aligned memory for a `T`; null on failure.
    pub fn alloc_type<T: Copy>(&self) -> *mut T {
        self.alloc(mem::size_of::<T>()) as *mut T
    }

    /// Allocates `size` zero-initialized bytes; null on failure.
    pub fn calloc(&self, size: usize) -> *mut c_void {
        unsafe { ngx_pcalloc(self.0.as_ptr(), size) }
    }

    /// Allocates zero-initialized memory for a `T`; null on failure.
    pub fn calloc_type<T: Copy>(&self) -> *mut T {
        self.calloc(mem::size_of::<T>()) as *mut T
    }

    /// Allocates `size` bytes with no alignment guarantee; null on failure.
    pub fn alloc_unaligned(&self, size: usize) -> *mut c_void {
        unsafe { ngx_pnalloc(self.0.as_ptr(), size) }
    }

    /// Allocates unaligned memory sized for a `T`; null on failure.
    pub fn alloc_type_unaligned<T: Copy>(&self) -> *mut T {
        self.alloc_unaligned(mem::size_of::<T>()) as *mut T
    }

    /// Moves `value` into pool-owned memory and registers a cleanup handler
    /// so its destructor runs when the pool is destroyed.
    ///
    /// Returns null if the allocation or the cleanup registration fails; in
    /// the latter case `value` is dropped immediately.
    pub fn allocate<T>(&self, value: T) -> *mut T {
        unsafe {
            let p = self.alloc(mem::size_of::<T>()) as *mut T;
            // BUGFIX: ngx_palloc can return null on pool exhaustion; writing
            // `value` through a null pointer would be undefined behavior.
            if p.is_null() {
                return ptr::null_mut();
            }
            ptr::write(p, value);
            if self.add_cleanup_for_value(p).is_err() {
                // Could not register the destructor: drop the value now so it
                // is not leaked inside the pool.
                ptr::drop_in_place(p);
                return ptr::null_mut();
            }
            p
        }
    }

    /// Resizes an allocation previously obtained from this pool.
    ///
    /// Fast path: if `ptr` is the most recent allocation in the active pool
    /// data block, the new size still fits before `d.end`, and the alignment
    /// is satisfied, the block is grown/shrunk in place by moving `d.last`.
    /// Otherwise falls back to allocate + copy + deallocate.
    ///
    /// # Safety
    /// `ptr` must denote a live allocation of `old_layout` from this pool.
    #[inline(always)]
    unsafe fn resize(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        if ptr.byte_add(old_layout.size()).as_ptr() == self.as_ref().d.last
            && ptr.byte_add(new_layout.size()).as_ptr() <= self.as_ref().d.end
            && ptr.align_offset(new_layout.align()) == 0
        {
            // In-place resize: just move the pool's bump pointer.
            let pool = self.0.as_ptr();
            unsafe { (*pool).d.last = ptr.byte_add(new_layout.size()).as_ptr() };
            Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()))
        } else {
            // Copy only the bytes that survive the resize.
            let size = core::cmp::min(old_layout.size(), new_layout.size());
            let new_ptr = <Self as Allocator>::allocate(self, new_layout)?;
            unsafe {
                ptr.copy_to_nonoverlapping(new_ptr.cast(), size);
                self.deallocate(ptr, old_layout);
            }
            Ok(new_ptr)
        }
    }
}
/// Pool cleanup handler: runs the destructor of the `T` stored at `data`.
///
/// Registered via `ngx_pool_cleanup_add` so pool-owned Rust values are
/// dropped when the pool is destroyed. The memory itself is reclaimed by the
/// pool, so only `drop_in_place` is needed here.
///
/// # Safety
/// `data` must point to a valid, initialized `T` that has not already been
/// dropped; the pool must invoke this handler at most once per value.
unsafe extern "C" fn cleanup_type<T>(data: *mut c_void) {
    data.cast::<T>().drop_in_place();
}