use std::alloc::{self, Layout};
use std::ops::{Deref, DerefMut, Index, IndexMut};
use std::ptr::NonNull;
use crate::arch::BYTE_ALIGNMENT;
use crate::error::{OjphError, Result};
/// A heap-allocated, fixed-length buffer whose storage starts on a
/// caller-chosen power-of-two byte boundary (defaults to `BYTE_ALIGNMENT`).
pub struct AlignedVec<T> {
// Start of the allocation; dangling (never dereferenced) while empty.
ptr: NonNull<T>,
// Number of initialized elements.
len: usize,
// Number of elements the current allocation can hold.
capacity: usize,
// Requested byte alignment; `with_alignment` asserts it is a power of two
// (`BYTE_ALIGNMENT` is presumably one as well — defined in crate::arch).
alignment: usize,
}
// SAFETY: `AlignedVec` uniquely owns its heap allocation (the raw pointer is
// only exposed via borrows of self), so it may move between / be shared
// across threads whenever `T` itself permits it.
unsafe impl<T: Send> Send for AlignedVec<T> {}
unsafe impl<T: Sync> Sync for AlignedVec<T> {}
impl<T> AlignedVec<T> {
    /// Creates an empty vector aligned to the architecture default
    /// (`BYTE_ALIGNMENT`).
    pub fn new() -> Self {
        Self {
            ptr: NonNull::dangling(),
            len: 0,
            capacity: 0,
            alignment: BYTE_ALIGNMENT as usize,
        }
    }

    /// Creates an empty vector with a caller-chosen byte alignment.
    ///
    /// # Panics
    /// Panics if `alignment` is not a power of two.
    pub fn with_alignment(alignment: usize) -> Self {
        assert!(
            alignment.is_power_of_two(),
            "alignment must be a power of two"
        );
        Self {
            ptr: NonNull::dangling(),
            len: 0,
            capacity: 0,
            alignment,
        }
    }

    /// Resizes to exactly `count` elements, resetting every element to
    /// `T::default()`. Previous contents are always discarded.
    ///
    /// The existing allocation is reused when it is already large enough,
    /// so shrinking never reallocates.
    ///
    /// # Errors
    /// Returns `OjphError::AllocationFailed` when the byte size overflows,
    /// the layout is invalid, or the system allocator reports failure.
    pub fn resize(&mut self, count: usize) -> Result<()>
    where
        T: Default + Copy,
    {
        // Zero-sized types need no storage, and handing a zero-size layout
        // to the global allocator would be undefined behavior.
        if std::mem::size_of::<T>() == 0 {
            self.len = count;
            self.capacity = self.capacity.max(count);
            return Ok(());
        }
        if count == 0 {
            self.dealloc_inner();
            return Ok(());
        }
        if count > self.capacity {
            self.dealloc_inner();
            let layout = self.make_layout(count)?;
            // SAFETY: `layout` has non-zero size (count > 0 and
            // size_of::<T>() > 0 were both checked above).
            let raw = unsafe { alloc::alloc(layout) };
            self.ptr = NonNull::new(raw.cast::<T>()).ok_or(OjphError::AllocationFailed)?;
            self.capacity = count;
        }
        // Write defaults explicitly instead of relying on `alloc_zeroed`:
        // an all-zero bit pattern is not `T::default()` (nor necessarily a
        // valid value at all) for every `Default + Copy` type.
        for i in 0..count {
            // SAFETY: i < count <= capacity, so the write stays in bounds;
            // `write` never reads the uninitialized previous contents.
            unsafe { self.ptr.as_ptr().add(i).write(T::default()) };
        }
        self.len = count;
        Ok(())
    }

    /// Number of initialized elements.
    #[inline]
    pub fn len(&self) -> usize {
        self.len
    }

    /// `true` when the vector holds no elements.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Read-only pointer to the (aligned) start of the storage.
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        self.ptr.as_ptr()
    }

    /// Mutable pointer to the (aligned) start of the storage.
    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self.ptr.as_ptr()
    }

    /// Builds the allocation layout for `count` elements, using the larger
    /// of the requested alignment and `T`'s natural alignment.
    fn make_layout(&self, count: usize) -> Result<Layout> {
        let size = count
            .checked_mul(std::mem::size_of::<T>())
            .ok_or(OjphError::AllocationFailed)?;
        let align = self.alignment.max(std::mem::align_of::<T>());
        Layout::from_size_align(size, align).map_err(|_| OjphError::AllocationFailed)
    }

    /// Frees the current allocation (if any) and returns to the empty state.
    /// Elements are not dropped; `resize`'s `T: Copy` bound guarantees they
    /// have no destructors.
    fn dealloc_inner(&mut self) {
        if self.capacity > 0 && std::mem::size_of::<T>() > 0 {
            // `make_layout` succeeded for this capacity when the buffer was
            // allocated, so it cannot fail here.
            if let Ok(layout) = self.make_layout(self.capacity) {
                // SAFETY: `ptr` was produced by `alloc::alloc` with this
                // exact layout and has not been freed since.
                unsafe { alloc::dealloc(self.ptr.as_ptr().cast::<u8>(), layout) };
            }
        }
        self.len = 0;
        self.capacity = 0;
        self.ptr = NonNull::dangling();
    }
}
impl<T> Drop for AlignedVec<T> {
// Releases the backing allocation. Element destructors are never run, which
// is sound because `len > 0` can only result from `resize`, whose `T: Copy`
// bound rules out types with a `Drop` impl.
fn drop(&mut self) {
self.dealloc_inner();
}
}
impl<T> Deref for AlignedVec<T> {
    type Target = [T];

    /// Borrows the contents as a slice; an empty vector yields `&[]`.
    #[inline]
    fn deref(&self) -> &[T] {
        match self.len {
            0 => &[],
            // SAFETY: `ptr` points to `len` initialized elements while
            // `len > 0`.
            n => unsafe { std::slice::from_raw_parts(self.ptr.as_ptr(), n) },
        }
    }
}
impl<T> DerefMut for AlignedVec<T> {
    /// Mutably borrows the contents as a slice; an empty vector yields
    /// `&mut []`.
    #[inline]
    fn deref_mut(&mut self) -> &mut [T] {
        match self.len {
            0 => &mut [],
            // SAFETY: `ptr` points to `len` initialized elements, uniquely
            // borrowed through `&mut self`.
            n => unsafe { std::slice::from_raw_parts_mut(self.ptr.as_ptr(), n) },
        }
    }
}
impl<T> Index<usize> for AlignedVec<T> {
    type Output = T;

    /// Indexes into the contents; panics if `idx >= len`, like slice
    /// indexing.
    #[inline]
    fn index(&self, idx: usize) -> &T {
        let elems: &[T] = self;
        &elems[idx]
    }
}
impl<T> IndexMut<usize> for AlignedVec<T> {
    /// Mutably indexes into the contents; panics if `idx >= len`, like
    /// slice indexing.
    #[inline]
    fn index_mut(&mut self, idx: usize) -> &mut T {
        let elems: &mut [T] = self;
        &mut elems[idx]
    }
}
impl<T> Default for AlignedVec<T> {
fn default() -> Self {
Self::new()
}
}
/// Line-buffer type flag: no sample type assigned yet.
pub const LFT_UNDEFINED: u32 = 0x00;
/// Line-buffer type flag: samples are 32 bits wide.
pub const LFT_32BIT: u32 = 0x04;
/// Line-buffer type flag: samples are 64 bits wide.
pub const LFT_64BIT: u32 = 0x08;
/// Line-buffer type flag: samples are integers (unset presumably means
/// floating point — confirm against call sites).
pub const LFT_INTEGER: u32 = 0x10;
/// Mask selecting the size-related low bits of a flag word.
pub const LFT_SIZE_MASK: u32 = 0x0F;
/// Type-tagged raw pointer to a line's sample storage.
///
/// The pointers are non-owning; the memory they reference is managed
/// elsewhere (presumably by the allocators in this module — confirm at
/// call sites), so validity/lifetime must be guaranteed by the owner.
#[derive(Debug, Clone, Copy)]
pub enum LineBufData {
// Samples stored as 32-bit signed integers.
I32(*mut i32),
// Samples stored as 64-bit signed integers.
I64(*mut i64),
// Samples stored as 32-bit floats.
F32(*mut f32),
// No storage attached.
None,
}
/// Descriptor for one line of samples.
#[derive(Debug)]
pub struct LineBuf {
// Number of samples in the line.
pub size: usize,
// Amount reserved before the line start (bytes or samples — TODO confirm
// against call sites).
pub pre_size: u32,
// Bitwise combination of the `LFT_*` constants.
pub flags: u32,
// Tagged pointer to the sample storage; `LineBufData::None` until assigned.
pub data: LineBufData,
}
impl LineBuf {
pub fn new() -> Self {
Self {
size: 0,
pre_size: 0,
flags: LFT_UNDEFINED,
data: LineBufData::None,
}
}
}
impl Default for LineBuf {
fn default() -> Self {
Self::new()
}
}
/// Bookkeeping for one lifting-step buffer slot.
/// NOTE(review): the slot's exact role in the wavelet pipeline is not
/// visible from this file — confirm against the transform code.
pub struct LiftingBuf {
// Whether this slot currently holds live data.
pub active: bool,
// Index of the associated line, when any.
pub line_idx: Option<usize>,
}
impl LiftingBuf {
pub fn new() -> Self {
Self {
active: false,
line_idx: None,
}
}
}
impl Default for LiftingBuf {
fn default() -> Self {
Self::new()
}
}
/// Two-phase bump allocator: callers first declare their total need via
/// `pre_alloc_data`, then `finalize` reserves a single backing buffer, and
/// `alloc_data` hands out consecutive sub-ranges of it.
pub struct MemFixedAllocator {
// Single backing buffer, created by `finalize`.
buf: Vec<u8>,
// During pre-alloc: accumulated byte budget; after `finalize`: bump cursor.
offset: usize,
// Rounding granularity applied to every request; defaults to `BYTE_ALIGNMENT`.
alignment: usize,
}
impl MemFixedAllocator {
    /// Creates an allocator using the architecture's default byte alignment.
    pub fn new() -> Self {
        Self {
            buf: Vec::new(),
            offset: 0,
            alignment: BYTE_ALIGNMENT as usize,
        }
    }

    /// Phase 1: record a future request of `size` bytes (rounded up to the
    /// alignment). `_count` is accepted for API compatibility but unused.
    pub fn pre_alloc_data(&mut self, size: usize, _count: usize) {
        let aligned = (size + self.alignment - 1) & !(self.alignment - 1);
        self.offset += aligned;
    }

    /// Phase 2: allocate the single backing buffer sized by the pre-alloc
    /// pass and reset the cursor.
    ///
    /// Over-allocates by `alignment` bytes so `alloc_data` can hand out
    /// pointers aligned to `self.alignment`: a `Vec<u8>` only guarantees
    /// 1-byte alignment for its own storage, so rounding the offsets alone
    /// does not align the resulting pointers.
    pub fn finalize(&mut self) -> Result<()> {
        self.buf = vec![0u8; self.offset + self.alignment];
        self.offset = 0;
        Ok(())
    }

    /// Phase 3: hand out the next `size`-byte region (rounded up to the
    /// alignment), aligned to `self.alignment`.
    ///
    /// # Errors
    /// Returns `OjphError::AllocationFailed` when the pre-computed budget
    /// is exceeded.
    pub fn alloc_data(&mut self, size: usize) -> Result<*mut u8> {
        let aligned = (size + self.alignment - 1) & !(self.alignment - 1);
        let base = self.buf.as_mut_ptr();
        // Skip the first few bytes so every returned pointer is aligned;
        // `pad < alignment`, which the slack added in `finalize` covers.
        let pad = base.align_offset(self.alignment);
        if pad + self.offset + aligned > self.buf.len() {
            return Err(OjphError::AllocationFailed);
        }
        // SAFETY: pad + offset is within `buf` (checked just above).
        let ptr = unsafe { base.add(pad + self.offset) };
        self.offset += aligned;
        Ok(ptr)
    }
}
impl Default for MemFixedAllocator {
fn default() -> Self {
Self::new()
}
}
/// Node in a singly linked list of coded-data buffers.
pub struct CodedLists {
// Next node; owned by this one, so the list frees recursively on drop.
pub next: Option<Box<CodedLists>>,
// Non-owning pointer to the buffer storage; null until assigned.
pub buf: *mut u8,
// Size of `buf` in bytes.
pub buf_size: usize,
// Availability flag — NOTE(review): its exact semantics (free vs. ready)
// are not visible from this file; confirm against call sites.
pub avail: bool,
}
impl CodedLists {
pub fn new() -> Self {
Self {
next: None,
buf: std::ptr::null_mut(),
buf_size: 0,
avail: false,
}
}
}
impl Default for CodedLists {
fn default() -> Self {
Self::new()
}
}
/// Default growth granularity for `MemElasticAllocator` (256 KiB).
const ELASTIC_CHUNK_SIZE: usize = 256 * 1024;
/// Chunked bump allocator: grows on demand and frees everything at once.
pub struct MemElasticAllocator {
// All chunks allocated so far; only the last one serves new requests.
chunks: Vec<Vec<u8>>,
// Preferred size of each new chunk (oversized requests get bigger chunks).
chunk_size: usize,
// Bump cursor within the last chunk.
cur_offset: usize,
}
impl MemElasticAllocator {
    /// Creates an allocator using the default chunk size
    /// (`ELASTIC_CHUNK_SIZE`).
    pub fn new() -> Self {
        Self::with_chunk_size(ELASTIC_CHUNK_SIZE)
    }

    /// Creates an allocator that grows in chunks of `chunk_size` bytes.
    pub fn with_chunk_size(chunk_size: usize) -> Self {
        Self {
            chunks: Vec::new(),
            chunk_size,
            cur_offset: 0,
        }
    }

    /// Hands out `size` zero-initialized bytes, carved from the tail of the
    /// newest chunk, or from a fresh chunk when the request does not fit.
    ///
    /// Returned pointers stay valid until `reset` is called or the allocator
    /// is dropped (pushing new chunks moves the `Vec` of chunks, not the
    /// chunks' heap buffers). No per-allocation alignment is guaranteed.
    pub fn alloc_data(&mut self, size: usize) -> Result<*mut u8> {
        if let Some(last) = self.chunks.last_mut() {
            if self.cur_offset + size <= last.len() {
                // Derive the pointer from a mutable borrow: the previous
                // `last.as_ptr() ... as *mut u8` produced a write-through-
                // shared-reference pointer, which is unsound to write to.
                // SAFETY: cur_offset + size <= last.len(), so in bounds.
                let ptr = unsafe { last.as_mut_ptr().add(self.cur_offset) };
                self.cur_offset += size;
                return Ok(ptr);
            }
        }
        // Oversized requests get a dedicated chunk of exactly their size.
        let alloc_size = self.chunk_size.max(size);
        self.chunks.push(vec![0u8; alloc_size]);
        self.cur_offset = size;
        // `last_mut` cannot fail: a chunk was pushed just above.
        let chunk = self.chunks.last_mut().expect("chunk was just pushed");
        Ok(chunk.as_mut_ptr())
    }

    /// Frees every chunk, invalidating all previously returned pointers.
    pub fn reset(&mut self) {
        self.chunks.clear();
        self.cur_offset = 0;
    }
}
impl Default for MemElasticAllocator {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn aligned_vec_basic() {
        let mut vec = AlignedVec::<i32>::new();
        vec.resize(128).unwrap();
        assert_eq!(vec.len(), 128);
        // Freshly resized contents read back as the default value.
        assert_eq!(vec[0], 0);
        vec[0] = 42;
        assert_eq!(vec[0], 42);
        // The buffer start must honor the default SIMD alignment.
        assert_eq!(vec.as_ptr() as usize % BYTE_ALIGNMENT as usize, 0);
    }

    #[test]
    fn fixed_allocator_round_trip() {
        let mut alloc = MemFixedAllocator::new();
        // Phase 1: declare the budget; phase 2: reserve; phase 3: carve.
        alloc.pre_alloc_data(100, 1);
        alloc.pre_alloc_data(200, 1);
        alloc.finalize().unwrap();
        let first = alloc.alloc_data(100).unwrap();
        let second = alloc.alloc_data(200).unwrap();
        assert!(!first.is_null());
        assert!(!second.is_null());
    }

    #[test]
    fn elastic_allocator_basic() {
        let mut alloc = MemElasticAllocator::new();
        let ptr = alloc.alloc_data(64).unwrap();
        assert!(!ptr.is_null());
        alloc.reset();
    }
}