use std::num::NonZero;
use bytesbuf::mem::{BlockSize, CallbackMemory, GlobalPool, HasMemory, MemoryShared};
use bytesbuf::{BytesBuf, BytesView};
/// Demonstrates writing messages backed by different memory sources: the
/// connection's own registered memory, a general-purpose global pool, and a
/// concatenation that mixes both.
fn main() {
    let pool = GlobalPool::new();
    let mut connection = Connection::new(IoContext::new());

    // Backed by the connection's preferred (I/O-registered) memory.
    let message1 = BytesView::copied_from_slice(b"Example message 1: hello, world!", &connection.memory());
    connection.write(message1.clone());

    // Backed by ordinary pooled memory instead.
    let message2 = BytesView::copied_from_slice(b"Message 2: goodbye", &pool);
    connection.write(message2.clone());

    // A view spanning both messages, so its slices have mixed memory origins.
    connection.write(BytesView::from_views([message1, message2]));
}
/// Example connection that inspects each outgoing message's memory metadata
/// to choose between an optimal and a fallback write path.
#[derive(Debug)]
struct Connection {
    // Used (cloned) by `HasMemory::memory` below to reserve I/O memory with
    // the connection's preferred configuration.
    io_context: IoContext,
}
impl Connection {
    /// Wraps the given I/O context in a new connection.
    pub const fn new(io_context: IoContext) -> Self {
        Self { io_context }
    }

    /// Sends `message`, taking the optimal path only when every slice of the
    /// message carries metadata identifying it as I/O-registered memory.
    pub fn write(&mut self, message: BytesView) {
        let all_registered = message.slices().all(|(_, meta)| match meta {
            Some(meta) => meta
                .downcast_ref::<MemoryConfiguration>()
                .is_some_and(|configuration| configuration.requires_registered_memory),
            // A slice without metadata cannot be proven registered.
            None => false,
        });

        if all_registered {
            self.write_optimal(message);
        } else {
            self.write_fallback(message);
        }
    }

    /// Write path for messages whose memory is fully I/O-registered.
    #[expect(
        clippy::needless_pass_by_ref_mut,
        clippy::unused_self,
        clippy::needless_pass_by_value,
        reason = "for example realism"
    )]
    fn write_optimal(&mut self, message: BytesView) {
        let byte_count = message.len();
        println!("Sending message of {byte_count} bytes using optimal path.");
    }

    /// Write path for messages with unregistered or unknown memory.
    #[expect(
        clippy::needless_pass_by_ref_mut,
        clippy::unused_self,
        clippy::needless_pass_by_value,
        reason = "for example realism"
    )]
    fn write_fallback(&mut self, message: BytesView) {
        let byte_count = message.len();
        println!("Sending message of {byte_count} bytes using fallback path.");
    }
}
/// The memory properties the connection requests from its I/O context:
/// registered memory (which enables `Connection::write`'s optimal path),
/// with no page-alignment or zero-on-release requirements.
const CONNECTION_OPTIMAL_MEMORY_CONFIGURATION: MemoryConfiguration = MemoryConfiguration {
    requires_registered_memory: true,
    requires_page_alignment: false,
    zero_memory_on_release: false,
};
impl HasMemory for Connection {
    /// Exposes the connection's preferred memory source: a callback that
    /// reserves I/O memory with this connection's optimal configuration.
    fn memory(&self) -> impl MemoryShared {
        let io_context = self.io_context.clone();
        CallbackMemory::new(move |min_len| {
            io_context.reserve_io_memory(min_len, CONNECTION_OPTIMAL_MEMORY_CONFIGURATION)
        })
    }
}
/// Placeholder I/O context for the example; `Clone` lets closures (see
/// `Connection`'s `HasMemory` impl) capture their own handle to it.
#[derive(Clone, Debug)]
struct IoContext;
impl IoContext {
    /// Creates a new (stateless) I/O context.
    pub const fn new() -> Self {
        Self {}
    }

    /// Reserves a buffer of at least `min_len` bytes satisfying
    /// `memory_configuration`; a zero-length request yields an empty buffer.
    ///
    /// # Panics
    ///
    /// Panics if `min_len` exceeds the maximum block size.
    #[expect(clippy::unused_self, reason = "for example realism")]
    pub fn reserve_io_memory(&self, min_len: usize, memory_configuration: MemoryConfiguration) -> BytesBuf {
        let block_len: BlockSize = min_len
            .try_into()
            .expect("this example is limited to max allocation size of BlockSize, just to keep it simple");

        match NonZero::new(block_len) {
            // Nothing to allocate for an empty request.
            None => BytesBuf::new(),
            Some(block_len) => {
                let block = io_memory::allocate(block_len, memory_configuration);
                BytesBuf::from_blocks([block])
            }
        }
    }
}
/// Properties a caller can request for allocated I/O memory blocks; attached
/// to each block as metadata (see `io_memory`).
#[derive(Debug)]
#[expect(dead_code, reason = "unused fields just for example realism")]
struct MemoryConfiguration {
    // Presumably: blocks must start on a page boundary (unused in this example).
    requires_page_alignment: bool,
    // Presumably: memory must be wiped when released (unused in this example).
    zero_memory_on_release: bool,
    // Blocks must come from I/O-registered memory; `Connection::write` checks
    // this flag to decide whether the optimal path may be used.
    requires_registered_memory: bool,
}
/// Hand-rolled allocator for reference-counted I/O memory blocks, each tagged
/// with the `MemoryConfiguration` it was allocated under.
mod io_memory {
    use std::alloc::{Layout, alloc, dealloc};
    use std::mem::MaybeUninit;
    use std::num::NonZero;
    use std::ptr::{self, NonNull};
    use std::sync::atomic::{self, AtomicUsize};
    use bytesbuf::mem::{Block, BlockMeta, BlockRef, BlockRefDynamic, BlockRefDynamicWithMeta, BlockRefVTable, BlockSize};
    use super::MemoryConfiguration;

    impl BlockMeta for MemoryConfiguration {}

    /// Allocates a block of `len` bytes whose metadata is `memory_configuration`.
    /// The block is freed when the last reference to it is dropped.
    #[must_use]
    pub fn allocate(len: NonZero<BlockSize>, memory_configuration: MemoryConfiguration) -> Block {
        let block_ptr = new_block(len, memory_configuration);
        // SAFETY: `new_block` just returned a valid, initialized allocation.
        let block = unsafe { block_ptr.as_ref() };
        // SAFETY: the vtable matches the state type, and the state stays live
        // until `IoMemoryBlock::drop` releases the final reference.
        let block_ref = unsafe { BlockRef::new(block_ptr, &BLOCK_REF_FUNCTION_TABLE) };
        // SAFETY: `block.ptr` points at exactly `block.len` allocated bytes,
        // kept alive by `block_ref`.
        unsafe { Block::new(block.ptr, block.len, block_ref) }
    }

    /// Header for one allocation: payload pointer/length, the metadata, and
    /// an atomic reference count (managed `Arc`-style below).
    struct IoMemoryBlock {
        memory_configuration: MemoryConfiguration,
        // Payload bytes; allocated separately from this header.
        ptr: NonNull<MaybeUninit<u8>>,
        len: NonZero<BlockSize>,
        ref_count: AtomicUsize,
    }

    unsafe impl BlockRefDynamic for IoMemoryBlock {
        type State = Self;

        fn clone(state_ptr: NonNull<Self::State>) -> NonNull<Self::State> {
            // SAFETY: the caller guarantees `state_ptr` refers to a live state.
            let state = unsafe { state_ptr.as_ref() };
            // Relaxed suffices for the increment: the new reference is handed
            // out through an existing one (same protocol as `Arc::clone`).
            state.ref_count.fetch_add(1, atomic::Ordering::Relaxed);
            state_ptr
        }

        fn drop(state_ptr: NonNull<Self::State>) {
            // SAFETY: the caller guarantees `state_ptr` refers to a live state.
            let state = unsafe { state_ptr.as_ref() };
            if state.ref_count.fetch_sub(1, atomic::Ordering::Release) != 1 {
                return;
            }
            // Pairs with the Release decrements above so all prior accesses to
            // the memory happen-before the deallocation (same as `Arc::drop`).
            atomic::fence(atomic::Ordering::Acquire);
            // SAFETY: we held the last reference; both pointers were produced
            // by `new_block` with exactly these layouts and not yet freed.
            unsafe { dealloc(state.ptr.as_ptr().cast(), byte_array_layout(state.len)) };
            unsafe {
                dealloc(state_ptr.as_ptr().cast(), BLOCK_LAYOUT);
            }
        }
    }

    unsafe impl BlockRefDynamicWithMeta for IoMemoryBlock {
        fn meta(state_ptr: NonNull<Self::State>) -> NonNull<dyn BlockMeta> {
            // SAFETY: the caller guarantees `state_ptr` refers to a live state.
            let state = unsafe { state_ptr.as_ref() };
            let as_any: &dyn BlockMeta = &state.memory_configuration;
            NonNull::new(ptr::from_ref(as_any).cast_mut()).expect("field of non-null is non-null")
        }
    }

    const BLOCK_REF_FUNCTION_TABLE: BlockRefVTable<IoMemoryBlock> = BlockRefVTable::from_trait_with_meta();

    /// Layout of the `len`-byte payload allocation.
    fn byte_array_layout(len: NonZero<BlockSize>) -> Layout {
        Layout::array::<u8>(len.get() as usize).expect("the layout of a byte array can always be determined")
    }

    // `Layout::new` is a safe const fn producing the identical layout, so the
    // previous `unsafe { Layout::from_size_align_unchecked(...) }` is unneeded.
    const BLOCK_LAYOUT: Layout = Layout::new::<IoMemoryBlock>();

    /// Allocates the payload and the header, initializing the header with a
    /// reference count of one.
    ///
    /// # Panics
    ///
    /// Panics if either allocation fails (this example treats that as fatal).
    fn new_block(len: NonZero<BlockSize>, memory_configuration: MemoryConfiguration) -> NonNull<IoMemoryBlock> {
        // SAFETY: the layout has non-zero size because `len` is non-zero.
        let capacity_ptr = NonNull::new(unsafe { alloc(byte_array_layout(len)) })
            .expect("we do not intend to handle failed allocations - they are fatal")
            .cast::<MaybeUninit<u8>>();
        // SAFETY: `BLOCK_LAYOUT` has the non-zero size of `IoMemoryBlock`.
        let block_ptr = NonNull::new(unsafe { alloc(BLOCK_LAYOUT) })
            .expect("we do not intend to handle failed allocations - they are fatal")
            .cast::<IoMemoryBlock>();
        let block = IoMemoryBlock {
            ptr: capacity_ptr,
            len,
            ref_count: AtomicUsize::new(1),
            memory_configuration,
        };
        // SAFETY: `block_ptr` is valid for a write of one `IoMemoryBlock`.
        unsafe { block_ptr.write(block) };
        block_ptr
    }
}