/// Pointers into the memory-mapped io_uring submission queue ring shared with the kernel.
///
/// Each `NonNull` field points at a location inside the mapped submission/completion queue
/// region, located via the byte offsets the kernel reports in `io_sqring_offsets`.
/// `AtomicU32` fields are concurrently updated by the kernel; plain `u32` fields
/// (`ring_mask`, `ring_entries`) are constants after setup.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
struct SubmissionQueueRing
{
// Consumer index, advanced by the kernel; read with acquire ordering.
head: NonNull<AtomicU32>,
// Producer index, advanced by us; published with release ordering.
tail: NonNull<AtomicU32>,
// `ring_entries - 1`; ring sizes are powers of two, so `index & ring_mask` wraps.
ring_mask: NonNull<u32>,
// Total number of slots in the ring (and in `array`).
ring_entries: NonNull<u32>,
// Kernel-set flags (e.g. the wake-up-needed flag when using kernel submission-queue polling).
flags: NonNull<AtomicU32>,
// NOTE(review): presumably the kernel's count of invalidly submitted (dropped) entries,
// matching io_uring's `sq.dropped` — confirm against the kernel ABI.
dropped: NonNull<AtomicU32>,
// Indirection array mapping ring slots to submission queue entry indices.
array: NonNull<u32>,
// Separate mapping holding the actual submission queue entries (`io_uring_sqe`s).
submission_queue_entries: MappedMemory,
}
impl SubmissionQueueRing
{
    /// Maps the kernel-shared submission queue ring fields out of the mapped region using the
    /// byte offsets the kernel reported in `submission_queue_offsets`, then initializes the
    /// indirection `array` to a one-to-one slot-to-entry mapping.
    #[inline(always)]
    pub(crate) fn new(submission_queue_and_completion_queue: &MappedMemory, submission_queue_offsets: &io_sqring_offsets, submission_queue_entries: MappedMemory) -> Self
    {
        let memory = submission_queue_and_completion_queue.virtual_address();
        let this = Self
        {
            // Each field must use its own kernel-reported offset. Previously every field below
            // reused `submission_queue_offsets.head`, so all seven pointers aliased the head
            // counter and the ring could never have worked.
            head: memory.aligned_pointer_to_value::<AtomicU32>(submission_queue_offsets.head as usize),
            tail: memory.aligned_pointer_to_value::<AtomicU32>(submission_queue_offsets.tail as usize),
            ring_mask: memory.aligned_pointer_to_value::<u32>(submission_queue_offsets.ring_mask as usize),
            ring_entries: memory.aligned_pointer_to_value::<u32>(submission_queue_offsets.ring_entries as usize),
            flags: memory.aligned_pointer_to_value::<AtomicU32>(submission_queue_offsets.flags as usize),
            dropped: memory.aligned_pointer_to_value::<AtomicU32>(submission_queue_offsets.dropped as usize),
            array: memory.aligned_pointer_to_value::<u32>(submission_queue_offsets.array as usize),
            submission_queue_entries,
        };
        this.set_up_one_to_one_mapping_from_array_to_submission_queue_entry();
        this
    }

    /// Writes `array[i] = i` for every slot, so ring slot `i` always refers to submission
    /// queue entry `i`; done once at setup so pushes never need to manage the indirection.
    #[inline(always)]
    fn set_up_one_to_one_mapping_from_array_to_submission_queue_entry(&self)
    {
        for index in 0 .. self.array_length()
        {
            let pointer = self.array_element(index).as_ptr();
            let submission_queue_entry_index = index;
            // Volatile: this memory is shared with the kernel and must actually be written.
            unsafe { pointer.write_volatile(submission_queue_entry_index) };
        }
    }

    /// Repeatedly hands `add_entries` the next free submission queue entry until it returns
    /// `true` (meaning "this was my last entry") or the ring is full.
    ///
    /// On success, publishes the new tail with release ordering so the kernel sees the entries.
    /// On `Err`, the ring was full before `add_entries` finished; the tail is NOT published,
    /// so any entries written during this call are not made visible to the kernel, and the
    /// closure is returned so the caller can retry later.
    #[inline(always)]
    pub(crate) fn push_submission_queue_entries<'add_entries, AddEntries: FnMut(SubmissionQueueEntry) -> bool>(&self, add_entries: &'add_entries mut AddEntries, using_kernel_submission_queue_poll: bool, using_io_poll: bool) -> Result<(), &'add_entries mut AddEntries>
    {
        let head = self.head_atomically();
        let mut tail = self.tail_non_atomically();
        let ring_mask = self.ring_mask();
        let mut stop_pushing_entries;
        loop
        {
            if unlikely!(self.is_full_internal(head, tail))
            {
                return Err(add_entries)
            }
            stop_pushing_entries = add_entries(self.next_submission_queue_entry(tail, ring_mask, using_kernel_submission_queue_poll, using_io_poll));
            tail = tail.wrapping_add(1);
            if stop_pushing_entries
            {
                break
            }
        }
        self.store_tail_atomically(tail);
        Ok(())
    }

    /// Produces a handle to the submission queue entry slot that `tail` maps to.
    ///
    /// NOTE(review): `(tail & ring_mask)` is an entry *index*, yet `new` passes *byte* offsets
    /// to `aligned_pointer_to_value` — if that method expects a byte offset here too, this
    /// needs scaling by the entry size. Confirm against `aligned_pointer_to_value`'s contract.
    #[cfg_attr(not(debug_assertions), allow(unused))]
    #[inline(always)]
    fn next_submission_queue_entry(&self, tail: u32, ring_mask: u32, using_kernel_submission_queue_poll: bool, using_io_poll: bool) -> SubmissionQueueEntry
    {
        let pointer = self.submission_queue_entries.virtual_address().aligned_pointer_to_value((tail & ring_mask) as usize);
        SubmissionQueueEntry
        {
            pointer,
            // Debug-only bookkeeping used by SubmissionQueueEntry's assertions.
            #[cfg(debug_assertions)] using_kernel_submission_queue_poll,
            #[cfg(debug_assertions)] using_io_poll,
        }
    }

    /// `true` when the kernel's submission-queue-poll thread has gone to sleep and needs an
    /// `io_uring_enter` wake-up before it will see newly pushed entries.
    #[inline(always)]
    pub(crate) fn needs_to_wake_up_kernel_submission_queue_poll_thread(&self) -> bool
    {
        self.flags().contains(SubmissionQueueRingFlags::NeedsIoUringEnterWakeUp)
    }

    /// `true` when no entries are pending consumption by the kernel.
    #[inline(always)]
    pub(crate) fn is_empty(&self) -> bool
    {
        self.length() == 0
    }

    /// `true` when every slot is occupied and no more entries can be pushed.
    #[inline(always)]
    pub(crate) fn is_full(&self) -> bool
    {
        self.length() == self.array_length()
    }

    /// Number of free slots available for pushing.
    #[inline(always)]
    pub(crate) fn available(&self) -> u32
    {
        self.array_length() - self.length()
    }

    /// Number of entries currently pending (pushed but not yet consumed by the kernel).
    #[inline(always)]
    pub(crate) fn length(&self) -> u32
    {
        let head = self.head_atomically();
        let tail = self.tail_non_atomically();
        Self::length_internal(head, tail)
    }

    #[inline(always)]
    fn is_full_internal(&self, head: u32, tail: u32) -> bool
    {
        Self::length_internal(head, tail) == self.array_length()
    }

    /// `tail - head` with wrapping: the indices are free-running u32 counters, so wrapping
    /// subtraction yields the correct length even across overflow.
    #[inline(always)]
    fn length_internal(head: u32, tail: u32) -> u32
    {
        tail.wrapping_sub(head)
    }

    /// Acquire-load of the kernel-advanced consumer index.
    #[inline(always)]
    fn head_atomically(&self) -> u32
    {
        self.head.load_acquire()
    }

    /// Release-store of the producer index; publishes pushed entries to the kernel.
    #[inline(always)]
    fn store_tail_atomically(&self, new_tail: u32)
    {
        self.tail.store_release(new_tail)
    }

    /// Non-atomic load of the tail: only this (producer) side ever writes it.
    #[inline(always)]
    fn tail_non_atomically(&self) -> u32
    {
        self.tail.load_non_atomically()
    }

    #[inline(always)]
    fn ring_mask(&self) -> u32
    {
        self.ring_mask.unsynchronized_value()
    }

    /// Total number of ring slots (constant after setup).
    #[inline(always)]
    fn array_length(&self) -> u32
    {
        self.ring_entries.unsynchronized_value()
    }

    #[inline(always)]
    fn array_element(&self, index: u32) -> NonNull<u32>
    {
        // SAFETY: callers only pass `index < array_length()`, which is within the mapping.
        unsafe { new_non_null(self.array.as_ptr().add(index as usize)) }
    }

    /// Acquire-load of the kernel-set flags, reinterpreted as the bitflags type.
    #[inline(always)]
    fn flags(&self) -> SubmissionQueueRingFlags
    {
        unsafe { transmute(self.flags.load_acquire()) }
    }

    /// Acquire-load of the kernel's dropped-entries counter.
    #[inline(always)]
    fn dropped(&self) -> u32
    {
        self.dropped.load_acquire()
    }
}