// "Sealed" traits that let the opcode builders accept either a raw file
// descriptor (`Fd`) or a registered/fixed file index (`Fixed`) without
// allowing downstream implementations.
pub(crate) mod sealed {
    use super::{Fd, Fixed};
    use std::os::unix::io::RawFd;

    #[derive(Debug)]
    pub enum Target {
        Fd(RawFd),
        Fixed(u32),
    }

    pub trait UseFd: Sized {
        fn into(self) -> RawFd;
    }

    pub trait UseFixed: Sized {
        fn into(self) -> Target;
    }

    impl UseFd for Fd {
        #[inline]
        fn into(self) -> RawFd {
            self.0
        }
    }

    impl UseFixed for Fd {
        #[inline]
        fn into(self) -> Target {
            Target::Fd(self.0)
        }
    }

    impl UseFixed for Fixed {
        #[inline]
        fn into(self) -> Target {
            Target::Fixed(self.0)
        }
    }
}

use crate::sys;
use crate::util::{cast_ptr, unwrap_nonzero, unwrap_u32};
use bitflags::bitflags;
use std::convert::TryFrom;
use std::marker::PhantomData;
use std::num::NonZeroU32;
use std::os::unix::io::RawFd;

/// Raw `RWF_*` flags accepted by the vectored read/write opcodes and passed
/// straight through to the kernel.
pub use sys::__kernel_rwf_t as RwFlags;

/// Opaque placeholder for the kernel `statx` struct; allocate a real
/// `libc::statx` (or an equivalently sized, zeroed buffer) and cast its
/// pointer when building a statx submission.
#[repr(C)]
#[allow(non_camel_case_types)]
pub struct statx {
    _priv: (),
}

/// Opaque placeholder for the kernel `epoll_event` struct; use
/// `libc::epoll_event` and cast its pointer when building an epoll
/// submission.
#[repr(C)]
#[allow(non_camel_case_types)]
pub struct epoll_event {
    _priv: (),
}

/// A raw file descriptor that has not been registered with the ring.
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct Fd(pub RawFd);

/// An index into the fixed (registered) file table, referring to a file
/// previously registered with the ring.
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct Fixed(pub u32);

bitflags! {
    /// Options for the timeout opcodes.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    pub struct TimeoutFlags: u32 {
        /// Interpret the timespec as an absolute time rather than a relative one.
        const ABS = sys::IORING_TIMEOUT_ABS;

        /// Measure the timeout against `CLOCK_BOOTTIME`.
        const BOOTTIME = sys::IORING_TIMEOUT_BOOTTIME;

        /// Measure the timeout against `CLOCK_REALTIME`.
        const REALTIME = sys::IORING_TIMEOUT_REALTIME;

        const LINK_TIMEOUT_UPDATE = sys::IORING_LINK_TIMEOUT_UPDATE;

        /// Report an expired timeout as a success (0) instead of `-ETIME`.
        const ETIME_SUCCESS = sys::IORING_TIMEOUT_ETIME_SUCCESS;

        const MULTISHOT = sys::IORING_TIMEOUT_MULTISHOT;
    }
}

bitflags! {
    /// Options for the fsync opcode.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    pub struct FsyncFlags: u32 {
        /// Sync only the file data, not necessarily the metadata (like `fdatasync(2)`).
        const DATASYNC = sys::IORING_FSYNC_DATASYNC;
    }
}

bitflags! {
    /// Matching criteria for asynchronous cancellation.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    pub(crate) struct AsyncCancelFlags: u32 {
        /// Cancel all requests that match the criteria, not just the first one.
        const ALL = sys::IORING_ASYNC_CANCEL_ALL;

        /// Match requests by file descriptor instead of by `user_data`.
        const FD = sys::IORING_ASYNC_CANCEL_FD;

        /// Match any request, regardless of key.
        const ANY = sys::IORING_ASYNC_CANCEL_ANY;

        /// The fd key refers to a fixed (registered) file index rather than a raw fd.
        const FD_FIXED = sys::IORING_ASYNC_CANCEL_FD_FIXED;
    }
}

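/// Parameters for an `openat2(2)`-style open, wrapping the kernel's
/// `open_how` struct. The builder methods set the open flags, the mode
/// (only consulted with `O_CREAT`/`O_TMPFILE`) and the `RESOLVE_*` path
/// resolution flags.
///
/// A minimal construction sketch, assuming this module is reachable as
/// `io_uring::types` and taking the flag values from `libc`; wiring the
/// result into an actual openat2 submission is elided:
///
/// ```
/// use io_uring::types::OpenHow;
///
/// let how = OpenHow::new()
///     .flags((libc::O_RDONLY | libc::O_CLOEXEC) as u64)
///     .mode(0); // no O_CREAT, so the mode is ignored
/// let _ = how;
/// ```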
#[derive(Default, Debug, Clone, Copy)]
#[repr(transparent)]
pub struct OpenHow(sys::open_how);

impl OpenHow {
    pub const fn new() -> Self {
        OpenHow(sys::open_how {
            flags: 0,
            mode: 0,
            resolve: 0,
        })
    }

    pub const fn flags(mut self, flags: u64) -> Self {
        self.0.flags = flags;
        self
    }

    pub const fn mode(mut self, mode: u64) -> Self {
        self.0.mode = mode;
        self
    }

    pub const fn resolve(mut self, resolve: u64) -> Self {
        self.0.resolve = resolve;
        self
    }
}

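/// A kernel `__kernel_timespec`, used by timeout-related submissions.
///
/// A small usage sketch; the value can be built field by field or converted
/// from a [`std::time::Duration`]:
///
/// ```
/// use io_uring::types::Timespec;
/// use std::time::Duration;
///
/// let ts = Timespec::new().sec(2).nsec(500_000_000);
/// let same = Timespec::from(Duration::new(2, 500_000_000));
/// let _ = (ts, same);
/// ```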
#[derive(Default, Debug, Clone, Copy)]
#[repr(transparent)]
pub struct Timespec(pub(crate) sys::__kernel_timespec);

impl Timespec {
    #[inline]
    pub const fn new() -> Self {
        Timespec(sys::__kernel_timespec {
            tv_sec: 0,
            tv_nsec: 0,
        })
    }

    #[inline]
    pub const fn sec(mut self, sec: u64) -> Self {
        self.0.tv_sec = sec as _;
        self
    }

    #[inline]
    pub const fn nsec(mut self, nsec: u32) -> Self {
        self.0.tv_nsec = nsec as _;
        self
    }
}

impl From<std::time::Duration> for Timespec {
    fn from(value: std::time::Duration) -> Self {
        Timespec::new()
            .sec(value.as_secs())
            .nsec(value.subsec_nanos())
    }
}

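/// Extended arguments (`io_uring_getevents_arg`) used when submitting and
/// waiting with a signal mask and/or a wait timeout. The two lifetimes keep
/// the borrowed sigmask and timespec alive for as long as the argument
/// struct may still be read by the kernel.
///
/// A minimal sketch of building the arguments; actually handing them to a
/// submit-and-wait call on the ring is elided:
///
/// ```
/// use io_uring::types::{SubmitArgs, Timespec};
///
/// let ts = Timespec::new().sec(1);
/// let args = SubmitArgs::new().timespec(&ts);
/// let _ = args;
/// ```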
#[derive(Default, Debug, Clone, Copy)]
pub struct SubmitArgs<'prev: 'now, 'now> {
    pub(crate) args: sys::io_uring_getevents_arg,
    prev: PhantomData<&'prev ()>,
    now: PhantomData<&'now ()>,
}

impl<'prev, 'now> SubmitArgs<'prev, 'now> {
    #[inline]
    pub const fn new() -> SubmitArgs<'static, 'static> {
        let args = sys::io_uring_getevents_arg {
            sigmask: 0,
            sigmask_sz: 0,
            min_wait_usec: 0,
            ts: 0,
        };

        SubmitArgs {
            args,
            prev: PhantomData,
            now: PhantomData,
        }
    }

    #[inline]
    pub fn sigmask<'new>(mut self, sigmask: &'new libc::sigset_t) -> SubmitArgs<'now, 'new> {
        self.args.sigmask = cast_ptr(sigmask) as _;
        self.args.sigmask_sz = std::mem::size_of::<libc::sigset_t>() as _;

        SubmitArgs {
            args: self.args,
            prev: self.now,
            now: PhantomData,
        }
    }

    #[inline]
    pub fn timespec<'new>(mut self, timespec: &'new Timespec) -> SubmitArgs<'now, 'new> {
        self.args.ts = cast_ptr(timespec) as _;

        SubmitArgs {
            args: self.args,
            prev: self.now,
            now: PhantomData,
        }
    }
}

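/// An entry in a provided-buffer ring (`io_uring_buf`): the address, length
/// and buffer ID of one buffer handed to the kernel.
///
/// Not a full ring setup; this sketch only shows the field accessors. In a
/// real ring the entries live in memory shared with the kernel and the tail
/// index is stored in the first entry's reserved field (see
/// [`BufRingEntry::tail`]).
///
/// ```
/// use io_uring::types::BufRingEntry;
///
/// fn fill(entry: &mut BufRingEntry, addr: u64, len: u32, bid: u16) {
///     entry.set_addr(addr);
///     entry.set_len(len);
///     entry.set_bid(bid);
/// }
/// ```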
#[repr(transparent)]
pub struct BufRingEntry(sys::io_uring_buf);

#[allow(clippy::len_without_is_empty)]
impl BufRingEntry {
    /// Sets the address of the buffer this entry describes.
    pub fn set_addr(&mut self, addr: u64) {
        self.0.addr = addr;
    }

    /// Returns the address of the buffer this entry describes.
    pub fn addr(&self) -> u64 {
        self.0.addr
    }

    /// Sets the length of the buffer.
    pub fn set_len(&mut self, len: u32) {
        self.0.len = len;
    }

    /// Returns the length of the buffer.
    pub fn len(&self) -> u32 {
        self.0.len
    }

    /// Sets the buffer ID.
    pub fn set_bid(&mut self, bid: u16) {
        self.0.bid = bid;
    }

    /// Returns the buffer ID.
    pub fn bid(&self) -> u16 {
        self.0.bid
    }

    /// Returns a pointer to the ring's tail counter, which the kernel ABI
    /// stores in the reserved field of the first entry of the ring.
    ///
    /// # Safety
    ///
    /// `ring_base` must point to the first entry of a valid buffer ring for
    /// as long as the returned pointer is used.
    pub unsafe fn tail(ring_base: *const BufRingEntry) -> *const u16 {
        std::ptr::addr_of!((*ring_base).0.resv)
    }
}

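/// Selects the fixed-file-table slot into which an operation (for example a
/// fixed-file accept or open) installs the resulting file descriptor.
///
/// A short sketch of the two ways to pick a slot; attaching the slot to an
/// actual submission is elided:
///
/// ```
/// use io_uring::types::DestinationSlot;
///
/// // Let the kernel pick any free slot in the registered file table...
/// let auto = DestinationSlot::auto_target();
/// // ...or request slot 3 explicitly (rejected if the index is too large).
/// let slot = DestinationSlot::try_from_slot_target(3).unwrap();
/// let _ = (auto, slot);
/// ```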
#[derive(Debug, Clone, Copy)]
pub struct DestinationSlot {
    // The user-visible slot index is stored offset by one, so zero stays
    // free for "no fixed destination"; `AUTO_ALLOC` marks automatic
    // allocation.
    dest: NonZeroU32,
}

impl DestinationSlot {
    const AUTO_ALLOC: NonZeroU32 =
        unwrap_nonzero(NonZeroU32::new(sys::IORING_FILE_INDEX_ALLOC as u32));

    /// Let the kernel automatically allocate a free slot in the fixed file
    /// table.
    pub const fn auto_target() -> Self {
        Self {
            dest: DestinationSlot::AUTO_ALLOC,
        }
    }

    /// Requests the given zero-based slot, returning the rejected value if
    /// it is larger than the maximum index the kernel accepts.
    pub fn try_from_slot_target(target: u32) -> Result<Self, u32> {
        const MAX_INDEX: u32 = unwrap_u32(DestinationSlot::AUTO_ALLOC.get().checked_sub(2));

        if target > MAX_INDEX {
            return Err(target);
        }

        let kernel_index = target.saturating_add(1);
        debug_assert!(0 < kernel_index && kernel_index < DestinationSlot::AUTO_ALLOC.get());
        let dest = NonZeroU32::new(kernel_index).unwrap();

        Ok(Self { dest })
    }

    pub(crate) fn kernel_index_arg(&self) -> u32 {
        self.dest.get()
    }
}

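/// A parsed view of the buffer filled by a multishot `recvmsg` completion:
/// the kernel writes an `io_uring_recvmsg_out` header followed by the name,
/// control and payload regions.
///
/// A hypothetical parsing sketch; `buf` stands for the provided buffer the
/// kernel filled for one completion and `msghdr` for the same message
/// header that was used when building the submission:
///
/// ```
/// use io_uring::types::RecvMsgOut;
///
/// fn payload_of(buf: &[u8], msghdr: &libc::msghdr) -> Option<Vec<u8>> {
///     let msg = RecvMsgOut::parse(buf, msghdr).ok()?;
///     Some(msg.payload_data().to_vec())
/// }
/// ```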
#[derive(Debug)]
pub struct RecvMsgOut<'buf> {
    header: sys::io_uring_recvmsg_out,
    /// The fixed size of the name field, in bytes, taken from the submitted
    /// msghdr (not the incoming name length reported by the kernel).
    msghdr_name_len: usize,

    name_data: &'buf [u8],
    control_data: &'buf [u8],
    payload_data: &'buf [u8],
}

impl<'buf> RecvMsgOut<'buf> {
    const DATA_START: usize = std::mem::size_of::<sys::io_uring_recvmsg_out>();

    /// Parses `buffer` as the result of a multishot recvmsg completion that
    /// used the given `msghdr` layout. Returns `Err(())` if the buffer is
    /// too small to hold the header plus the name and control regions.
    #[allow(clippy::result_unit_err)]
    #[allow(clippy::useless_conversion)]
    pub fn parse(buffer: &'buf [u8], msghdr: &libc::msghdr) -> Result<Self, ()> {
        let msghdr_name_len = usize::try_from(msghdr.msg_namelen).unwrap();
        let msghdr_control_len = usize::try_from(msghdr.msg_controllen).unwrap();

        if Self::DATA_START
            .checked_add(msghdr_name_len)
            .and_then(|acc| acc.checked_add(msghdr_control_len))
            .map(|header_len| buffer.len() < header_len)
            .unwrap_or(true)
        {
            return Err(());
        }
        // Copy the header with an unaligned read: the provided buffer
        // carries no alignment guarantee.
        let header = unsafe {
            buffer
                .as_ptr()
                .cast::<sys::io_uring_recvmsg_out>()
                .read_unaligned()
        };

        // Each region occupies a fixed-size field (from the msghdr), but only
        // the prefix actually written by the kernel (from the header) is
        // exposed.
        let (name_data, control_start) = {
            let name_start = Self::DATA_START;
            let name_data_end =
                name_start + usize::min(usize::try_from(header.namelen).unwrap(), msghdr_name_len);
            let name_field_end = name_start + msghdr_name_len;
            (&buffer[name_start..name_data_end], name_field_end)
        };
        let (control_data, payload_start) = {
            let control_data_end = control_start
                + usize::min(
                    usize::try_from(header.controllen).unwrap(),
                    msghdr_control_len,
                );
            let control_field_end = control_start + msghdr_control_len;
            (&buffer[control_start..control_data_end], control_field_end)
        };
        let payload_data = {
            let payload_data_end = payload_start
                + usize::min(
                    usize::try_from(header.payloadlen).unwrap(),
                    buffer.len() - payload_start,
                );
            &buffer[payload_start..payload_data_end]
        };

        Ok(Self {
            header,
            msghdr_name_len,
            name_data,
            control_data,
            payload_data,
        })
    }

    /// The incoming name length reported by the kernel; this may exceed the
    /// space that was available for it.
    pub fn incoming_name_len(&self) -> u32 {
        self.header.namelen
    }

    /// Whether the incoming name data was truncated to fit the name field.
    pub fn is_name_data_truncated(&self) -> bool {
        self.header.namelen as usize > self.msghdr_name_len
    }

    /// The (possibly truncated) name data.
    pub fn name_data(&self) -> &[u8] {
        self.name_data
    }

    /// The incoming control data length reported by the kernel; this may
    /// exceed the space that was available for it.
    pub fn incoming_control_len(&self) -> u32 {
        self.header.controllen
    }

    /// Whether the control data was truncated (`MSG_CTRUNC`).
    pub fn is_control_data_truncated(&self) -> bool {
        (self.header.flags & u32::try_from(libc::MSG_CTRUNC).unwrap()) != 0
    }

    /// The (possibly truncated) control data.
    pub fn control_data(&self) -> &[u8] {
        self.control_data
    }

    /// Whether the payload was truncated (`MSG_TRUNC`).
    pub fn is_payload_truncated(&self) -> bool {
        (self.header.flags & u32::try_from(libc::MSG_TRUNC).unwrap()) != 0
    }

    /// The (possibly truncated) payload data.
    pub fn payload_data(&self) -> &[u8] {
        self.payload_data
    }

    /// The full incoming payload length reported by the kernel; this may
    /// exceed the space that was available in the buffer.
    pub fn incoming_payload_len(&self) -> u32 {
        self.header.payloadlen
    }

    /// The message flags (`MSG_*`) reported by the kernel.
    pub fn flags(&self) -> u32 {
        self.header.flags
    }
}

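/// Criteria for matching in-flight requests to cancel: by `user_data`, by
/// file descriptor (raw or fixed), or everything at once, optionally
/// widened with [`CancelBuilder::all`] to hit every matching request
/// instead of just the first one.
///
/// A small construction sketch; submitting the cancellation itself is
/// elided, and the fd and user_data values are illustrative:
///
/// ```
/// use io_uring::types::{CancelBuilder, Fd};
///
/// // Cancel every request submitted with user_data == 42.
/// let by_data = CancelBuilder::user_data(42).all();
/// // Cancel the first request waiting on this raw fd.
/// let by_fd = CancelBuilder::fd(Fd(7));
/// let _ = (by_data, by_fd);
/// ```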
#[derive(Debug)]
pub struct CancelBuilder {
    pub(crate) flags: AsyncCancelFlags,
    pub(crate) user_data: Option<u64>,
    pub(crate) fd: Option<sealed::Target>,
}

impl CancelBuilder {
    /// Match every in-flight request, regardless of key.
    pub const fn any() -> Self {
        Self {
            flags: AsyncCancelFlags::ANY,
            user_data: None,
            fd: None,
        }
    }

    /// Match requests whose `user_data` equals the given value.
    pub const fn user_data(user_data: u64) -> Self {
        Self {
            flags: AsyncCancelFlags::empty(),
            user_data: Some(user_data),
            fd: None,
        }
    }

    /// Match requests waiting on the given file descriptor, either a raw
    /// [`Fd`] or a registered [`Fixed`] index.
    pub fn fd(fd: impl sealed::UseFixed) -> Self {
        let mut flags = AsyncCancelFlags::FD;
        let target = fd.into();
        if matches!(target, sealed::Target::Fixed(_)) {
            flags.insert(AsyncCancelFlags::FD_FIXED);
        }
        Self {
            flags,
            user_data: None,
            fd: Some(target),
        }
    }

    /// Cancel all requests that match the criteria rather than only the
    /// first one found.
    pub fn all(mut self) -> Self {
        self.flags.insert(AsyncCancelFlags::ALL);
        self
    }

    pub(crate) fn to_fd(&self) -> i32 {
        self.fd
            .as_ref()
            .map(|target| match *target {
                sealed::Target::Fd(fd) => fd,
                sealed::Target::Fixed(idx) => idx as i32,
            })
            .unwrap_or(-1)
    }
}

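/// One element of the vector handed to a futex "wait on multiple" request,
/// wrapping the kernel's `futex_waitv` struct.
///
/// A construction sketch; the `flags` value 2 is assumed to correspond to
/// `FUTEX2_SIZE_U32` (a 32-bit futex word) from the kernel uapi headers,
/// and the futex word here is a stand-in for real shared state:
///
/// ```
/// use io_uring::types::FutexWaitV;
///
/// let futex_word: u32 = 0;
/// let wait = FutexWaitV::new()
///     .uaddr(&futex_word as *const u32 as u64)
///     .val(0)
///     .flags(2); // FUTEX2_SIZE_U32
/// let _ = wait;
/// ```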
#[derive(Default, Debug, Clone, Copy)]
#[repr(transparent)]
pub struct FutexWaitV(sys::futex_waitv);

impl FutexWaitV {
    pub const fn new() -> Self {
        Self(sys::futex_waitv {
            val: 0,
            uaddr: 0,
            flags: 0,
            __reserved: 0,
        })
    }

    pub const fn val(mut self, val: u64) -> Self {
        self.0.val = val;
        self
    }

    pub const fn uaddr(mut self, uaddr: u64) -> Self {
        self.0.uaddr = uaddr;
        self
    }

    pub const fn flags(mut self, flags: u32) -> Self {
        self.0.flags = flags;
        self
    }
}

#[cfg(test)]
mod tests {
    use std::time::Duration;

    use crate::types::sealed::Target;

    use super::*;

    #[test]
    fn timespec_from_duration_converts_correctly() {
        let duration = Duration::new(2, 500);
        let timespec = Timespec::from(duration);

        assert_eq!(timespec.0.tv_sec as u64, duration.as_secs());
        assert_eq!(timespec.0.tv_nsec as u32, duration.subsec_nanos());
    }

    #[test]
    fn test_cancel_builder_flags() {
        let cb = CancelBuilder::any();
        assert_eq!(cb.flags, AsyncCancelFlags::ANY);

        let mut cb = CancelBuilder::user_data(42);
        assert_eq!(cb.flags, AsyncCancelFlags::empty());
        assert_eq!(cb.user_data, Some(42));
        assert!(cb.fd.is_none());
        cb = cb.all();
        assert_eq!(cb.flags, AsyncCancelFlags::ALL);

        let mut cb = CancelBuilder::fd(Fd(42));
        assert_eq!(cb.flags, AsyncCancelFlags::FD);
        assert!(matches!(cb.fd, Some(Target::Fd(42))));
        assert!(cb.user_data.is_none());
        cb = cb.all();
        assert_eq!(cb.flags, AsyncCancelFlags::FD | AsyncCancelFlags::ALL);

        let mut cb = CancelBuilder::fd(Fixed(42));
        assert_eq!(cb.flags, AsyncCancelFlags::FD | AsyncCancelFlags::FD_FIXED);
        assert!(matches!(cb.fd, Some(Target::Fixed(42))));
        assert!(cb.user_data.is_none());
        cb = cb.all();
        assert_eq!(
            cb.flags,
            AsyncCancelFlags::FD | AsyncCancelFlags::FD_FIXED | AsyncCancelFlags::ALL
        );
    }
}