aranya_capi_core/safe.rs
1//! Types that make FFI a little bit safer.
2
3use core::{
4 cmp::Ordering,
5 ffi::c_char,
6 fmt,
7 hash::{Hash, Hasher},
8 marker::{PhantomData, PhantomPinned},
9 mem::{self, ManuallyDrop, MaybeUninit, size_of},
10 ops::{BitAnd, BitAndAssign, BitOr, BitOrAssign, Deref, DerefMut, Not},
11 ptr::{self, NonNull},
12 slice, str,
13};
14
15use aranya_libc::Path;
16use tracing::{error, instrument, warn};
17
18use crate::{
19 InvalidSlice,
20 internal::conv::{
21 alias::Alias,
22 newtype::NewType,
23 slice::{try_from_raw_parts, try_from_raw_parts_mut},
24 },
25 traits::InitDefault,
26};
27
/// Errors returned by [`Safe`].
#[derive(Copy, Clone, Debug, Eq, PartialEq, thiserror::Error)]
pub enum Error {
    /// The address of a [`Safe`] changed.
    ///
    /// Returned when a `Safe` is observed at a different address
    /// than the one recorded at initialization (e.g. after
    /// a byte-wise copy by foreign code).
    #[error("address changed")]
    AddrChanged,
    /// A [`Safe`] is already initialized.
    #[error("already initialized")]
    AlreadyInitialized,
    /// The pointer is invalid.
    ///
    /// Wraps [`InvalidPtr`] (null or misaligned).
    #[error(transparent)]
    InvalidPtr(#[from] InvalidPtr),
    /// The slice is invalid.
    ///
    /// Wraps [`InvalidSlice`] from the raw-parts conversions.
    #[error(transparent)]
    InvalidSlice(#[from] InvalidSlice),
    /// The type identifier is invalid for `T`.
    #[error("invalid type")]
    InvalidType,
    /// The type is uninitialized.
    #[error("uninitialized")]
    Uninitialized,
}
50
/// A wrapper around `T` that attemps to limit the scope of
/// certain types of undefined behavior.
///
/// # Undefined Behavior Mitigations
///
/// It is important to note that `Safe` does not (and cannot)
/// _prevent_ undefined behavior. The following mitigations only
/// help limit the scope of the UB. For example, it is still
/// undefined behavior to use an uninitialized `Safe`. However,
/// the "Uninitialized Memory" mitigation limits the UB to
/// reading `Safe`'s uninitialized flags instead of reading from
/// (or writing to) the uninitialized inner `T`.
///
/// ## Type Confusion
///
/// `Safe` contains a unique type identifier for each type `T`
/// (see [`Typed`]). An error is returned if the type identifier
/// does not match the expected type identifier for `T`.
///
/// ## Uninitialized Memory
///
/// Type identifiers are random, so it's unlikely that
/// uninitialized memory will have the same bit pattern.
///
/// `Safe` also has an `INIT` flag that is set to true after the
/// inner `T` is initialized. An error is returned if the flag is
/// false.
///
/// ## Use After Cleanup (Free)
///
/// [`Safe`]'s `Drop` impl zeros out the type identifier and
/// flags, ensuring that it cannot be used after the inner `T`
/// has been dropped.
#[repr(C)]
#[non_exhaustive]
pub struct Safe<T: Typed> {
    // Should be `T::id()` while the value is live; zeroed by
    // `Drop` to catch use-after-free.
    id: TypeId,
    // Currently only `Flags::INIT`; cleared on drop and by
    // `into_inner`.
    flags: Flags,
    // The address this value was initialized at; compared by
    // `check` to detect byte-wise moves performed by C.
    addr: usize,
    // The wrapped value; only valid when `flags` has `INIT` set.
    inner: MaybeUninit<T>,
    // The address check above makes moves observable, so opt out
    // of `Unpin`.
    _unpin: PhantomPinned,
}
94
impl<T: Typed> Safe<T> {
    /// Writes an initialized `Safe` to `out`.
    ///
    /// The header records `T`'s type ID, sets the `INIT` flag,
    /// and captures `out`'s address so later checks can detect
    /// type confusion, uninitialized use, and byte-wise moves.
    pub fn init(out: &mut MaybeUninit<Self>, v: T) {
        // Record where the value is being initialized; `check`
        // compares this against the address it is accessed at.
        let addr = ptr::from_mut::<MaybeUninit<Self>>(out) as usize;
        out.write(Self {
            id: T::id(),
            flags: Flags::INIT,
            addr,
            inner: MaybeUninit::new(v),
            _unpin: PhantomPinned,
        });
    }

    /// Is the type ID correct?
    fn is_valid(&self) -> bool {
        self.id == T::id()
    }

    /// Is the type initialized?
    fn is_init(&self) -> bool {
        self.flags & Flags::INIT != 0
    }

    /// Did the address change?
    ///
    /// True when `self` no longer sits at the address recorded by
    /// [`init`][Self::init] (e.g. after a byte-wise copy).
    fn addr_changed(&self) -> bool {
        self.addr != ptr::from_ref::<Self>(self) as usize
    }

    // Release builds: omit the type name from log fields.
    #[cfg(not(debug_assertions))]
    fn name(&self) -> tracing::field::Empty {
        tracing::field::Empty
    }

    // Debug builds: include the full type name for diagnostics.
    #[cfg(debug_assertions)]
    fn name(&self) -> &'static str {
        core::any::type_name::<Self>()
    }

    /// Checks that `self` is valid, has been initialized, and
    /// has not been moved.
    ///
    /// This should only be called when receiving a pointer from
    /// external code, like C.
    fn check(&self) -> Result<(), Error> {
        if !self.is_valid() {
            error!(
                got = %self.id,
                want = %T::id(),
                name = self.name(),
                "invalid type ID",
            );
            Err(Error::InvalidType)
        } else if !self.is_init() {
            error!(flags = %self.flags, name = self.name(), "not initialized");
            Err(Error::Uninitialized)
        } else if self.addr_changed() {
            error!(
                old = %Hex(self.addr),
                new = %Hex(ptr::from_ref::<Self>(self) as usize),
                id = %self.id,
                name = self.name(),
                "address changed"
            );
            Err(Error::AddrChanged)
        } else {
            Ok(())
        }
    }

    /// Like [`check`][Self::check], but does not check for
    /// a changed address.
    ///
    /// In debug builds, a bad type ID or a cleared `INIT` flag
    /// panics; an address change only logs a warning.
    fn sanity_check(&self) {
        #[allow(clippy::panic, reason = "panicking only under debug_assertions")]
        if cfg!(debug_assertions) {
            if !self.is_valid() {
                error!(
                    got = %self.id,
                    want = %T::id(),
                    name = self.name(),
                    "invalid type ID",
                );
                panic!("invalid type ID")
            } else if !self.is_init() {
                error!(flags = %self.flags, name = self.name(), "not initialized");
                panic!("not initialized")
            } else if self.addr_changed() {
                warn!(
                    old = %Hex(self.addr),
                    new = %Hex(ptr::from_ref::<Self>(self) as usize),
                    id = %self.id,
                    name = self.name(),
                    "address changed"
                );
            }
        }
        // NB: We skip the address change because it's okay for
        // Rust to move this type around, but not external code.
    }

    /// Returns a shared reference from `ptr`.
    ///
    /// # Safety
    ///
    /// - The pointer must be initialized.
    /// - You must uphold Rust's aliasing rules.
    #[instrument]
    pub unsafe fn try_from_ptr<'a>(ptr: *const Self) -> Result<&'a Self, Error> {
        let v = Valid::<Self>::new(ptr.cast_mut()).map_err(Error::from)?;
        // SAFETY: See the function's safety docs.
        unsafe { v.as_ref() }.check()?;
        // SAFETY: See the function's safety docs.
        Ok(unsafe { v.as_ref() })
    }

    /// Returns an exclusive reference from `ptr`.
    ///
    /// # Safety
    ///
    /// - The pointer must be initialized.
    /// - You must uphold Rust's aliasing rules.
    #[instrument]
    pub unsafe fn try_from_mut_ptr<'a>(ptr: *mut Self) -> Result<&'a mut Self, Error> {
        let mut v = Valid::<Self>::new(ptr).map_err(Error::from)?;
        // SAFETY: See the function's safety docs.
        unsafe { v.as_ref() }.check()?;
        // SAFETY: See the function's safety docs.
        Ok(unsafe { v.as_mut() })
    }

    /// Returns a possibly uninitialized exclusive reference from
    /// `ptr`.
    ///
    /// Only null/alignment are validated; the header is NOT
    /// checked since the memory may be uninitialized.
    ///
    /// # Safety
    ///
    /// - You must uphold Rust's aliasing rules.
    #[instrument]
    pub unsafe fn try_from_uninit_mut_ptr<'a>(
        ptr: *mut MaybeUninit<Self>,
    ) -> Result<&'a mut MaybeUninit<Self>, Error> {
        let mut v = Valid::<MaybeUninit<Self>>::new(ptr).map_err(Error::from)?;
        // SAFETY: See the function's safety docs.
        Ok(unsafe { v.as_mut() })
    }

    /// Returns an [`OwnedPtr`] from `ptr`.
    ///
    /// # Safety
    ///
    /// - The pointer must be initialized.
    /// - You must uphold Rust's aliasing rules.
    #[instrument]
    pub unsafe fn try_from_owned_ptr(ptr: *mut Self) -> Result<OwnedPtr<Self>, Error> {
        // TODO(eric): `ptr.cast()` or make `ptr: *mut
        // ManuallyDrop<Self>`?
        let v = Valid::<ManuallyDrop<Self>>::new(ptr.cast()).map_err(Error::from)?;
        // SAFETY: See the function's safety docs.
        unsafe { v.as_ref() }.check()?;
        // SAFETY: All `Valid`s are non-null and suitably
        // aligned. See the function's safety docs for the rest.
        Ok(unsafe { OwnedPtr::from_valid(v) })
    }

    /// Consumes the `Safe`, returning its inner data.
    ///
    /// Clears the `INIT` flag so the subsequent `Drop` of the
    /// (now empty) wrapper does not drop `T` a second time.
    pub fn into_inner(mut self) -> T {
        self.sanity_check();

        let inner = mem::replace(&mut self.inner, MaybeUninit::uninit());

        self.flags &= !Flags::INIT;
        debug_assert_eq!(self.flags, Flags::NONE);

        // SAFETY: The header is correct, so we have to assume
        // that `inner` is indeed initialized.
        unsafe { inner.assume_init() }
    }

    /// Returns a shared reference to the inner `T`.
    fn as_ref(&self) -> &T {
        self.sanity_check();

        // SAFETY: The header is correct, so we have to assume
        // that `inner` is indeed initialized.
        unsafe { self.inner.assume_init_ref() }
    }

    /// Returns an exclusive reference to the inner `T`.
    fn as_mut(&mut self) -> &mut T {
        self.sanity_check();

        // SAFETY: The header is correct, so we have to assume
        // that `inner` is indeed initialized.
        unsafe { self.inner.assume_init_mut() }
    }
}
287
288impl<T: Typed + Default> InitDefault for Safe<T> {
289 fn init_default(out: &mut MaybeUninit<Self>) {
290 Self::init(out, T::default());
291 }
292}
293
impl<T: Typed> Drop for Safe<T> {
    fn drop(&mut self) {
        tracing::debug!(addr = ptr::from_mut::<Self>(self) as usize, "dropping");
        debug_assert_eq!(self.id, T::id());

        if !self.is_valid() {
            // We shouldn't ever hit this code path. But `Drop`
            // isn't fallible, so there isn't anything we can
            // really do here.
            return;
        }
        if !self.is_init() {
            // This might happen if we call `Safe::into_inner`.
            return;
        }

        // Clear the type ID *before* dropping `T` so that
        // a use-after-free observes an invalid header.
        self.id = TypeId::UNSET;

        // SAFETY: Although we have checked the header, we still
        // have to trust that `inner` is valid to be dropped.
        unsafe { self.inner.assume_init_drop() }

        // Cleared last; a zeroed header now marks the memory as
        // fully dead.
        self.flags &= !Flags::INIT;

        debug_assert_eq!(self.id, TypeId::UNSET);
        debug_assert_eq!(self.flags, Flags::NONE);
    }
}
322
impl<T: Typed + Eq> Eq for Safe<T> {}
impl<T: Typed + PartialEq> PartialEq for Safe<T> {
    /// Compares headers and inner values.
    ///
    /// NB: `as_ref` runs `sanity_check`, which panics in debug
    /// builds if either side has a corrupt header.
    fn eq(&self, other: &Self) -> bool {
        // Ignore `addr` since it could be different for two
        // different objects.
        self.id == other.id && self.flags == other.flags && self.as_ref() == other.as_ref()
    }
}
331
impl<T: Typed> Deref for Safe<T> {
    type Target = T;

    /// Derefs to the inner `T` (debug builds sanity-check the
    /// header first).
    fn deref(&self) -> &Self::Target {
        self.as_ref()
    }
}

impl<T: Typed> DerefMut for Safe<T> {
    /// Mutably derefs to the inner `T` (debug builds sanity-check
    /// the header first).
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut()
    }
}
345
impl<T: Typed + fmt::Debug> fmt::Debug for Safe<T> {
    // NB: `self.as_ref()` sanity-checks the header, so formatting
    // a corrupt `Safe` panics in debug builds.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Safe<T>")
            .field("id", &self.id)
            .field("flags", &self.flags)
            .field("addr", &self.addr)
            .field("inner", self.as_ref())
            .finish()
    }
}
356
/// Implemented by types that can be used with [`Safe`].
pub trait Typed {
    /// Uniquely identifies the type.
    ///
    /// The returned ID must be stable for the lifetime of the
    /// process and must never equal [`TypeId::UNSET`].
    fn id() -> TypeId;
}
362
363impl<T: core::any::Any> Typed for T {
364 fn id() -> TypeId {
365 let mut id = {
366 let mut hasher = fnv::FnvHasher::default();
367 core::any::TypeId::of::<T>().hash(&mut hasher);
368 hasher.finish() as u32
369 };
370 // This is very unlikely but it doesn't hurt to check.
371 if id == 0 {
372 id = !0;
373 }
374 TypeId(id)
375 }
376}
377
/// Uniquely identifies types.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[repr(transparent)]
pub struct TypeId(u32);

impl TypeId {
    /// The default value of `TypeId`.
    ///
    /// It marks memory that does not hold a live value (e.g.
    /// after [`Safe`]'s `Drop` has run).
    pub const UNSET: Self = Self(0);

    /// Creates a new type ID.
    ///
    /// It must not be [`UNSET`][Self::UNSET].
    ///
    /// # Panics
    ///
    /// In debug builds, panics if `id` is zero (i.e., equal to
    /// [`UNSET`][Self::UNSET]).
    pub const fn new(id: u32) -> Self {
        // Enforce the documented invariant in debug builds:
        // a zero ID would make a live object indistinguishable
        // from dropped or uninitialized memory.
        debug_assert!(id != 0, "`TypeId` must not be `UNSET` (zero)");
        Self(id)
    }
}

impl fmt::Display for TypeId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}
400
/// Bit flags stored in [`Safe`]'s header.
#[derive(Copy, Clone, Default, Debug, Eq, PartialEq)]
#[repr(transparent)]
struct Flags(u32);

impl Flags {
    /// The empty flag set.
    pub const NONE: Self = Self(0);
    /// Set once [`Safe`]'s inner `T` has been written.
    pub const INIT: Self = Self(1 << 0);
}

impl fmt::Display for Flags {
    /// Renders the raw bits in binary.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Binary::fmt(&self.0, f)
    }
}

impl PartialEq<u32> for Flags {
    /// Compares the raw bits against a plain `u32`.
    fn eq(&self, raw: &u32) -> bool {
        *raw == self.0
    }
}

impl BitAnd for Flags {
    type Output = Self;
    fn bitand(self, rhs: Self) -> Self {
        Self(self.0 & rhs.0)
    }
}
impl BitAndAssign for Flags {
    fn bitand_assign(&mut self, rhs: Self) {
        self.0 &= rhs.0;
    }
}

impl BitOr for Flags {
    type Output = Self;
    fn bitor(self, rhs: Self) -> Self {
        Self(self.0 | rhs.0)
    }
}
impl BitOrAssign for Flags {
    fn bitor_assign(&mut self, rhs: Self) {
        self.0 |= rhs.0;
    }
}

impl Not for Flags {
    type Output = Self;
    fn not(self) -> Self {
        Self(!self.0)
    }
}
456
/// Essentially the same thing as [`Valid`], but indicates
/// ownership of the pointed-to data.
///
/// NB: `OwnedPtr` does not implement `Drop`. You must either
/// call [`read`][Self::read] or
/// [`drop_in_place`][Self::drop_in_place].
#[repr(transparent)]
pub struct OwnedPtr<T> {
    ptr: Valid<ManuallyDrop<T>>,
    // Marks logical ownership of a `T` even though only a pointer
    // is stored.
    _marker: PhantomData<T>,
}

// Check that we take advantage of `NonNull`'s niche
// optimizations.
#[allow(clippy::assertions_on_constants)]
const _: () = {
    const WANT: usize = size_of::<OwnedPtr<()>>();
    const GOT: usize = size_of::<Option<OwnedPtr<()>>>();
    assert!(GOT == WANT);
};
477
impl<T> OwnedPtr<T> {
    /// Creates a new `Owned`.
    ///
    /// Returns an error if `ptr` is null or misaligned.
    ///
    /// # Safety
    ///
    /// - `ptr` must be initialized.
    /// - You must uphold Rust's lifetime rules. Specifically,
    ///   `OwnedPtr` now owns `ptr`.
    pub unsafe fn new(ptr: *mut ManuallyDrop<T>) -> Result<Self, InvalidPtr> {
        // SAFETY: See this method's safety docs.
        Ok(unsafe { Self::from_valid(Valid::new(ptr)?) })
    }

    /// Creates a new `Owned`.
    ///
    /// # Safety
    ///
    /// - `ptr` must be initialized.
    /// - You must uphold Rust's lifetime rules. Specifically,
    ///   `OwnedPtr` now owns `ptr`.
    pub const unsafe fn from_valid(ptr: Valid<ManuallyDrop<T>>) -> Self {
        Self {
            ptr,
            _marker: PhantomData,
        }
    }

    /// Consumes the owned pointer and returns the inner data.
    ///
    /// The pointed-to memory is NOT freed or dropped; the value
    /// is moved out of it by value.
    ///
    /// # Safety
    ///
    /// - The pointer must be live.
    #[must_use]
    pub unsafe fn read(self) -> T {
        // SAFETY: `Valid` is always non-null and suitably
        // aligned.
        let xref = unsafe { &mut *(self.ptr.as_mut_ptr()) };
        // SAFETY: `read` consumes `self`, so the `ManuallyDrop`
        // cannot be used again.
        unsafe { ManuallyDrop::take(xref) }
    }

    /// Executes the destructor, if any, for `T`.
    ///
    /// # Safety
    ///
    /// - The pointer must be live.
    pub unsafe fn drop_in_place(self) {
        // SAFETY: `Valid` is always non-null and suitably
        // aligned.
        let xref = unsafe { &mut *(self.ptr.as_mut_ptr()) };
        // SAFETY: `drop_in_place` consumes `self`, so the
        // `ManuallyDrop` cannot be used again.
        unsafe { ManuallyDrop::drop(xref) }
    }

    /// Returns the address of the owned pointer.
    ///
    /// This is used by `capi-macro`.
    #[doc(hidden)]
    pub fn addr(&self) -> usize {
        self.ptr.as_ptr() as usize
    }
}
542
impl<T> fmt::Debug for OwnedPtr<T> {
    // `Debug` prints the pointer value, never the pointee, since
    // the pointee may have been moved out or dropped.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.ptr, f)
    }
}

impl<T> fmt::Pointer for OwnedPtr<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.ptr, f)
    }
}
554
// Forward the project's conversion traits through the pointer:
// `OwnedPtr<T>` behaves like its inner/aliased counterpart.
// SAFETY: `T` is `NewType`.
unsafe impl<T: NewType> NewType for OwnedPtr<T> {
    type Inner = OwnedPtr<T::Inner>;
}

// SAFETY: `T: Alias<U>`, so the alias is sound.
unsafe impl<T, U> Alias<U> for OwnedPtr<T>
where
    T: Alias<U>,
    U: Sized,
{
}
567
/// Like `&[u8]`, but with a pointer from C.
///
/// Stores a raw (pointer, length) pair with no lifetime; callers
/// are responsible for keeping the backing memory alive.
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq, Ord, PartialOrd)]
pub struct CBytes {
    ptr: Valid<u8>,
    len: usize,
}

impl CBytes {
    /// Creates a `CBytes`.
    ///
    /// - If `ptr` is non-null, `len` must be non-zero.
    /// - If `ptr` is null, `len` must be zero.
    ///
    /// # Safety
    ///
    /// - If non-null, `ptr` must be valid for reads up to `len`
    ///   bytes.
    pub unsafe fn new(ptr: *const u8, len: usize) -> Result<Self, Error> {
        // SAFETY: See the method's safety docs. We uphold the
        // other aliasing and lifetime requirements.
        let s = unsafe { try_from_raw_parts(ptr, len)? };
        Ok(Self::from_slice(s))
    }

    /// Creates `CBytes` from a slice.
    ///
    /// NB: the slice's lifetime is NOT captured; the caller must
    /// keep the backing memory alive for as long as the `CBytes`
    /// is used.
    pub const fn from_slice(s: &[u8]) -> Self {
        Self {
            // SAFETY: The pointer is coming from a ref, so it is
            // valid and aligned.
            ptr: unsafe { Valid::new_unchecked(s.as_ptr().cast_mut()) },
            len: s.len(),
        }
    }

    /// Returns the `CBytes` as a slice.
    ///
    /// # Safety
    ///
    /// - You must uphold Rust's lifetimes.
    pub unsafe fn as_bytes(&self) -> &[u8] {
        // SAFETY:
        //
        // - `self.ptr` is always non-null and suitably aligned.
        // - `self.len` is always valid for `self.ptr`.
        // - See the method's safety docs.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
    }
}
616
/// A non-null, suitably aligned C string.
///
/// It has the same size and alignment as [`*const
/// c_char`][c_char].
///
/// Unlike [`core::ffi::CStr`], it is FFI safe.
#[repr(transparent)]
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq, Ord, PartialOrd)]
pub struct CStr {
    // NB: equality/ordering/hashing are by pointer value, not by
    // string contents.
    ptr: Valid<c_char>,
}

impl CStr {
    /// Unsafely creates a `CStr` from `&[u8]`.
    ///
    /// NB: the slice's lifetime is NOT captured, and the null
    /// terminator is not verified here.
    ///
    /// # Safety
    ///
    /// - The null terminator must be within [`isize::MAX`] bytes
    ///   from `ptr`.
    pub const unsafe fn new_unchecked(bytes: &[u8]) -> Self {
        Self {
            // SAFETY: See the method's docs.
            ptr: unsafe { Valid::new_unchecked(bytes.as_ptr().cast::<c_char>().cast_mut()) },
        }
    }

    /// Returns a `CStr` from `ptr`.
    ///
    /// Returns an error if `ptr` is null or misaligned.
    ///
    /// # Safety
    ///
    /// - The pointer must be initialized.
    /// - You must uphold Rust's aliasing rules.
    /// - The null terminator must be within [`isize::MAX`] bytes
    ///   from `ptr`.
    pub unsafe fn try_from_ptr(ptr: *const c_char) -> Result<Self, Error> {
        let ptr = Valid::new(ptr.cast_mut()).map_err(Error::from)?;
        Ok(Self { ptr })
    }

    /// Converts the `CStr` into a [`Path`].
    ///
    /// # Safety
    ///
    /// - You must uphold Rust's aliasing rules.
    pub unsafe fn into_path<'a>(self) -> &'a Path {
        // SAFETY: The pointer is valid because of `Valid`, but
        // we have to assume everything else.
        unsafe { Path::from_ptr(self.ptr.as_ptr()) }
    }

    /// Converts the `CStr` into a raw `*const c_char`.
    pub const fn as_ptr(self) -> *const c_char {
        self.ptr.as_ptr()
    }
}
672
673/// A thin wrapper around `(*mut T, *mut usize)`.
674///
675/// `*mut usize` contains the number of elements in the buffer
676/// `*mut T`. After calling [`copy_to`][Writer::copy_to], `*mut
677/// usize` is updated with the number of elements written to the
678/// buffer `*mut T`. If the buffer is too small to fit all the
679/// elements, `*mut usize` is updated with the required number of
680/// elements and [`copy_to`][Writer::copy_to] returns
681/// [`OutOfSpace`].
682///
683/// # Example
684///
685/// ```rust
686/// use core::ptr;
687///
688/// use aranya_capi_core::safe::Writer;
689///
690/// /// Writes `hello, world!` to `ptr`, which has a length of
691/// /// `*len`.
692/// ///
693/// /// Reports whether there was enough space to write the
694/// /// entire message to `ptr` and updates `len` with `"hello,
695/// /// world!".len()`.
696/// unsafe extern "C" fn write_hello_world(ptr: *mut u8, len: *mut usize) -> bool {
697/// let mut w = unsafe {
698/// // NB: A real function would return an error instead
699/// // of unwrapping.
700/// Writer::try_from_raw_parts(ptr, len).unwrap()
701/// };
702/// w.copy_to(|buf| {
703/// // NB: You can write as many times as necessary.
704/// buf.write_all(b"hello,");
705/// buf.write_all(b" world");
706/// buf.write_all(b"!");
707/// Ok::<(), ()>(())
708/// })
709/// .is_ok()
710/// }
711///
712/// // Retrieve the number of elements we want to write.
713/// let mut len = 0;
714/// let mut buf = vec![0u8; len];
715///
716/// let wrote_all = {
717/// // (ptr, len) must be either
718/// // - (non-null, >0)
719/// // - (null, 0)
720/// let ptr = if len == 0 {
721/// ptr::null_mut()
722/// } else {
723/// buf.as_mut_ptr()
724/// };
725/// unsafe { write_hello_world(ptr, &mut len) }
726/// };
727/// if !wrote_all {
728/// buf.resize(len, 0);
729/// unsafe {
730/// write_hello_world(buf.as_mut_ptr(), &mut len);
731/// }
732/// }
733/// assert_eq!(len, b"hello, world!".len());
734/// assert_eq!(&buf[..len], b"hello, world!");
735/// ```
// TODO(eric): Give this a more descriptive name?
pub struct Writer<T> {
    ptr: Valid<T>,    // destination buffer pointer
    len: usize,       // destination capacity, in elements
    nw: Valid<usize>, // out-param: total elements attempted to write
}
742
impl<T> Writer<T> {
    /// Creates a `Writer`.
    ///
    /// # Safety
    ///
    /// - The memory pointed to by `len` must be initialized.
    /// - If non-null, `ptr` must be valid for reads and writes up
    ///   to `*len` elements.
    /// - You must uphold Rust's lifetimes.
    /// - You must uphold Rust's aliasing guarantees.
    pub unsafe fn try_from_raw_parts(ptr: *mut T, len: *mut usize) -> Result<Self, Error> {
        let len = Valid::new(len)?;

        // Check that (ptr, len) is valid. We don't save the
        // resulting slice to simplify the API (no lifetimes,
        // etc.).
        let slice = {
            // SAFETY: See the method's safety docs.
            let len = unsafe { len.read() };
            // SAFETY: See the method's safety docs.
            unsafe { try_from_raw_parts_mut(ptr, len)? }
        };

        // We haven't written anything yet.
        // SAFETY: `len` is non-null and aligned (`Valid`); the
        // caller guarantees the pointee is live and ours to write.
        unsafe {
            len.write(0);
        }

        Ok(Self {
            // SAFETY: `slice` is a ref, so its pointer is always
            // non-null and suitably aligned.
            ptr: unsafe { Valid::new_unchecked(slice.as_mut_ptr()) },
            len: slice.len(),
            nw: len,
        })
    }
}
780
impl<T: Copy> Writer<T> {
    /// Invokes `f` once with a [`Buffer`] over the destination,
    /// then writes the total number of elements `f` attempted to
    /// write back through the length out-parameter.
    ///
    /// Returns [`OutOfSpace`] if `f` attempted to write more
    /// elements than the destination can hold; the out-parameter
    /// still receives the required element count.
    ///
    /// # Safety
    ///
    /// - The writer's pointer must be live.
    pub unsafe fn copy_to<F, R>(self, f: F) -> Result<R, OutOfSpace>
    where
        F: FnOnce(&mut Buffer<'_, T>) -> R,
    {
        // SAFETY: The constructor checks these safety
        // requirements.
        let dst = unsafe { slice::from_raw_parts_mut(self.ptr.as_mut_ptr(), self.len) };
        let mut buf = Buffer { dst, nw: 0 };
        let res = f(&mut buf);
        // Update `nw` even if we don't have enough space in
        // order to report to the caller the total amount of
        // space needed.
        // SAFETY: `self.nw` came from `Valid::new` in the
        // constructor; the caller guarantees it is still live.
        unsafe {
            self.nw.write(buf.nw);
        }
        if buf.nw > dst.len() {
            Err(OutOfSpace(()))
        } else {
            Ok(res)
        }
    }
}
809
/// A fixed-capacity output buffer that also counts how many
/// elements the caller *attempted* to write, even past capacity.
// TODO(eric): implement std::io::Write?
pub struct Buffer<'a, T> {
    dst: &'a mut [T],
    nw: usize,
}

impl<T: Copy> Buffer<'_, T> {
    /// Writes the entirety of `data` to `self`.
    ///
    /// If the remaining capacity cannot hold all of `data`,
    /// nothing is copied, but the attempted-write counter still
    /// advances so the caller learns the required capacity.
    pub fn write_all(&mut self, data: &[T]) {
        let begin = self.nw;
        let end = begin.saturating_add(data.len());
        // Record the attempt regardless of available space.
        self.nw = end;
        match self.dst.get_mut(begin..end) {
            Some(window) => window.copy_from_slice(data),
            None => (),
        }
    }
}
830
// Lets `ciborium` serialize CBOR directly into a `Buffer`.
// Writes never fail here: overflow is detected later via the
// attempted-write counter (see `Writer::copy_to`).
#[cfg(feature = "ciborium")]
impl ciborium_io::Write for &mut Buffer<'_, u8> {
    type Error = ();

    fn write_all(&mut self, data: &[u8]) -> Result<(), Self::Error> {
        Buffer::write_all(self, data);
        Ok(())
    }
    fn flush(&mut self) -> Result<(), Self::Error> {
        Ok(())
    }
}
843
/// Not enough space to write data to [`Writer`].
// The private unit field keeps external code from constructing
// this error.
#[derive(Clone, Debug, Eq, PartialEq, thiserror::Error)]
#[error("buffer out of space")]
pub struct OutOfSpace(());
848
/// A non-null, suitably aligned pointer.
///
/// Wraps [`NonNull`], so `Option<Valid<T>>` is pointer-sized.
#[repr(transparent)]
pub struct Valid<T: ?Sized> {
    ptr: NonNull<T>,
}

// Check that we take advantage of `NonNull`'s niche
// optimizations.
#[allow(clippy::assertions_on_constants)]
const _: () = {
    const WANT: usize = size_of::<Valid<()>>();
    const GOT: usize = size_of::<Option<Valid<()>>>();
    assert!(GOT == WANT);
};
863
864impl<T> Valid<T> {
865 /// Creates a new `Valid`.
866 ///
867 /// It returns [`Err(InvalidPtr)`][InvalidPtr] if `ptr` is
868 /// null or misaligned.
869 #[inline(always)]
870 pub fn new(ptr: *mut T) -> Result<Self, InvalidPtr> {
871 let Some(ptr) = NonNull::new(ptr) else {
872 return Err(InvalidPtr::Null);
873 };
874
875 if !ptr.is_aligned() {
876 return Err(InvalidPtr::Unaligned);
877 }
878 Ok(Self { ptr })
879 }
880}
881
impl<T: ?Sized> Valid<T> {
    /// Creates a new `Valid`.
    pub const fn from_ref(v: &T) -> Self {
        Self {
            // SAFETY: `v` is a reference, so it is always
            // non-null and suitably aligned.
            ptr: unsafe { NonNull::new_unchecked(ptr::from_ref::<T>(v).cast_mut()) },
        }
    }

    /// Creates a new `Valid`.
    pub fn from_mut(v: &mut T) -> Self {
        Self {
            // SAFETY: `v` is a reference, so it is always
            // non-null and suitably aligned.
            ptr: unsafe { NonNull::new_unchecked(ptr::from_mut::<T>(v)) },
        }
    }

    /// Creates a new `Valid`.
    ///
    /// # Safety
    ///
    /// - `ptr` must be non-null.
    /// - `ptr` must be suitably aligned.
    pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
        Self {
            // SAFETY: See the associated function's safety docs.
            ptr: unsafe { NonNull::new_unchecked(ptr) },
        }
    }

    /// Acquires the underlying pointer.
    pub const fn as_ptr(self) -> *const T {
        self.ptr.as_ptr()
    }

    /// Acquires the underlying pointer.
    pub const fn as_mut_ptr(self) -> *mut T {
        self.ptr.as_ptr()
    }

    /// Reads the underlying pointer.
    ///
    /// # Safety
    ///
    /// - The pointed-to memory must be initialized.
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // SAFETY: `Valid` only contains non-null and suitably
        // aligned pointers. The caller has to uphold the
        // remaining safety conditions.
        unsafe { self.ptr.read() }
    }

    /// Writes to the underlying pointer.
    ///
    /// NB: the old value is overwritten without being dropped.
    ///
    /// # Safety
    ///
    /// - The pointer must be live.
    pub unsafe fn write(self, val: T)
    where
        T: Sized,
    {
        // SAFETY: `Valid` only contains non-null and suitably
        // aligned pointers.
        unsafe { self.ptr.write(val) }
    }

    /// Returns a shared reference to the `Valid`.
    ///
    /// NB: the returned lifetime `'a` is chosen by the caller and
    /// is NOT tied to `self` — this is a deliberate FFI escape
    /// hatch.
    ///
    /// # Safety
    ///
    /// - The pointed-to memory must be initialized.
    /// - You must uphold Rust's aliasing rules.
    pub unsafe fn as_ref<'a>(&self) -> &'a T {
        // SAFETY: `Valid` only contains non-null and suitably
        // aligned pointers. The caller has to uphold the
        // remaining safety conditions.
        unsafe { self.ptr.as_ref() }
    }

    /// Returns an exclusive reference to the `Valid`.
    ///
    /// NB: the returned lifetime `'a` is chosen by the caller and
    /// is NOT tied to `self` — this is a deliberate FFI escape
    /// hatch.
    ///
    /// # Safety
    ///
    /// - The pointed-to memory must be initialized.
    /// - You must uphold Rust's aliasing rules.
    pub unsafe fn as_mut<'a>(&mut self) -> &'a mut T {
        // SAFETY: `Valid` only contains non-null and suitably
        // aligned pointers. The caller has to uphold the
        // remaining safety conditions.
        unsafe { self.ptr.as_mut() }
    }
}
979
// `Valid` is just a pointer, so it is freely copyable regardless
// of `T`.
impl<T: ?Sized> Copy for Valid<T> {}

impl<T: ?Sized> Clone for Valid<T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T: ?Sized> Eq for Valid<T> {}

impl<T: ?Sized> PartialEq for Valid<T> {
    // Pointer identity, not pointee equality.
    //
    // NOTE(review): for wide pointers, `ptr::eq` also compares
    // metadata, while `Ord` below compares thin addresses only —
    // so `a != b` with `a.cmp(&b) == Equal` is possible for
    // `T: ?Sized`. Confirm this asymmetry is intended.
    fn eq(&self, other: &Self) -> bool {
        ptr::eq(self.as_ptr(), other.as_ptr())
    }
}

impl<T: ?Sized> Ord for Valid<T> {
    fn cmp(&self, other: &Self) -> Ordering {
        // Cast to a thin pointer so wide-pointer metadata does
        // not participate in the ordering.
        self.as_ptr().cast::<()>().cmp(&other.as_ptr().cast::<()>())
    }
}

impl<T: ?Sized> PartialOrd for Valid<T> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl<T: ?Sized> Hash for Valid<T> {
    // Hashes the pointer value, matching the `PartialEq` impl.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_ptr().hash(state);
    }
}
1013
impl<T: ?Sized> fmt::Debug for Valid<T> {
    // Prints the pointer value; the pointee may not be safe to
    // read here.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}

impl<T: ?Sized> fmt::Pointer for Valid<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}
1025
/// Describes why a raw pointer is invalid.
#[derive(Copy, Clone, Debug, Eq, PartialEq, thiserror::Error)]
pub enum InvalidPtr {
    /// The pointer is null.
    #[error("null pointer")]
    Null,
    /// The pointer is unaligned.
    #[error("unaligned pointer")]
    Unaligned,
}
1036
impl InvalidPtr {
    /// Returns `InvalidPtr` as a constant string.
    ///
    /// NB: keep these strings in sync with the `#[error(...)]`
    /// messages on the enum.
    pub const fn as_str(self) -> &'static str {
        match self {
            Self::Null => "null pointer",
            Self::Unaligned => "unaligned pointer",
        }
    }
}
1046
/// Formats a `usize` as a `0x`-prefixed lowercase hex value for
/// log fields.
#[repr(transparent)]
struct Hex(usize);

impl fmt::Display for Hex {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `{:#x}` adds the `0x` prefix. The previous `{:#0x}`
        // carried a zero-pad flag with no width, which is
        // a no-op; drop it.
        write!(f, "{:#x}", self.0)
    }
}
1055
#[cfg(test)]
mod tests {
    use core::{
        hint::black_box,
        mem::ManuallyDrop,
        sync::atomic::{AtomicBool, Ordering},
    };

    use super::*;

    /// A trivial payload type for exercising `Safe`.
    #[derive(Copy, Clone)]
    struct Dummy {
        _pad: u32,
    }
    impl Dummy {
        const fn new(x: u32) -> Self {
            Self { _pad: x }
        }
    }

    /// Tests that we detect when a `Safe` is copied.
    #[test]
    fn test_safe_copy_check() {
        let mut orig = MaybeUninit::uninit();
        Safe::init(&mut orig, Dummy::new(123));

        // SAFETY: `orig` was initialized by `Safe::init`.
        unsafe { orig.assume_init_ref() }.check().unwrap();

        // Pretend that C copied `orig`.
        {
            let mut copy = MaybeUninit::<Safe<Dummy>>::uninit();
            // SAFETY: FFI call, no invariants.
            unsafe {
                black_box(libc::memmove(
                    black_box(ptr::addr_of_mut!(copy).cast()),
                    black_box(ptr::addr_of!(orig).cast()),
                    size_of_val(&orig),
                ))
            };
            assert_eq!(
                // SAFETY: `orig` was initialized by
                // `Safe::init`.
                // NB: was mojibake `black_box(©)` — restored
                // to `&copy`.
                unsafe { black_box(&copy).assume_init_ref() }.check(),
                Err(Error::AddrChanged)
            );
        }

        assert_eq!(
            // SAFETY: `orig` was initialized by `Safe::init`.
            unsafe { black_box(orig).assume_init() }.check(),
            Err(Error::AddrChanged)
        );
    }

    #[test]
    fn test_owned_ptr_read() {
        struct T<'a>(&'a AtomicBool);
        impl Drop for T<'_> {
            fn drop(&mut self) {
                assert!(!self.0.load(Ordering::SeqCst));
                self.0.store(true, Ordering::SeqCst);
            }
        }

        let dropped = AtomicBool::new(false);
        let mut val = ManuallyDrop::new(T(&dropped));
        // SAFETY: `ptr::addr_of_mut` always returns a non-null,
        // suitably aligned pointer.
        let ptr = unsafe { OwnedPtr::new(ptr::addr_of_mut!(val)) }.unwrap();

        let t = unsafe { ptr.read() };
        // `read` should not drop the inner value.
        assert!(!dropped.load(Ordering::SeqCst));
        drop(t);
        // Dropping `t` should flip the flag.
        assert!(dropped.load(Ordering::SeqCst));
    }
}
1134}