hipstr/bytes/raw/
allocated.rs1use alloc::vec::Vec;
4use core::marker::PhantomData;
5use core::mem::{forget, ManuallyDrop, MaybeUninit};
6use core::ops::{Deref, DerefMut, Range};
7use core::panic::{RefUnwindSafe, UnwindSafe};
8use core::ptr::NonNull;
9
10use crate::backend::Backend;
11use crate::smart::{Inner, Smart, UpdateResult};
12
/// Mask selecting the low address bits reserved for tagging (see `check_tag`).
const MASK: usize = super::MASK as usize;
/// Tag value identifying the allocated (heap-backed) representation.
const TAG: usize = super::TAG_ALLOCATED as usize;
15
16struct TaggedSmart<B: Backend>(usize, PhantomData<Smart<Vec<u8>, B>>);
21
22impl<B: Backend> Clone for TaggedSmart<B> {
26 #[cfg_attr(coverage_nightly, coverage(off))]
27 fn clone(&self) -> Self {
28 *self
29 }
30}
31
32impl<B: Backend> Copy for TaggedSmart<B> {}
33
impl<B: Backend> TaggedSmart<B> {
    /// Tags the smart pointer's address and type-erases it into a `usize`.
    ///
    /// Takes over ownership of `raw`: the pointer is leaked into the tag word
    /// and must later be recovered with [`Self::into`].
    #[inline]
    fn from(raw: Smart<Vec<u8>, B>) -> Self {
        let ptr = raw.into_raw().as_ptr();
        // The allocation must be aligned enough that the low tag bits are free.
        debug_assert!(ptr.is_aligned());
        debug_assert!((ptr as usize) & MASK == 0);

        // `expose_provenance` so the address can be turned back into a usable
        // pointer with `with_exposed_provenance_mut` in `into`.
        let addr = ptr.map_addr(|addr| addr | TAG).expose_provenance();

        Self(addr, PhantomData)
    }

    /// Recovers the smart pointer from the tagged address, transferring
    /// ownership back to the caller.
    #[inline]
    fn into(self) -> Smart<Vec<u8>, B> {
        let this: Smart<Vec<u8>, B>;

        debug_assert!(self.0 & MASK == TAG);

        unsafe {
            // Strip the tag and re-acquire the provenance exposed in `from`.
            let new_ptr = core::ptr::with_exposed_provenance_mut::<Inner<Vec<u8>, B>>(self.0 ^ TAG);

            debug_assert!(!new_ptr.is_null());

            // Under Miri, dereference once so the provenance round-trip is
            // actually checked.
            #[cfg(miri)]
            let _ = &*new_ptr;
            this = Smart::from_raw(NonNull::new_unchecked(new_ptr));
        }

        this
    }

    /// Returns `true` if the low bits of the stored address carry `TAG`.
    #[inline]
    const fn check_tag(self) -> bool {
        self.0 & MASK == TAG
    }

    /// Counted clone: clones the underlying `Smart` (bumping its reference
    /// count) and returns a new tagged handle to the same allocation.
    fn explicit_clone(self) -> Self {
        // `ManuallyDrop` keeps the rebuilt `Smart` from being dropped here,
        // which would wrongly decrement the count `self` still holds.
        let r = ManuallyDrop::new(self.into());
        Self::from((*r).clone())
    }
}
86
/// Allocated (heap-backed) representation of a raw byte string: a
/// `ptr`/`len` view into a reference-counted `Vec<u8>` owner.
///
/// NOTE(review): layout is `#[repr(C)]` and `owner` is placed first on
/// little-endian, last on big-endian — presumably so the tag bits of the
/// `owner` word line up with the discriminant byte of the enclosing raw
/// representation; confirm against `super::MASK`/`super::TAG_ALLOCATED`.
#[repr(C)]
pub struct Allocated<B: Backend> {
    // Tagged handle to the reference-counted `Vec<u8>` owner.
    #[cfg(target_endian = "little")]
    owner: TaggedSmart<B>,

    // Pointer to the first byte of this view inside the owner's buffer.
    ptr: *const u8,

    // Length in bytes of this view.
    len: usize,

    // Tagged handle to the reference-counted `Vec<u8>` owner.
    #[cfg(target_endian = "big")]
    owner: TaggedSmart<B>,
}
109
110impl<B: Backend> Copy for Allocated<B> {}
111
112impl<B: Backend> Clone for Allocated<B> {
113 #[cfg_attr(coverage_nightly, coverage(off))]
114 fn clone(&self) -> Self {
115 *self
116 }
117}
118
119unsafe impl<B: Backend + Sync> Sync for Allocated<B> {}
120
121unsafe impl<B: Backend + Send> Send for Allocated<B> {}
122
123impl<B: Backend + Unpin> Unpin for Allocated<B> {}
124
125impl<B: Backend + UnwindSafe> UnwindSafe for Allocated<B> {}
126
127impl<B: Backend + RefUnwindSafe> RefUnwindSafe for Allocated<B> {}
128
impl<B: Backend> Allocated<B> {
    /// Rebuilds the owning `Smart` from the tagged word, transferring the
    /// reference this representation holds (no count change).
    fn into_owner(self) -> Smart<Vec<u8>, B> {
        self.owner.into()
    }

    /// Borrows the owner without affecting the reference count.
    ///
    /// The guard is a `ManuallyDrop` so the temporary `Smart` it wraps is
    /// never dropped (a drop would wrongly decrement the count).
    fn owner(&self) -> impl Deref<Target = Smart<Vec<u8>, B>> {
        ManuallyDrop::new(self.into_owner())
    }

    /// Mutably borrows the owner without affecting the reference count.
    ///
    /// # Safety
    ///
    /// The caller must ensure this representation is the unique owner
    /// (`is_unique`); otherwise aliased mutation of the buffer may occur.
    unsafe fn owner_mut(&mut self) -> impl DerefMut<Target = Smart<Vec<u8>, B>> {
        debug_assert!(self.is_unique());
        ManuallyDrop::new(self.into_owner())
    }

    /// Creates a new allocated representation taking ownership of `v`,
    /// viewing its full length.
    #[inline]
    pub fn new(v: Vec<u8>) -> Self {
        let ptr = v.as_ptr();
        let len = v.len();
        let owner = Smart::new(v);

        let this = Self {
            ptr,
            len,
            owner: TaggedSmart::from(owner),
        };

        debug_assert!(this.is_unique());

        this
    }

    /// Creates a new allocated representation by copying `slice` into a
    /// fresh vector.
    pub fn from_slice(slice: &[u8]) -> Self {
        Self::new(slice.to_vec())
    }

    /// Returns the length in bytes of this view.
    #[inline]
    pub const fn len(&self) -> usize {
        self.len
    }

    /// Returns this view as a byte slice.
    #[inline]
    pub const fn as_slice(&self) -> &[u8] {
        // SAFETY(review): `ptr..ptr+len` lies inside the owner's live buffer
        // (the `is_valid` invariant), which stays allocated while `self`
        // holds its reference.
        unsafe { core::slice::from_raw_parts(self.ptr, self.len) }
    }

    /// Returns a raw pointer to the first byte of this view.
    #[inline]
    pub const fn as_ptr(&self) -> *const u8 {
        self.ptr
    }

    /// Returns a mutable raw pointer, or `None` if the owner is shared.
    #[inline]
    #[allow(clippy::needless_pass_by_ref_mut)]
    pub fn as_mut_ptr(&mut self) -> Option<*mut u8> {
        if self.is_unique() {
            Some(self.ptr.cast_mut())
        } else {
            None
        }
    }

    /// Returns a mutable raw pointer without checking uniqueness.
    ///
    /// # Safety
    ///
    /// The caller must ensure this representation is the unique owner.
    #[inline]
    #[allow(clippy::needless_pass_by_ref_mut)]
    pub unsafe fn as_mut_ptr_unchecked(&mut self) -> *mut u8 {
        debug_assert!(self.is_unique());
        self.ptr.cast_mut()
    }

    /// Returns a mutable slice over this view, or `None` if the owner is
    /// shared.
    #[inline]
    pub fn as_mut_slice(&mut self) -> Option<&mut [u8]> {
        if self.is_unique() {
            // SAFETY: uniqueness was just checked above.
            Some(unsafe { self.as_mut_slice_unchecked() })
        } else {
            None
        }
    }

    /// Returns a mutable slice over this view without checking uniqueness.
    ///
    /// # Safety
    ///
    /// The caller must ensure this representation is the unique owner.
    #[inline]
    pub unsafe fn as_mut_slice_unchecked(&mut self) -> &mut [u8] {
        debug_assert!(self.is_valid());
        debug_assert!(self.is_unique());

        unsafe { core::slice::from_raw_parts_mut(self.ptr.cast_mut(), self.len) }
    }

    /// Returns a new representation viewing the sub-range `range`, bumping
    /// the owner's reference count.
    ///
    /// # Safety
    ///
    /// `range` must satisfy `start <= end` and `end <= self.len`.
    #[inline]
    pub unsafe fn slice_unchecked(&self, range: Range<usize>) -> Self {
        debug_assert!(self.is_valid());
        debug_assert!(range.start <= range.end);
        debug_assert!(range.start <= self.len);
        debug_assert!(range.end <= self.len);

        let owner = self.owner.explicit_clone();

        // SAFETY: in bounds per the contract above.
        let ptr = unsafe { self.ptr.add(range.start) };

        Self {
            ptr,
            len: range.len(),
            owner,
        }
    }

    /// Counted clone: increments the owner's reference count, or falls back
    /// to a fresh allocation if the counter would overflow.
    #[inline]
    pub fn explicit_clone(&self) -> Self {
        debug_assert!(self.is_valid());

        let owner = self.owner();
        if owner.incr() == UpdateResult::Overflow {
            Self::from_slice(self.as_slice())
        } else {
            *self
        }
    }

    /// Counted drop: rebuilding the `Smart` and letting it fall out of scope
    /// performs the decrement (and the deallocation if this was the last
    /// reference).
    #[inline]
    pub fn explicit_drop(self) {
        debug_assert!(self.is_valid());
        let _ = self.into_owner();
    }

    /// Consistency check used by debug assertions: the tag is present and
    /// `ptr..ptr+len` lies within the owner's initialized buffer.
    #[inline]
    #[cfg_attr(coverage_nightly, coverage(off))]
    pub fn is_valid(&self) -> bool {
        if !self.owner.check_tag() {
            return false;
        }

        let owner = self.owner();
        let owner_ptr = owner.as_ptr();
        // SAFETY(review): `offset_from` requires both pointers to belong to
        // the same allocation — true for representations built by this module.
        let shift = unsafe { self.ptr.offset_from(owner_ptr) };
        shift >= 0 && {
            #[allow(clippy::cast_sign_loss)]
            let shift = shift as usize;
            shift <= owner.len() && shift + self.len <= owner.len()
        }
    }

    /// Returns the capacity of the owning vector.
    #[inline]
    pub fn capacity(&self) -> usize {
        debug_assert!(self.is_valid());

        self.owner().capacity()
    }

    /// Converts back into a `Vec<u8>` if this representation is the unique
    /// owner and the view starts at the buffer's first byte; otherwise
    /// returns `self` unchanged in the `Err` variant.
    #[inline]
    pub fn try_into_vec(self) -> Result<Vec<u8>, Self> {
        debug_assert!(self.is_valid());

        let owner = self.owner();
        // A shifted view cannot be handed out as a `Vec`: a vector must start
        // at the beginning of its own allocation.
        if self.ptr != owner.as_ptr() {
            return Err(self);
        }

        let len = self.len();

        self.into_owner().try_unwrap().map_or_else(
            |owner| {
                // Still shared: `self` keeps its reference, so the rebuilt
                // `Smart` returned by `try_unwrap` must not be dropped here.
                forget(owner);
                Err(self)
            },
            |mut owner| {
                // Discard any bytes past this representation's view.
                owner.truncate(len);
                Ok(owner)
            },
        )
    }

    /// Returns `true` if this representation is the unique owner of the
    /// underlying vector.
    pub fn is_unique(&self) -> bool {
        debug_assert!(self.is_valid());

        self.owner().is_unique()
    }

    /// Appends `addition` after this view, growing the owning vector.
    ///
    /// # Safety
    ///
    /// The caller must ensure this representation is the unique owner.
    pub unsafe fn push_slice_unchecked(&mut self, addition: &[u8]) {
        debug_assert!(self.is_valid());

        // SAFETY: uniqueness is the caller's contract.
        let mut owner = unsafe { self.owner_mut() };

        let v = unsafe { owner.as_mut_unchecked() };

        // Byte offset of this view inside the owner's buffer.
        #[allow(clippy::cast_sign_loss)]
        let shift = unsafe { self.ptr.offset_from(v.as_ptr()) as usize };
        // Drop trailing bytes beyond the view, then append.
        v.truncate(shift + self.len);
        v.extend_from_slice(addition);
        self.len += addition.len();

        // The vector may have reallocated while growing: recompute our
        // pointer from its (possibly new) base address.
        self.ptr = unsafe { v.as_ptr().add(shift) };
    }

    /// Returns the owner's spare capacity as uninitialized bytes, or an
    /// empty slice when the owner is shared.
    pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<u8>] {
        debug_assert!(self.is_valid());

        if !self.is_unique() {
            return &mut [];
        }

        // SAFETY: uniqueness was just checked above.
        let mut owner = unsafe { self.owner_mut() };

        let v = unsafe { owner.as_mut_unchecked_extended() };

        // Byte offset of this view inside the owner's buffer.
        #[allow(clippy::cast_sign_loss)]
        let start = unsafe { self.ptr.offset_from(v.as_ptr()) as usize };

        // Trim trailing bytes so the spare capacity begins right after this
        // view's last byte.
        v.truncate(start + self.len);

        v.spare_capacity_mut()
    }

    /// Sets the length of this view.
    ///
    /// Shrinking is always allowed and touches only `self.len`; growing also
    /// extends the owning vector's length.
    ///
    /// # Safety
    ///
    /// When growing, the caller must ensure this representation is the
    /// unique owner, that `new_len` fits in the capacity, and (per
    /// `Vec::set_len`'s contract) that the newly covered bytes are
    /// initialized.
    pub unsafe fn set_len(&mut self, new_len: usize) {
        if new_len <= self.len {
            self.len = new_len;
            return;
        }

        debug_assert!(self.is_unique());
        debug_assert!(new_len <= self.capacity());

        let mut owner = unsafe { self.owner_mut() };

        let v = unsafe { owner.as_mut_unchecked() };

        // Byte offset of this view inside the owner's buffer (non-negative
        // by construction; `abs_diff(0)` converts the `isize` to `usize`).
        #[allow(clippy::cast_sign_loss)]
        let start = unsafe { self.ptr.offset_from(v.as_ptr()).abs_diff(0) };
        unsafe { v.set_len(start + new_len) };
        self.len = new_len;
    }

    /// Shrinks the capacity to at most `max(min_capacity, len)` by moving the
    /// view's bytes into a freshly allocated vector when a shrink is needed.
    pub fn shrink_to(&mut self, min_capacity: usize) {
        // Never shrink below the current view length.
        let min_capacity = min_capacity.max(self.len);

        if self.capacity() <= min_capacity {
            return;
        }

        let mut new_vec = Vec::with_capacity(min_capacity);
        new_vec.extend_from_slice(self.as_slice());
        // Swap in the new representation, then release our reference to the
        // old owner.
        let old = core::mem::replace(self, Self::new(new_vec));
        old.explicit_drop();
    }
}
481
#[cfg(test)]
mod tests {
    use alloc::vec;

    use super::Allocated;
    use crate::Rc;

    /// Allocating an empty vector and releasing it must be balanced.
    #[test]
    fn test_alloc() {
        let a = Allocated::<Rc>::new(vec![]);
        a.explicit_drop();
    }

    /// `try_into_vec` must refuse while shared and succeed once unique.
    #[test]
    #[cfg_attr(coverage_nightly, coverage(off))]
    fn test_try_into_vec() {
        let a = Allocated::<Rc>::new(vec![0, 1, 2]);
        assert_eq!(a.owner().ref_count(), 1);

        {
            // Taking a sub-slice bumps the reference count…
            let slice = unsafe { a.slice_unchecked(1..2) };
            assert_eq!(a.owner().ref_count(), 2);

            // …so the shared slice cannot be unwrapped into a `Vec`.
            let Err(shared) = slice.try_into_vec() else {
                panic!("shared reference cannot be converted to vec")
            };
            shared.explicit_drop();
        }

        assert_eq!(a.owner().ref_count(), 1);

        assert!(a.try_into_vec().is_ok());
    }
}