1use super::utils::{safe_copy, set_zero};
2use libc::{c_void, free, malloc, posix_memalign};
3use nix::errno::Errno;
4use std::slice;
5use std::{
6 fmt,
7 ops::{Deref, DerefMut},
8 ptr::{NonNull, null_mut},
9};
10
11use fail::fail_point;
12
/// A byte buffer backed by a raw C allocation (`malloc`/`posix_memalign`)
/// or by a borrowed foreign pointer (`from_c_ref_*`).
///
/// Two flags are bit-packed into the high bit (`MAX_BUFFER_SIZE`) of the
/// length fields:
/// - `size` high bit set => the buffer owns its allocation (freed on drop);
/// - `cap` high bit set  => the buffer may be mutated.
#[repr(C)]
pub struct Buffer {
    // Never null; either our own allocation or a caller-supplied pointer.
    buf_ptr: NonNull<c_void>,
    // Logical length in bytes, OR'd with the "owned" flag bit.
    pub(crate) size: u32,
    // Capacity in bytes, OR'd with the "mutable" flag bit.
    pub(crate) cap: u32,
}
30
31impl fmt::Debug for Buffer {
32 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
33 write!(f, "buffer {:p} size {}", self.get_raw(), self.len())
34 }
35}
36
// SAFETY: NOTE(review) — Buffer is a pointer plus two u32s with no thread
// affinity, so moving it across threads is sound for owned allocations.
// For buffers created via `from_c_ref_*` this additionally requires that
// the foreign pointer is not concurrently used elsewhere — confirm at the
// call sites.
unsafe impl Send for Buffer {}

// SAFETY: NOTE(review) — `&Buffer` exposes only reads (no interior
// mutability in this type), so shared references are sound under the same
// no-external-aliasing assumption as `Send` above.
unsafe impl Sync for Buffer {}
40
/// Minimum alignment, in bytes, for aligned allocations; all alignment
/// checks in this module are performed against this granularity.
pub const MIN_ALIGN: u32 = 512;
/// Exclusive upper bound on buffer sizes. The value (`1 << 31`) doubles as
/// the flag bit packed into `Buffer::size` (owned) and `Buffer::cap`
/// (mutable).
pub const MAX_BUFFER_SIZE: usize = 1 << 31;

/// Returns `true` when both `offset` and `size` are multiples of
/// [`MIN_ALIGN`].
fn is_aligned(offset: usize, size: usize) -> bool {
    // Hoist the shared mask instead of spelling it out twice, and drop the
    // non-idiomatic trailing `return`.
    let mask = MIN_ALIGN as usize - 1;
    (offset & mask == 0) && (size & mask == 0)
}
47
48impl Buffer {
49 #[inline]
56 pub fn aligned(size: i32) -> Result<Buffer, Errno> {
57 let mut _buf = Self::_alloc(MIN_ALIGN, size)?;
58 fail_point!("alloc_buf", |_| {
59 rand_buffer(&mut _buf);
60 return Ok(_buf);
61 });
62 return Ok(_buf);
63 }
64
65 #[inline]
74 pub fn aligned_by(size: i32, align: u32) -> Result<Buffer, Errno> {
75 let mut _buf = Self::_alloc(align, size)?;
76 fail_point!("alloc_buf", |_| {
77 rand_buffer(&mut _buf);
78 return Ok(_buf);
79 });
80 return Ok(_buf);
81 }
82
83 #[inline]
90 pub fn alloc(size: i32) -> Result<Buffer, Errno> {
91 let mut _buf = Self::_alloc(0, size)?;
92 fail_point!("alloc_buf", |_| {
93 rand_buffer(&mut _buf);
94 return Ok(_buf);
95 });
96 return Ok(_buf);
97 }
98
99 #[inline]
103 fn _alloc(align: u32, size: i32) -> Result<Self, Errno> {
104 assert!(size > 0);
105 let mut ptr: *mut c_void = null_mut();
106 if align > 0 {
107 debug_assert!((align & (MIN_ALIGN - 1)) == 0);
108 debug_assert!((size as u32 & (align - 1)) == 0);
109 unsafe {
110 let res = posix_memalign(&mut ptr, align as libc::size_t, size as libc::size_t);
111 if res != 0 {
112 return Err(Errno::ENOMEM);
113 }
114 }
115 } else {
116 ptr = unsafe { malloc(size as libc::size_t) };
117 if ptr.is_null() {
118 return Err(Errno::ENOMEM);
119 }
120 }
121 let _size = size as u32 | MAX_BUFFER_SIZE as u32;
123 let _cap = _size;
125 Ok(Self { buf_ptr: unsafe { NonNull::new_unchecked(ptr) }, size: _size, cap: _cap })
126 }
127
128 #[inline]
134 pub fn from_c_ref_mut(ptr: *mut c_void, size: i32) -> Self {
135 assert!(size >= 0);
136 assert!(!ptr.is_null());
137 let _cap = size as u32 | MAX_BUFFER_SIZE as u32;
140 Self { buf_ptr: unsafe { NonNull::new_unchecked(ptr) }, size: size as u32, cap: _cap }
141 }
142
143 #[inline]
149 pub fn from_c_ref_const(ptr: *const c_void, size: i32) -> Self {
150 assert!(size >= 0);
151 assert!(!ptr.is_null());
152 Self {
155 buf_ptr: unsafe { NonNull::new_unchecked(ptr as *mut c_void) },
156 size: size as u32,
157 cap: size as u32,
158 }
159 }
160
161 #[inline(always)]
163 pub fn is_owned(&self) -> bool {
164 self.size & (MAX_BUFFER_SIZE as u32) != 0
165 }
166
167 #[inline(always)]
169 pub fn is_mutable(&self) -> bool {
170 self.cap & (MAX_BUFFER_SIZE as u32) != 0
171 }
172
173 #[inline(always)]
175 pub fn len(&self) -> usize {
176 let size = self.size & (MAX_BUFFER_SIZE as u32 - 1);
177 size as usize
178 }
179
180 #[inline(always)]
182 pub fn capacity(&self) -> usize {
183 let cap = self.cap & (MAX_BUFFER_SIZE as u32 - 1);
184 cap as usize
185 }
186
187 #[inline(always)]
189 pub fn set_len(&mut self, len: usize) {
190 assert!(len < MAX_BUFFER_SIZE, "size {} >= {} is not supported", len, MAX_BUFFER_SIZE);
191 assert!(len <= self.cap as usize, "size {} must be <= {}", len, self.cap);
192 let owned: u32 = self.size & MAX_BUFFER_SIZE as u32;
193 self.size = owned | len as u32;
194 }
195
196 #[inline(always)]
197 pub fn as_ref(&self) -> &[u8] {
198 unsafe { slice::from_raw_parts(self.buf_ptr.as_ptr() as *const u8, self.len()) }
199 }
200
201 #[inline(always)]
205 pub fn as_mut(&mut self) -> &mut [u8] {
206 #[cfg(debug_assertions)]
207 {
208 if !self.is_mutable() {
209 panic!("Cannot change a mutable buffer")
210 }
211 }
212 unsafe { slice::from_raw_parts_mut(self.buf_ptr.as_ptr() as *mut u8, self.len()) }
213 }
214
215 #[inline(always)]
217 pub fn is_aligned(&self) -> bool {
218 is_aligned(self.buf_ptr.as_ptr() as usize, self.capacity())
219 }
220
221 #[inline]
223 pub fn get_raw(&self) -> *const u8 {
224 self.buf_ptr.as_ptr() as *const u8
225 }
226
227 #[inline]
229 pub fn get_raw_mut(&mut self) -> *mut u8 {
230 self.buf_ptr.as_ptr() as *mut u8
231 }
232
233 #[inline]
241 pub fn copy_from(&mut self, offset: usize, other: &[u8]) {
242 let size = self.len();
243 let dst = self.as_mut();
244 if offset > 0 {
245 assert!(offset < size);
246 safe_copy(&mut dst[offset..], other);
247 } else {
248 safe_copy(dst, other);
249 }
250 }
251
252 #[inline]
258 pub fn copy_and_clean(&mut self, offset: usize, other: &[u8]) {
259 let end: usize;
260 let size = self.len();
261 let dst = self.as_mut();
262 assert!(offset < size);
263 if offset > 0 {
264 set_zero(&mut dst[0..offset]);
265 end = offset + safe_copy(&mut dst[offset..], other);
266 } else {
267 end = safe_copy(dst, other);
268 }
269 if size > end {
270 set_zero(&mut dst[end..]);
271 }
272 }
273
274 #[inline]
276 pub fn zero(&mut self) {
277 set_zero(self);
278 }
279
280 #[inline]
282 pub fn set_zero(&mut self, offset: usize, len: usize) {
283 let _len = self.len();
284 let mut end = offset + len;
285 if end > _len {
286 end = _len;
287 }
288 let buf = self.as_mut();
289 if offset > 0 || end < _len {
290 set_zero(&mut buf[offset..end]);
291 } else {
292 set_zero(buf);
293 }
294 }
295}
296
297impl Clone for Buffer {
300 fn clone(&self) -> Self {
301 let mut new_buf = if self.is_aligned() {
302 Self::aligned(self.capacity() as i32).unwrap()
303 } else {
304 Self::alloc(self.capacity() as i32).unwrap()
305 };
306 if self.len() != self.capacity() {
307 new_buf.set_len(self.len());
308 }
309 safe_copy(new_buf.as_mut(), self.as_ref());
310 new_buf
311 }
312}
313
314impl Drop for Buffer {
316 fn drop(&mut self) {
317 if self.is_owned() {
318 unsafe {
319 free(self.buf_ptr.as_ptr());
320 }
321 }
322 }
323}
324
325impl Into<Vec<u8>> for Buffer {
327 fn into(mut self) -> Vec<u8> {
328 if !self.is_owned() {
329 panic!("buffer is c ref, not owned");
330 }
331 self.size &= MAX_BUFFER_SIZE as u32 - 1;
333 return unsafe {
334 Vec::<u8>::from_raw_parts(self.buf_ptr.as_ptr() as *mut u8, self.len(), self.capacity())
335 };
336 }
337}
338
/// Takes ownership of a `Vec<u8>`'s allocation without copying; the result
/// is flagged both owned and mutable.
impl From<Vec<u8>> for Buffer {
    fn from(buf: Vec<u8>) -> Self {
        let size = buf.len();
        let cap = buf.capacity();
        // Both values must fit below the flag bit packed into u32 fields.
        assert!(size < MAX_BUFFER_SIZE, "size {} >= {} is not supported", size, MAX_BUFFER_SIZE);
        assert!(cap < MAX_BUFFER_SIZE, "cap {} >= {} is not supported", cap, MAX_BUFFER_SIZE);
        // High bit marks the buffer as owned (size) and mutable (cap).
        let _size = size as u32 | MAX_BUFFER_SIZE as u32;
        let _cap = cap as u32 | MAX_BUFFER_SIZE as u32;
        // NOTE(review): a Vec with capacity 0 has a dangling (non-null)
        // pointer, which Drop would later pass to libc::free — confirm
        // callers never convert empty Vecs. Also, this memory came from the
        // Rust global allocator while Drop frees with libc::free; sound only
        // if the global allocator is malloc-backed — confirm.
        Buffer {
            // SAFETY: Vec::leak never yields a null data pointer.
            buf_ptr: unsafe { NonNull::new_unchecked(buf.leak().as_mut_ptr() as *mut c_void) },
            size: _size,
            cap: _cap,
        }
    }
}
357
impl Deref for Buffer {
    type Target = [u8];

    /// Dereferences to the logical byte slice (`len()` bytes, not capacity).
    #[inline]
    fn deref(&self) -> &[u8] {
        // Resolves to the inherent `Buffer::as_ref` (inherent methods take
        // precedence over trait methods), so this does not recurse.
        self.as_ref()
    }
}

impl AsRef<[u8]> for Buffer {
    /// Borrows the logical contents as a byte slice.
    #[inline]
    fn as_ref(&self) -> &[u8] {
        // Delegates to the inherent `Buffer::as_ref`, not this trait method.
        self.as_ref()
    }
}

impl AsMut<[u8]> for Buffer {
    /// Borrows the logical contents as a mutable byte slice; in debug
    /// builds the inherent method panics on immutable buffers.
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        // Delegates to the inherent `Buffer::as_mut`, not this trait method.
        self.as_mut()
    }
}

impl DerefMut for Buffer {
    /// Mutably dereferences to the logical byte slice.
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        // Delegates to the inherent `Buffer::as_mut`.
        self.as_mut()
    }
}