1use super::utils::{safe_copy, set_zero};
2use libc::{c_void, free, malloc, posix_memalign};
3use nix::errno::Errno;
4use std::slice;
5use std::{
6 fmt,
7 ops::{Deref, DerefMut},
8 ptr::{NonNull, null_mut},
9};
10
11use fail::fail_point;
12
#[repr(C)]
pub struct Buffer {
    // Non-null pointer to the backing memory: either an allocation made by
    // `_alloc` / `From<Vec<u8>>`, or a borrowed pointer from `from_c_ref_*`.
    buf_ptr: NonNull<c_void>,
    // Logical length in bytes; bit 31 (`MAX_BUFFER_SIZE`) flags that the
    // buffer OWNS its allocation (and frees it on drop).
    pub(crate) size: u32,
    // Capacity in bytes; bit 31 flags that the buffer is MUTABLE
    // (`as_mut` panics in debug builds when it is clear).
    pub(crate) cap: u32,
}
30
31impl fmt::Debug for Buffer {
32 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
33 write!(f, "buffer {:p} size {}", self.get_raw(), self.len())
34 }
35}
36
// SAFETY: the raw pointer is only ever read through `&self` and written
// through `&mut self`, so the usual borrow rules serialize access.
// NOTE(review): for owned buffers this is sound; for `from_c_ref_*` borrows
// it additionally assumes the C side does not free or mutate the memory
// concurrently — confirm the callers' contracts.
unsafe impl Send for Buffer {}

unsafe impl Sync for Buffer {}
40
/// Minimum alignment, in bytes, for aligned (direct-I/O style) buffers.
pub const MIN_ALIGN: u32 = 512;
/// Exclusive upper bound on buffer sizes. The bit `1 << 31` is reserved as
/// a flag packed into `Buffer::size` (owned) and `Buffer::cap` (mutable).
pub const MAX_BUFFER_SIZE: usize = 1 << 31;

/// Returns `true` when both `offset` and `size` are multiples of
/// [`MIN_ALIGN`] (which must be a power of two for the mask trick to work).
fn is_aligned(offset: usize, size: usize) -> bool {
    let mask = MIN_ALIGN as usize - 1;
    offset & mask == 0 && size & mask == 0
}
47
48impl Buffer {
49 #[inline]
56 pub fn aligned(size: i32) -> Result<Buffer, Errno> {
57 let mut _buf = Self::_alloc(MIN_ALIGN, size)?;
58 fail_point!("alloc_buf", |_| {
59 rand_buffer(&mut _buf);
60 return Ok(_buf);
61 });
62 return Ok(_buf);
63 }
64
65 #[inline]
74 pub fn aligned_by(size: i32, align: u32) -> Result<Buffer, Errno> {
75 let mut _buf = Self::_alloc(align, size)?;
76 fail_point!("alloc_buf", |_| {
77 rand_buffer(&mut _buf);
78 return Ok(_buf);
79 });
80 return Ok(_buf);
81 }
82
83 #[inline]
90 pub fn alloc(size: i32) -> Result<Buffer, Errno> {
91 let mut _buf = Self::_alloc(0, size)?;
92 fail_point!("alloc_buf", |_| {
93 rand_buffer(&mut _buf);
94 return Ok(_buf);
95 });
96 return Ok(_buf);
97 }
98
99 #[inline]
103 fn _alloc(align: u32, size: i32) -> Result<Self, Errno> {
104 assert!(size > 0);
105 let mut ptr: *mut c_void = null_mut();
106 if align > 0 {
107 debug_assert!((align & (MIN_ALIGN - 1)) == 0);
108 debug_assert!((size as u32 & (align - 1)) == 0);
109 unsafe {
110 let res = posix_memalign(&mut ptr, align as libc::size_t, size as libc::size_t);
111 if res != 0 {
112 return Err(Errno::ENOMEM);
113 }
114 }
115 } else {
116 ptr = unsafe { malloc(size as libc::size_t) };
117 if ptr.is_null() {
118 return Err(Errno::ENOMEM);
119 }
120 }
121 let _size = size as u32 | MAX_BUFFER_SIZE as u32;
123 let _cap = _size;
125 Ok(Self { buf_ptr: unsafe { NonNull::new_unchecked(ptr) }, size: _size, cap: _cap })
126 }
127
128 #[inline]
134 pub fn from_c_ref_mut(ptr: *mut c_void, size: i32) -> Self {
135 assert!(size >= 0);
136 assert!(!ptr.is_null());
137 let _cap = size as u32 | MAX_BUFFER_SIZE as u32;
140 Self { buf_ptr: unsafe { NonNull::new_unchecked(ptr) }, size: size as u32, cap: _cap }
141 }
142
143 #[inline]
149 pub fn from_c_ref_const(ptr: *const c_void, size: i32) -> Self {
150 assert!(size >= 0);
151 assert!(!ptr.is_null());
152 Self {
155 buf_ptr: unsafe { NonNull::new_unchecked(ptr as *mut c_void) },
156 size: size as u32,
157 cap: size as u32,
158 }
159 }
160
161 #[inline(always)]
163 pub fn is_owned(&self) -> bool {
164 self.size & (MAX_BUFFER_SIZE as u32) != 0
165 }
166
167 #[inline(always)]
169 pub fn is_mutable(&self) -> bool {
170 self.cap & (MAX_BUFFER_SIZE as u32) != 0
171 }
172
173 #[inline(always)]
175 pub fn len(&self) -> usize {
176 let size = self.size & (MAX_BUFFER_SIZE as u32 - 1);
177 size as usize
178 }
179
180 #[inline(always)]
182 pub fn capacity(&self) -> usize {
183 let cap = self.cap & (MAX_BUFFER_SIZE as u32 - 1);
184 cap as usize
185 }
186
187 #[inline(always)]
189 pub fn set_len(&mut self, len: usize) {
190 assert!(len < MAX_BUFFER_SIZE, "size {} >= {} is not supported", len, MAX_BUFFER_SIZE);
191 assert!(len <= self.cap as usize, "size {} must be <= {}", len, self.cap);
192 let owned: u32 = self.size & MAX_BUFFER_SIZE as u32;
193 self.size = owned | len as u32;
194 }
195
196 #[inline(always)]
197 pub fn as_ref(&self) -> &[u8] {
198 unsafe { slice::from_raw_parts(self.buf_ptr.as_ptr() as *const u8, self.len()) }
199 }
200
201 #[inline(always)]
205 pub fn as_mut(&mut self) -> &mut [u8] {
206 #[cfg(debug_assertions)]
207 {
208 if !self.is_mutable() {
209 panic!("Cannot change a mutable buffer")
210 }
211 }
212 unsafe { slice::from_raw_parts_mut(self.buf_ptr.as_ptr() as *mut u8, self.len()) }
213 }
214
215 #[inline(always)]
217 pub fn is_aligned(&self) -> bool {
218 is_aligned(self.buf_ptr.as_ptr() as usize, self.capacity())
219 }
220
221 #[inline]
223 pub fn get_raw(&self) -> *const u8 {
224 self.buf_ptr.as_ptr() as *const u8
225 }
226
227 #[inline]
229 pub fn get_raw_mut(&mut self) -> *mut u8 {
230 self.buf_ptr.as_ptr() as *mut u8
231 }
232
233 #[inline]
245 pub fn copy_from(&mut self, offset: usize, src: &[u8]) {
246 let size = self.len();
247 let dst = self.as_mut();
248 if offset > 0 {
249 assert!(offset < size);
250 safe_copy(&mut dst[offset..], src);
251 } else {
252 safe_copy(dst, src);
253 }
254 }
255
256 #[inline]
262 pub fn copy_and_clean(&mut self, offset: usize, other: &[u8]) {
263 let end: usize;
264 let size = self.len();
265 let dst = self.as_mut();
266 assert!(offset < size);
267 if offset > 0 {
268 set_zero(&mut dst[0..offset]);
269 end = offset + safe_copy(&mut dst[offset..], other);
270 } else {
271 end = safe_copy(dst, other);
272 }
273 if size > end {
274 set_zero(&mut dst[end..]);
275 }
276 }
277
278 #[inline]
280 pub fn zero(&mut self) {
281 set_zero(self);
282 }
283
284 #[inline]
286 pub fn set_zero(&mut self, offset: usize, len: usize) {
287 let _len = self.len();
288 let mut end = offset + len;
289 if end > _len {
290 end = _len;
291 }
292 let buf = self.as_mut();
293 if offset > 0 || end < _len {
294 set_zero(&mut buf[offset..end]);
295 } else {
296 set_zero(buf);
297 }
298 }
299}
300
301impl Clone for Buffer {
304 fn clone(&self) -> Self {
305 let mut new_buf = if self.is_aligned() {
306 Self::aligned(self.capacity() as i32).unwrap()
307 } else {
308 Self::alloc(self.capacity() as i32).unwrap()
309 };
310 if self.len() != self.capacity() {
311 new_buf.set_len(self.len());
312 }
313 safe_copy(new_buf.as_mut(), self.as_ref());
314 new_buf
315 }
316}
317
impl Drop for Buffer {
    /// Frees the allocation, but only when this buffer owns it; borrowed
    /// C pointers (`from_c_ref_*`) are left for their real owner to release.
    fn drop(&mut self) {
        if self.is_owned() {
            unsafe {
                // SAFETY: owned pointers originate from malloc/posix_memalign
                // in `_alloc` (or a leaked Vec via `From<Vec<u8>>` —
                // NOTE(review): freeing a Rust-allocated pointer with
                // `libc::free` assumes a malloc-compatible global allocator;
                // confirm).
                free(self.buf_ptr.as_ptr());
            }
        }
    }
}
328
329impl Into<Vec<u8>> for Buffer {
331 fn into(mut self) -> Vec<u8> {
332 if !self.is_owned() {
333 panic!("buffer is c ref, not owned");
334 }
335 self.size &= MAX_BUFFER_SIZE as u32 - 1;
337 return unsafe {
338 Vec::<u8>::from_raw_parts(self.buf_ptr.as_ptr() as *mut u8, self.len(), self.capacity())
339 };
340 }
341}
342
impl From<Vec<u8>> for Buffer {
    /// Takes over a `Vec`'s allocation without copying; the resulting
    /// buffer is tagged owned and mutable, so it is freed on drop.
    fn from(buf: Vec<u8>) -> Self {
        let size = buf.len();
        let cap = buf.capacity();
        assert!(size < MAX_BUFFER_SIZE, "size {} >= {} is not supported", size, MAX_BUFFER_SIZE);
        assert!(cap < MAX_BUFFER_SIZE, "cap {} >= {} is not supported", cap, MAX_BUFFER_SIZE);
        // Bit 31 tags the buffer as owned (in `size`) and mutable (in `cap`).
        let _size = size as u32 | MAX_BUFFER_SIZE as u32;
        let _cap = cap as u32 | MAX_BUFFER_SIZE as u32;
        Buffer {
            // SAFETY: `Vec::leak` never yields a null data pointer.
            // NOTE(review): Drop releases this with `libc::free`; that is
            // only sound when the global allocator is malloc-compatible —
            // confirm. Also assumes the leaked pointer stays valid for the
            // full `cap` bytes captured above.
            buf_ptr: unsafe { NonNull::new_unchecked(buf.leak().as_mut_ptr() as *mut c_void) },
            size: _size,
            cap: _cap,
        }
    }
}
361
362impl Deref for Buffer {
363 type Target = [u8];
364
365 #[inline]
366 fn deref(&self) -> &[u8] {
367 self.as_ref()
368 }
369}
370
371impl AsRef<[u8]> for Buffer {
372 #[inline]
373 fn as_ref(&self) -> &[u8] {
374 self.as_ref()
375 }
376}
377
378impl AsMut<[u8]> for Buffer {
382 #[inline]
383 fn as_mut(&mut self) -> &mut [u8] {
384 self.as_mut()
385 }
386}
387
388impl DerefMut for Buffer {
389 #[inline]
390 fn deref_mut(&mut self) -> &mut [u8] {
391 self.as_mut()
392 }
393}