1use super::utils::{safe_copy, set_zero};
2use libc::{c_void, free, malloc, posix_memalign};
3use nix::errno::Errno;
4use std::slice;
5use std::{
6 fmt,
7 ops::{Deref, DerefMut},
8 ptr::{NonNull, null_mut},
9};
10
#[repr(C)]
/// A raw byte buffer that either owns a C heap allocation or borrows
/// foreign (C-provided) memory.
///
/// Encoding (see `is_owned` / `is_mutable` / `len` / `capacity`):
/// - `size`: low 31 bits = logical length; high bit set = this buffer owns
///   the allocation and `Drop` must `free` it.
/// - `cap`: low 31 bits = allocated capacity; high bit set = the contents
///   may be written through `as_mut`.
pub struct Buffer {
    // Never null; points at the start of the allocation or borrowed region.
    buf_ptr: NonNull<c_void>,
    // length | ownership flag (high bit).
    pub(crate) size: u32,
    // capacity | mutability flag (high bit).
    pub(crate) cap: u32,
}
28
29impl fmt::Debug for Buffer {
30 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
31 write!(f, "buffer {:p} size {}", self.get_raw(), self.len())
32 }
33}
34
// SAFETY: `Buffer` is a pointer plus two plain integers with no thread
// affinity; moving it to another thread just moves responsibility for the
// allocation. NOTE(review): for c-ref buffers this assumes the foreign
// memory outlives the Buffer on any thread — confirm at the FFI boundary.
unsafe impl Send for Buffer {}

// SAFETY: there is no interior mutability here — all writes go through
// `&mut Buffer`, which the borrow checker already serializes; `&self`
// methods only read. NOTE(review): assumes C code does not mutate borrowed
// (c-ref) memory concurrently — verify callers.
unsafe impl Sync for Buffer {}
38
/// Minimum alignment (bytes) used by aligned allocations and alignment checks.
pub const MIN_ALIGN: u32 = 512;
/// Exclusive upper bound on buffer sizes; doubles as the flag bit (1 << 31).
pub const MAX_BUFFER_SIZE: usize = 1 << 31;

/// Returns `true` when both `offset` and `size` are multiples of `MIN_ALIGN`.
fn is_aligned(offset: usize, size: usize) -> bool {
    let mask = MIN_ALIGN as usize - 1;
    offset & mask == 0 && size & mask == 0
}
45
46impl Buffer {
47 #[inline]
54 pub fn aligned(size: i32) -> Result<Buffer, Errno> {
55 let mut _buf = Self::_alloc(MIN_ALIGN, size)?;
56 #[cfg(all(feature = "fail", feature = "rand"))]
57 fail::fail_point!("alloc_buf", |_| {
58 rand_buffer(&mut _buf);
59 return Ok(_buf);
60 });
61 return Ok(_buf);
62 }
63
64 #[inline]
73 pub fn aligned_by(size: i32, align: u32) -> Result<Buffer, Errno> {
74 let mut _buf = Self::_alloc(align, size)?;
75 #[cfg(all(feature = "fail", feature = "rand"))]
76 fail::fail_point!("alloc_buf", |_| {
77 rand_buffer(&mut _buf);
78 return Ok(_buf);
79 });
80 return Ok(_buf);
81 }
82
83 #[inline]
90 pub fn alloc(size: i32) -> Result<Buffer, Errno> {
91 let mut _buf = Self::_alloc(0, size)?;
92 #[cfg(all(feature = "fail", feature = "rand"))]
93 fail::fail_point!("alloc_buf", |_| {
94 rand_buffer(&mut _buf);
95 return Ok(_buf);
96 });
97 return Ok(_buf);
98 }
99
100 #[inline]
104 fn _alloc(align: u32, size: i32) -> Result<Self, Errno> {
105 assert!(size > 0);
106 let mut ptr: *mut c_void = null_mut();
107 if align > 0 {
108 debug_assert!((align & (MIN_ALIGN - 1)) == 0);
109 debug_assert!((size as u32 & (align - 1)) == 0);
110 unsafe {
111 let res = posix_memalign(&mut ptr, align as libc::size_t, size as libc::size_t);
112 if res != 0 {
113 return Err(Errno::ENOMEM);
114 }
115 }
116 } else {
117 ptr = unsafe { malloc(size as libc::size_t) };
118 if ptr.is_null() {
119 return Err(Errno::ENOMEM);
120 }
121 }
122 let _size = size as u32 | MAX_BUFFER_SIZE as u32;
124 let _cap = _size;
126 Ok(Self { buf_ptr: unsafe { NonNull::new_unchecked(ptr) }, size: _size, cap: _cap })
127 }
128
129 #[inline]
135 pub fn from_c_ref_mut(ptr: *mut c_void, size: i32) -> Self {
136 assert!(size >= 0);
137 assert!(!ptr.is_null());
138 let _cap = size as u32 | MAX_BUFFER_SIZE as u32;
141 Self { buf_ptr: unsafe { NonNull::new_unchecked(ptr) }, size: size as u32, cap: _cap }
142 }
143
144 #[inline]
150 pub fn from_c_ref_const(ptr: *const c_void, size: i32) -> Self {
151 assert!(size >= 0);
152 assert!(!ptr.is_null());
153 Self {
156 buf_ptr: unsafe { NonNull::new_unchecked(ptr as *mut c_void) },
157 size: size as u32,
158 cap: size as u32,
159 }
160 }
161
162 #[inline(always)]
164 pub fn is_owned(&self) -> bool {
165 self.size & (MAX_BUFFER_SIZE as u32) != 0
166 }
167
168 #[inline(always)]
170 pub fn is_mutable(&self) -> bool {
171 self.cap & (MAX_BUFFER_SIZE as u32) != 0
172 }
173
174 #[inline(always)]
176 pub fn len(&self) -> usize {
177 let size = self.size & (MAX_BUFFER_SIZE as u32 - 1);
178 size as usize
179 }
180
181 #[inline(always)]
183 pub fn capacity(&self) -> usize {
184 let cap = self.cap & (MAX_BUFFER_SIZE as u32 - 1);
185 cap as usize
186 }
187
188 #[inline(always)]
190 pub fn set_len(&mut self, len: usize) {
191 assert!(len < MAX_BUFFER_SIZE, "size {} >= {} is not supported", len, MAX_BUFFER_SIZE);
192 assert!(len <= self.cap as usize, "size {} must be <= {}", len, self.cap);
193 let owned: u32 = self.size & MAX_BUFFER_SIZE as u32;
194 self.size = owned | len as u32;
195 }
196
197 #[inline(always)]
198 pub fn as_ref(&self) -> &[u8] {
199 unsafe { slice::from_raw_parts(self.buf_ptr.as_ptr() as *const u8, self.len()) }
200 }
201
202 #[inline(always)]
206 pub fn as_mut(&mut self) -> &mut [u8] {
207 #[cfg(debug_assertions)]
208 {
209 if !self.is_mutable() {
210 panic!("Cannot change a mutable buffer")
211 }
212 }
213 unsafe { slice::from_raw_parts_mut(self.buf_ptr.as_ptr() as *mut u8, self.len()) }
214 }
215
216 #[inline(always)]
218 pub fn is_aligned(&self) -> bool {
219 is_aligned(self.buf_ptr.as_ptr() as usize, self.capacity())
220 }
221
222 #[inline]
224 pub fn get_raw(&self) -> *const u8 {
225 self.buf_ptr.as_ptr() as *const u8
226 }
227
228 #[inline]
230 pub fn get_raw_mut(&mut self) -> *mut u8 {
231 self.buf_ptr.as_ptr() as *mut u8
232 }
233
234 #[inline]
246 pub fn copy_from(&mut self, offset: usize, src: &[u8]) {
247 let size = self.len();
248 let dst = self.as_mut();
249 if offset > 0 {
250 assert!(offset < size);
251 safe_copy(&mut dst[offset..], src);
252 } else {
253 safe_copy(dst, src);
254 }
255 }
256
257 #[inline]
263 pub fn copy_and_clean(&mut self, offset: usize, other: &[u8]) {
264 let end: usize;
265 let size = self.len();
266 let dst = self.as_mut();
267 assert!(offset < size);
268 if offset > 0 {
269 set_zero(&mut dst[0..offset]);
270 end = offset + safe_copy(&mut dst[offset..], other);
271 } else {
272 end = safe_copy(dst, other);
273 }
274 if size > end {
275 set_zero(&mut dst[end..]);
276 }
277 }
278
279 #[inline]
281 pub fn zero(&mut self) {
282 set_zero(self);
283 }
284
285 #[inline]
287 pub fn set_zero(&mut self, offset: usize, len: usize) {
288 let _len = self.len();
289 let mut end = offset + len;
290 if end > _len {
291 end = _len;
292 }
293 let buf = self.as_mut();
294 if offset > 0 || end < _len {
295 set_zero(&mut buf[offset..end]);
296 } else {
297 set_zero(buf);
298 }
299 }
300}
301
302impl Clone for Buffer {
305 fn clone(&self) -> Self {
306 let mut new_buf = if self.is_aligned() {
307 Self::aligned(self.capacity() as i32).unwrap()
308 } else {
309 Self::alloc(self.capacity() as i32).unwrap()
310 };
311 if self.len() != self.capacity() {
312 new_buf.set_len(self.len());
313 }
314 safe_copy(new_buf.as_mut(), self.as_ref());
315 new_buf
316 }
317}
318
319impl Drop for Buffer {
321 fn drop(&mut self) {
322 if self.is_owned() {
323 unsafe {
324 free(self.buf_ptr.as_ptr());
325 }
326 }
327 }
328}
329
330impl Into<Vec<u8>> for Buffer {
332 fn into(mut self) -> Vec<u8> {
333 if !self.is_owned() {
334 panic!("buffer is c ref, not owned");
335 }
336 self.size &= MAX_BUFFER_SIZE as u32 - 1;
338 return unsafe {
339 Vec::<u8>::from_raw_parts(self.buf_ptr.as_ptr() as *mut u8, self.len(), self.capacity())
340 };
341 }
342}
343
344impl From<Vec<u8>> for Buffer {
346 fn from(buf: Vec<u8>) -> Self {
347 let size = buf.len();
348 let cap = buf.capacity();
349 assert!(size < MAX_BUFFER_SIZE, "size {} >= {} is not supported", size, MAX_BUFFER_SIZE);
350 assert!(cap < MAX_BUFFER_SIZE, "cap {} >= {} is not supported", cap, MAX_BUFFER_SIZE);
351 let _size = size as u32 | MAX_BUFFER_SIZE as u32;
353 let _cap = cap as u32 | MAX_BUFFER_SIZE as u32;
355 Buffer {
356 buf_ptr: unsafe { NonNull::new_unchecked(buf.leak().as_mut_ptr() as *mut c_void) },
357 size: _size,
358 cap: _cap,
359 }
360 }
361}
362
363impl Deref for Buffer {
364 type Target = [u8];
365
366 #[inline]
367 fn deref(&self) -> &[u8] {
368 self.as_ref()
369 }
370}
371
372impl AsRef<[u8]> for Buffer {
373 #[inline]
374 fn as_ref(&self) -> &[u8] {
375 self.as_ref()
376 }
377}
378
impl AsMut<[u8]> for Buffer {
    // Forwards to the inherent `as_mut`, which in debug builds panics when
    // the buffer is immutable (created via `from_c_ref_const`).
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut()
    }
}
388
impl DerefMut for Buffer {
    // Forwards to the inherent `as_mut`, so the debug-build mutability check
    // also applies to `&mut buffer[..]`-style access.
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut()
    }
}