use super::utils::{safe_copy, set_zero};
use libc;
use nix::errno::Errno;
use std::slice;
use std::{
    fmt,
    ops::{Deref, DerefMut},
};

use fail::fail_point;

#[repr(C, align(1))]
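/// A byte buffer backed by raw memory that is either allocated by this module
/// (via `malloc`/`posix_memalign`) or borrowed from a C pointer. Bit 31 of
/// `size` marks the buffer as owned; bit 31 of `cap` marks it as mutable.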
pub struct Buffer {
    buf_ptr: *mut libc::c_void,
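    /// Length in bytes; bit 31 is set when the buffer owns its allocation.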
    pub(crate) size: u32,
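    /// Capacity in bytes; bit 31 is set when the buffer is mutable.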
    pub(crate) cap: u32,
}

impl fmt::Debug for Buffer {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "buffer {:p} size {}", self.get_raw(), self.len())
    }
}

unsafe impl Send for Buffer {}

unsafe impl Sync for Buffer {}

pub const MIN_ALIGN: usize = 512;
pub const MAX_BUFFER_SIZE: usize = 1 << 31;

fn is_aligned(offset: usize, size: usize) -> bool {
    (offset & (MIN_ALIGN - 1) == 0) && (size & (MIN_ALIGN - 1) == 0)
}

impl Buffer {
    #[inline]
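    /// Allocates an owned, mutable buffer aligned to `MIN_ALIGN` bytes.
    /// `size` must be a multiple of `MIN_ALIGN` and less than `MAX_BUFFER_SIZE`.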
    pub fn aligned(size: usize) -> Result<Buffer, Errno> {
        let mut _buf = Self::_alloc(MIN_ALIGN, size)?;
        fail_point!("alloc_buf", |_| {
            rand_buffer(&mut _buf);
            Ok(_buf)
        });
        Ok(_buf)
    }

    #[inline]
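    /// Allocates an owned, mutable buffer with no particular alignment.
    /// `size` must be less than `MAX_BUFFER_SIZE`.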
    pub fn alloc(size: usize) -> Result<Buffer, Errno> {
        let mut _buf = Self::_alloc(0, size)?;
        fail_point!("alloc_buf", |_| {
            rand_buffer(&mut _buf);
            Ok(_buf)
        });
        Ok(_buf)
    }

    #[inline]
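    /// Common allocation path: `align == 0` uses `malloc`, otherwise `posix_memalign`.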
    fn _alloc(align: usize, size: usize) -> Result<Self, Errno> {
        let mut ptr: *mut libc::c_void = std::ptr::null_mut();
        log_assert!(
            size < MAX_BUFFER_SIZE,
            "size {} >= {} is not supported",
            size,
            MAX_BUFFER_SIZE
        );
        if align > 0 {
            debug_assert!((align & (MIN_ALIGN - 1)) == 0);
            debug_assert!((size & (align - 1)) == 0);
            unsafe {
                let res =
                    libc::posix_memalign(&mut ptr, align as libc::size_t, size as libc::size_t);
                if res != 0 {
                    return Err(Errno::ENOMEM);
                }
            }
        } else {
            ptr = unsafe { libc::malloc(size as libc::size_t) };
            if ptr.is_null() {
                return Err(Errno::ENOMEM);
            }
        }
        let _size = size as u32 | MAX_BUFFER_SIZE as u32;
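        // bit 31 marks the allocation as owned (in `size`) and mutable (in `cap`)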
        let _cap = _size;
        Ok(Self {
            buf_ptr: ptr,
            size: _size,
            cap: _cap,
        })
    }

    #[inline]
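    /// Wraps a mutable C pointer without taking ownership: the memory is not
    /// freed on drop, but it may be written through `as_mut`.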
    pub fn from_c_ref_mut(ptr: *mut libc::c_void, size: usize) -> Self {
        log_assert!(
            size < MAX_BUFFER_SIZE,
            "size {} >= {} is not supported",
            size,
            MAX_BUFFER_SIZE
        );
        log_assert!(!ptr.is_null());
        let _cap = size as u32 | MAX_BUFFER_SIZE as u32;
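        // owned bit stays clear in `size`; mutable bit is set in `cap`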
        Self {
            buf_ptr: ptr,
            size: size as u32,
            cap: _cap,
        }
    }

    #[inline]
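    /// Wraps a read-only C pointer without taking ownership: the memory is
    /// neither freed on drop nor mutable (`as_mut` panics in debug builds).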
    pub fn from_c_ref_const(ptr: *const libc::c_void, size: usize) -> Self {
        log_assert!(
            size < MAX_BUFFER_SIZE,
            "size {} >= {} is not supported",
            size,
            MAX_BUFFER_SIZE
        );
        log_assert!(!ptr.is_null());
        Self {
            buf_ptr: ptr as *mut libc::c_void,
            size: size as u32,
            cap: size as u32,
        }
    }
155
156 #[inline(always)]
157 pub fn is_owned(&self) -> bool {
158 self.size & (MAX_BUFFER_SIZE as u32) != 0
159 }
160
161 #[inline(always)]
162 pub fn is_mutable(&self) -> bool {
163 self.cap & (MAX_BUFFER_SIZE as u32) != 0
164 }
165
166 #[inline(always)]
167 pub fn len(&self) -> usize {
168 let size = self.size & (MAX_BUFFER_SIZE as u32 - 1);
169 size as usize
170 }
171
172 #[inline(always)]
173 pub fn capacity(&self) -> usize {
174 let cap = self.cap & (MAX_BUFFER_SIZE as u32 - 1);
175 cap as usize
176 }
177
178 #[inline(always)]
179 pub fn set_len(&mut self, len: usize) {
180 log_assert!(
181 len < MAX_BUFFER_SIZE,
182 "size {} >= {} is not supported",
183 len,
184 MAX_BUFFER_SIZE
185 );
        log_assert!(
            len <= self.capacity(),
            "size {} must be <= {}",
            len,
            self.capacity()
        );
        let owned: u32 = self.size & MAX_BUFFER_SIZE as u32;
        self.size = owned | len as u32;
    }

    #[inline(always)]
    pub fn as_ref(&self) -> &[u8] {
        unsafe { slice::from_raw_parts(self.buf_ptr as *const u8, self.len()) }
    }

    #[inline(always)]
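    /// Returns the buffer contents as a mutable slice. In debug builds this
    /// panics if the buffer was created from a read-only C pointer.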
    pub fn as_mut(&mut self) -> &mut [u8] {
        #[cfg(debug_assertions)]
        {
            if !self.is_mutable() {
                panic!("Cannot mutate an immutable buffer")
            }
        }
        unsafe { slice::from_raw_parts_mut(self.buf_ptr as *mut u8, self.len()) }
    }

    #[inline(always)]
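    /// Returns true if both the pointer and the capacity are `MIN_ALIGN`-aligned.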
    pub fn is_aligned(&self) -> bool {
        is_aligned(self.buf_ptr as usize, self.capacity())
    }

    #[inline]
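    /// Returns the underlying pointer as `*const u8`.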
    pub fn get_raw(&self) -> *const u8 {
        self.buf_ptr as *const u8
    }

    #[inline]
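    /// Returns the underlying pointer as `*mut u8`.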
    pub fn get_raw_mut(&mut self) -> *mut u8 {
        self.buf_ptr as *mut u8
    }

    #[inline]
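    /// Copies `other` into the buffer starting at `offset` via `safe_copy`;
    /// bytes outside the copied range are left untouched. `offset` must be
    /// within the buffer's length.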
    pub fn copy_from(&mut self, offset: usize, other: &[u8]) {
        let size = self.len();
        let dst = self.as_mut();
        if offset > 0 {
            assert!(offset < size);
            safe_copy(&mut dst[offset..], other);
        } else {
            safe_copy(dst, other);
        }
    }

    #[inline]
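    /// Copies `other` into the buffer starting at `offset` and zeroes every
    /// byte outside the copied range (the prefix before `offset` and the tail
    /// after the copy).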
    pub fn copy_and_clean(&mut self, offset: usize, other: &[u8]) {
        let end: usize;
        let size = self.len();
        let dst = self.as_mut();
        assert!(offset < size);
        if offset > 0 {
            set_zero(&mut dst[0..offset]);
            end = offset + safe_copy(&mut dst[offset..], other);
        } else {
            end = safe_copy(dst, other);
        }
        if size > end {
            set_zero(&mut dst[end..]);
        }
    }

    #[inline]
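    /// Zeroes the entire buffer.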
    pub fn zero(&mut self) {
        set_zero(self);
    }

    #[inline]
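    /// Zeroes `len` bytes starting at `offset`, clamped to the buffer's length.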
    pub fn set_zero(&mut self, offset: usize, len: usize) {
        let _len = self.len();
        let mut end = offset + len;
        if end > _len {
            end = _len;
        }
        let buf = self.as_mut();
        if offset > 0 || end < _len {
            set_zero(&mut buf[offset..end]);
        } else {
            set_zero(buf);
        }
    }
}

impl Clone for Buffer {
    fn clone(&self) -> Self {
        let mut new_buf = if self.is_aligned() {
            Self::aligned(self.capacity()).unwrap()
        } else {
            Self::alloc(self.capacity()).unwrap()
        };
        if self.len() != self.capacity() {
            new_buf.set_len(self.len());
        }
        safe_copy(new_buf.as_mut(), self.as_ref());
        new_buf
    }
}

impl Drop for Buffer {
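    // only allocations made by this module are freed; borrowed C pointers are left alone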
    fn drop(&mut self) {
        if self.is_owned() {
            unsafe {
                libc::free(self.buf_ptr);
            }
        }
    }
}

impl From<Buffer> for Vec<u8> {
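    // NOTE: assumes the buffer's allocation can be handed over to `Vec`, i.e. that
    // the global allocator is compatible with the `malloc`/`posix_memalign` allocation.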
    fn from(mut buf: Buffer) -> Vec<u8> {
        if !buf.is_owned() {
            panic!("buffer is a C reference, not owned");
        }
        buf.size &= MAX_BUFFER_SIZE as u32 - 1;
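        // the owned bit is cleared so Drop will not free memory now owned by the Vec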
        unsafe {
            Vec::<u8>::from_raw_parts(buf.buf_ptr as *mut u8, buf.len(), buf.capacity())
        }
    }
}

impl From<Vec<u8>> for Buffer {
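    // NOTE: the Vec's allocation is leaked here and later released with `libc::free`
    // in Drop, which assumes the global allocator is the system allocator.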
    fn from(buf: Vec<u8>) -> Self {
        let size = buf.len();
        let cap = buf.capacity();
        log_assert!(
            size < MAX_BUFFER_SIZE,
            "size {} >= {} is not supported",
            size,
            MAX_BUFFER_SIZE
        );
        log_assert!(
            cap < MAX_BUFFER_SIZE,
            "cap {} >= {} is not supported",
            cap,
            MAX_BUFFER_SIZE
        );
        let _size = size as u32 | MAX_BUFFER_SIZE as u32;
        let _cap = cap as u32 | MAX_BUFFER_SIZE as u32;
        Buffer {
            buf_ptr: buf.leak().as_mut_ptr() as *mut libc::c_void,
            size: _size,
            cap: _cap,
        }
    }
}

impl Deref for Buffer {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_ref()
    }
}

impl AsRef<[u8]> for Buffer {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_ref()
    }
}

impl AsMut<[u8]> for Buffer {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut()
    }
}

impl DerefMut for Buffer {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut()
    }
}