io_engine/buffer/buffer.rs

use super::utils::{safe_copy, set_zero};
use libc;
use nix::errno::Errno;
use std::slice;
use std::{
    fmt,
    ops::{Deref, DerefMut},
};

use fail::fail_point;

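/// A byte buffer backed by `libc` heap memory, or a non-owning view over
/// memory handed in from C. Ownership is tracked in the high bit of `size`
/// and mutability in the high bit of `cap`; the low 31 bits hold the values.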
#[repr(C, align(1))]
pub struct Buffer {
    pub buf_ptr: *mut libc::c_void,
    /// Length in bytes; the high bit marks an owned allocation.
    pub(crate) size: u32,
    /// Capacity in bytes; the high bit marks a mutable buffer.
    pub(crate) cap: u32,
}

impl fmt::Debug for Buffer {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "buffer {:p} size {}", self.get_raw(), self.len())
    }
}

unsafe impl Send for Buffer {}

unsafe impl Sync for Buffer {}

/// Minimum alignment for aligned allocations (one 512-byte sector).
pub const MIN_ALIGN: usize = 512;
/// Upper bound on buffer sizes. The `1 << 31` bit doubles as the flag stored
/// in the high bit of `size` (owned) and `cap` (mutable).
pub const MAX_BUFFER_SIZE: usize = 1 << 31;

fn is_aligned(offset: usize, size: usize) -> bool {
    (offset & (MIN_ALIGN - 1) == 0) && (size & (MIN_ALIGN - 1) == 0)
}

impl Buffer {
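    /// Allocates an owned buffer of `size` bytes aligned to `MIN_ALIGN`
    /// (via `posix_memalign`). Under the `alloc_buf` failpoint the buffer is
    /// filled with random data by `rand_buffer` (assumed to be in scope from
    /// the failpoint test utilities).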
    #[inline]
    pub fn aligned(size: usize) -> Result<Buffer, Errno> {
        let mut _buf = Self::_alloc(MIN_ALIGN, size)?;
        fail_point!("alloc_buf", |_| {
            rand_buffer(&mut _buf);
            return Ok(_buf);
        });
        Ok(_buf)
    }

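    /// Allocates an owned, unaligned buffer of `size` bytes via `malloc`.
    /// The same `alloc_buf` failpoint applies as in `aligned`.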
    #[inline]
    pub fn alloc(size: usize) -> Result<Buffer, Errno> {
        let mut _buf = Self::_alloc(0, size)?;
        fail_point!("alloc_buf", |_| {
            rand_buffer(&mut _buf);
            return Ok(_buf);
        });
        Ok(_buf)
    }

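    /// Shared allocation path: `posix_memalign` when `align > 0`, plain
    /// `malloc` otherwise. The returned buffer is marked owned and mutable.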
    #[inline]
    fn _alloc(align: usize, size: usize) -> Result<Self, Errno> {
        let mut ptr: *mut libc::c_void = std::ptr::null_mut();
        log_assert!(
            size < MAX_BUFFER_SIZE,
            "size {} >= {} is not supported",
            size,
            MAX_BUFFER_SIZE
        );
        if align > 0 {
            debug_assert!((align & (MIN_ALIGN - 1)) == 0);
            debug_assert!((size & (align - 1)) == 0);
            unsafe {
                let res =
                    libc::posix_memalign(&mut ptr, align as libc::size_t, size as libc::size_t);
                if res != 0 {
                    return Err(Errno::ENOMEM);
                }
            }
        } else {
            ptr = unsafe { libc::malloc(size as libc::size_t) };
            if ptr.is_null() {
                return Err(Errno::ENOMEM);
            }
        }
        // Set the high bit: the buffer owns its memory and is mutable.
        let _size = size as u32 | MAX_BUFFER_SIZE as u32;
        let _cap = _size;
        Ok(Self { buf_ptr: ptr, size: _size, cap: _cap })
    }

    /// Returns `true` if the buffer owns its allocation and must free it on drop.
    #[inline(always)]
    pub fn is_owned(&self) -> bool {
        self.size & (MAX_BUFFER_SIZE as u32) != 0
    }

    /// Returns `true` if the buffer is writable (checked by `as_mut` in debug builds).
    #[inline(always)]
    pub fn is_mutable(&self) -> bool {
        self.cap & (MAX_BUFFER_SIZE as u32) != 0
    }

    /// Current length in bytes, with the flag bit masked off.
    #[inline(always)]
    pub fn len(&self) -> usize {
        let size = self.size & (MAX_BUFFER_SIZE as u32 - 1);
        size as usize
    }

    /// Capacity in bytes, with the flag bit masked off.
    #[inline(always)]
    pub fn capacity(&self) -> usize {
        let cap = self.cap & (MAX_BUFFER_SIZE as u32 - 1);
        cap as usize
    }

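    /// Changes the logical length without reallocating. `len` must not exceed
    /// the capacity; the ownership flag in `size` is preserved.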
    #[inline(always)]
    pub fn set_len(&mut self, len: usize) {
        log_assert!(len < MAX_BUFFER_SIZE, "size {} >= {} is not supported", len, MAX_BUFFER_SIZE);
        // Compare against the masked capacity, not the raw `cap` field, which
        // carries the mutability flag in its high bit.
        log_assert!(len <= self.capacity(), "size {} must be <= {}", len, self.capacity());
        let owned: u32 = self.size & MAX_BUFFER_SIZE as u32;
        self.size = owned | len as u32;
    }

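    /// Wraps a mutable pointer handed in from C code. The resulting buffer is
    /// mutable but not owned, so dropping it does not free the memory.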
    #[inline]
    pub fn from_c_ref_mut(ptr: *mut libc::c_void, size: usize) -> Self {
        log_assert!(
            size < MAX_BUFFER_SIZE,
            "size {} >= {} is not supported",
            size,
            MAX_BUFFER_SIZE
        );
        log_assert!(!ptr.is_null());
        // Mutable (flag in `cap`) but not owned (no flag in `size`).
        let _cap = size as u32 | MAX_BUFFER_SIZE as u32;
        Self { buf_ptr: ptr, size: size as u32, cap: _cap }
    }

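    /// Wraps a const pointer handed in from C code. The resulting buffer is
    /// neither owned nor mutable; `as_mut` panics on it in debug builds.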
    #[inline]
    pub fn from_c_ref_const(ptr: *const libc::c_void, size: usize) -> Self {
        log_assert!(
            size < MAX_BUFFER_SIZE,
            "size {} >= {} is not supported",
            size,
            MAX_BUFFER_SIZE
        );
        log_assert!(!ptr.is_null());
        // No flag bits: the buffer is neither owned nor mutable.
        Self { buf_ptr: ptr as *mut libc::c_void, size: size as u32, cap: size as u32 }
    }

    #[inline(always)]
    pub fn as_ref(&self) -> &[u8] {
        unsafe { slice::from_raw_parts(self.buf_ptr as *const u8, self.len()) }
    }

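    /// Mutable slice over the initialized bytes. In debug builds this panics
    /// if the buffer was created from a const C reference.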
    #[inline(always)]
    pub fn as_mut(&mut self) -> &mut [u8] {
        #[cfg(debug_assertions)]
        {
            if !self.is_mutable() {
                panic!("Cannot mutate an immutable buffer")
            }
        }
        unsafe { slice::from_raw_parts_mut(self.buf_ptr as *mut u8, self.len()) }
    }

    /// Returns `true` if both the pointer and the capacity are `MIN_ALIGN`-aligned.
    #[inline(always)]
    pub fn is_aligned(&self) -> bool {
        is_aligned(self.buf_ptr as usize, self.capacity())
    }

    #[inline]
    pub fn get_raw(&self) -> *const u8 {
        self.buf_ptr as *const u8
    }

    #[inline]
    pub fn get_raw_mut(&mut self) -> *mut u8 {
        self.buf_ptr as *mut u8
    }

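    /// Copies `other` into the buffer starting at `offset` using `safe_copy`.
    /// Unlike `copy_and_clean`, bytes outside the written range are left
    /// untouched.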
    #[inline]
    pub fn copy_from(&mut self, offset: usize, other: &[u8]) {
        let size = self.len();
        let dst = self.as_mut();
        if offset > 0 {
            assert!(offset < size);
            safe_copy(&mut dst[offset..], other);
        } else {
            safe_copy(dst, other);
        }
    }

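    /// Copies `other` into the buffer at `offset` and zero-fills everything
    /// outside the copied range (the prefix before `offset` and the suffix
    /// after the last copied byte).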
    #[inline]
    pub fn copy_and_clean(&mut self, offset: usize, other: &[u8]) {
        let end: usize;
        let size = self.len();
        let dst = self.as_mut();
        assert!(offset < size);
        if offset > 0 {
            set_zero(&mut dst[0..offset]);
            end = offset + safe_copy(&mut dst[offset..], other);
        } else {
            end = safe_copy(dst, other);
        }
        if size > end {
            set_zero(&mut dst[end..]);
        }
    }

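    /// Zero-fills the entire buffer.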
    #[inline]
    pub fn zero(&mut self) {
        set_zero(self);
    }

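    /// Zero-fills `len` bytes starting at `offset`, clamped to the buffer
    /// length.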
    #[inline]
    pub fn set_zero(&mut self, offset: usize, len: usize) {
        let _len = self.len();
        let mut end = offset + len;
        if end > _len {
            end = _len;
        }
        let buf = self.as_mut();
        if offset > 0 || end < _len {
            set_zero(&mut buf[offset..end]);
        } else {
            set_zero(buf);
        }
    }
}

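/// Cloning allocates a fresh owned buffer with the same capacity (aligned if
/// the source is aligned), carries over the length, and copies the contents.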
impl Clone for Buffer {
    fn clone(&self) -> Self {
        let mut new_buf = if self.is_aligned() {
            Self::aligned(self.capacity()).unwrap()
        } else {
            Self::alloc(self.capacity()).unwrap()
        };
        if self.len() != self.capacity() {
            new_buf.set_len(self.len());
        }
        safe_copy(new_buf.as_mut(), self.as_ref());
        new_buf
    }
}

impl Drop for Buffer {
    fn drop(&mut self) {
        if self.is_owned() {
            unsafe {
                libc::free(self.buf_ptr);
            }
        }
    }
}

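/// Converts an owned buffer into a `Vec<u8>` without copying. This relies on
/// the allocation being compatible with Rust's global allocator (the usual
/// case when the global allocator is the system `malloc`); non-owned buffers
/// panic.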
impl From<Buffer> for Vec<u8> {
    fn from(mut buf: Buffer) -> Vec<u8> {
        if !buf.is_owned() {
            panic!("buffer is c ref, not owned");
        }
        // Clear the ownership bit so that Drop does not free the memory now
        // owned by the returned Vec.
        buf.size &= MAX_BUFFER_SIZE as u32 - 1;
        unsafe { Vec::<u8>::from_raw_parts(buf.buf_ptr as *mut u8, buf.len(), buf.capacity()) }
    }
}

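/// Converts a `Vec<u8>` into an owned, mutable buffer without copying. The
/// vector's allocation is leaked into the buffer and later released with
/// `libc::free` in `Drop` (again assuming allocator compatibility).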
impl From<Vec<u8>> for Buffer {
    fn from(buf: Vec<u8>) -> Self {
        let size = buf.len();
        let cap = buf.capacity();
        log_assert!(
            size < MAX_BUFFER_SIZE,
            "size {} >= {} is not supported",
            size,
            MAX_BUFFER_SIZE
        );
        log_assert!(cap < MAX_BUFFER_SIZE, "cap {} >= {} is not supported", cap, MAX_BUFFER_SIZE);
        // Mark the buffer as owned and mutable.
        let _size = size as u32 | MAX_BUFFER_SIZE as u32;
        let _cap = cap as u32 | MAX_BUFFER_SIZE as u32;
        Buffer { buf_ptr: buf.leak().as_mut_ptr() as *mut libc::c_void, size: _size, cap: _cap }
    }
}

impl Deref for Buffer {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_ref()
    }
}

impl AsRef<[u8]> for Buffer {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_ref()
    }
}

impl AsMut<[u8]> for Buffer {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut()
    }
}

impl DerefMut for Buffer {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut()
    }
}