1use crate::{FromInner, Inner, IntoInner};
2use std::borrow::Cow;
3use std::ffi::CStr;
4use std::ops::{
5 Bound, Index, Range, RangeBounds, RangeFrom, RangeFull, RangeInclusive, RangeTo,
6 RangeToInclusive,
7};
8use uv::{uv_buf_init, uv_buf_t};
9
/// Error returned when an operation needs buffer contents but the buffer
/// has no allocated storage (its `base` pointer is null).
#[derive(Debug)]
pub struct EmptyBufError;
13
14impl std::fmt::Display for EmptyBufError {
15 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
16 f.write_str("The Buf is empty.")
17 }
18}
19
20impl std::error::Error for EmptyBufError {
21 fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
22 None
23 }
24}
25
26fn calc_alloc_size_alignment(size: usize) -> crate::Result<(usize, usize)> {
28 let layout = std::alloc::Layout::new::<std::os::raw::c_char>();
31 let alloc_size = layout
32 .align()
33 .checked_mul(size)
34 .ok_or(crate::Error::ENOMEM)?;
35 Ok((alloc_size, layout.align()))
36}
37
38fn layout(size: usize) -> crate::Result<std::alloc::Layout> {
40 let (alloc_size, align) = calc_alloc_size_alignment(size)?;
41 std::alloc::Layout::from_size_align(alloc_size, align).or(Err(crate::Error::ENOMEM))
42}
43
/// A non-owning, read-only view over a libuv `uv_buf_t`.
///
/// `Clone`/`Copy` duplicate only the pointer; this handle does not manage
/// the lifetime of the `uv_buf_t` container or its storage.
#[derive(Clone, Copy)]
pub struct ReadonlyBuf {
    buf: *const uv_buf_t,
}
49
impl ReadonlyBuf {
    /// Returns `true` when the wrapped `uv_buf_t` has a non-null `base`
    /// pointer, i.e. backing storage is present.
    ///
    /// NOTE(review): `self.buf` itself is dereferenced unchecked — callers
    /// must guarantee the container pointer is valid and non-null.
    pub fn is_allocated(&self) -> bool {
        unsafe { !(*self.buf).base.is_null() }
    }

    /// Frees the backing storage of the buffer, if allocated.
    ///
    /// The layout is recomputed from the stored `len`, which must match the
    /// size originally used by `Buf::alloc` for the deallocation to be
    /// valid. Unlike `Buf::dealloc`, this does NOT reset `base`/`len`, so
    /// the view is left holding a dangling pointer afterwards.
    pub fn dealloc(&mut self) {
        unsafe {
            if self.is_allocated() {
                let len = (*self.buf).len as _;
                // If layout computation fails, the storage is silently leaked.
                if let Ok(layout) = layout(len) {
                    std::alloc::dealloc((*self.buf).base as _, layout);
                }
            }
        }
    }

    /// Borrows the buffer contents as a `CStr`.
    ///
    /// Returns `EmptyBufError` when no storage is allocated. Otherwise the
    /// bytes at `base` must be NUL-terminated: `Buf::new`/`new_from_bytes`
    /// write a trailing NUL, but `with_capacity` buffers are uninitialized —
    /// NOTE(review): confirm callers only use this on NUL-terminated data.
    pub fn as_c_str(&self) -> Result<&'_ CStr, EmptyBufError> {
        let ptr: *const uv_buf_t = self.inner();
        unsafe {
            if (*ptr).base.is_null() {
                Err(EmptyBufError)
            } else {
                Ok(CStr::from_ptr((*ptr).base))
            }
        }
    }

    /// Converts the buffer contents to a string, replacing invalid UTF-8
    /// sequences; same NUL-termination requirement as `as_c_str`.
    pub fn to_string_lossy(&self) -> Result<Cow<'_, str>, EmptyBufError> {
        let cstr: &CStr = self.as_c_str()?;
        Ok(cstr.to_string_lossy())
    }

    /// Borrows the first `len` bytes of the buffer as `&str`.
    ///
    /// Errors with `EmptyBufError` when unallocated, or with a UTF-8 error
    /// when the bytes are not valid UTF-8. `len` is trusted: the caller
    /// must not pass a length larger than the allocation.
    pub fn to_str(&self, len: usize) -> Result<&str, Box<dyn std::error::Error>> {
        let ptr: *const uv_buf_t = self.inner();
        unsafe {
            if (*ptr).base.is_null() {
                Err(Box::new(EmptyBufError))
            } else {
                Ok(std::str::from_utf8(std::slice::from_raw_parts(
                    (*ptr).base as _,
                    len,
                ))?)
            }
        }
    }
}
110
impl FromInner<*const uv_buf_t> for ReadonlyBuf {
    // Wraps a raw uv_buf_t pointer without taking ownership of it.
    fn from_inner(buf: *const uv_buf_t) -> ReadonlyBuf {
        ReadonlyBuf { buf }
    }
}
116
impl Inner<*const uv_buf_t> for ReadonlyBuf {
    // Exposes the wrapped raw pointer.
    fn inner(&self) -> *const uv_buf_t {
        self.buf
    }
}
122
123impl Index<usize> for ReadonlyBuf {
124 type Output = u8;
125
126 fn index(&self, index: usize) -> &Self::Output {
127 let len = if self.is_allocated() {
128 unsafe { (*self.buf).len }
129 } else {
130 0
131 };
132 if len <= (index as _) {
133 panic!("index {} out of range for Buf of length {}", index, len);
134 }
135 unsafe { &*((*self.buf).base.add(index) as *const u8) }
136 }
137}
138
139fn range_from_readonlybuf<I>(buf: &ReadonlyBuf, index: I) -> &[u8]
143where
144 I: RangeBounds<usize>,
145{
146 let len = if buf.is_allocated() {
147 unsafe { (*buf.buf).len as usize }
148 } else {
149 0
150 };
151
152 let start = match index.start_bound() {
153 Bound::Included(i) => *i,
154 Bound::Excluded(i) => *i + 1,
155 Bound::Unbounded => 0,
156 };
157 let end = match index.end_bound() {
158 Bound::Included(i) => *i + 1,
159 Bound::Excluded(i) => *i,
160 Bound::Unbounded => len,
161 };
162
163 if start > end {
164 panic!("Buf index starts at {} but ends at {}", start, end);
165 }
166
167 if len <= end {
168 panic!("index {} out of range for Buf of length {}", end, len);
169 }
170
171 unsafe { std::slice::from_raw_parts((*buf.buf).base.add(start) as *const u8, end - start) }
172}
173
// `buf[a..b]` — delegates to the shared range resolver.
impl Index<Range<usize>> for ReadonlyBuf {
    type Output = [u8];

    fn index(&self, index: Range<usize>) -> &Self::Output {
        range_from_readonlybuf(self, index)
    }
}
181
// `buf[a..]` — delegates to the shared range resolver.
impl Index<RangeFrom<usize>> for ReadonlyBuf {
    type Output = [u8];

    fn index(&self, index: RangeFrom<usize>) -> &Self::Output {
        range_from_readonlybuf(self, index)
    }
}
189
// `buf[..]` — delegates to the shared range resolver.
impl Index<RangeFull> for ReadonlyBuf {
    type Output = [u8];

    fn index(&self, index: RangeFull) -> &Self::Output {
        range_from_readonlybuf(self, index)
    }
}
197
// `buf[a..=b]` — delegates to the shared range resolver.
impl Index<RangeInclusive<usize>> for ReadonlyBuf {
    type Output = [u8];

    fn index(&self, index: RangeInclusive<usize>) -> &Self::Output {
        range_from_readonlybuf(self, index)
    }
}
205
// `buf[..b]` — delegates to the shared range resolver.
impl Index<RangeTo<usize>> for ReadonlyBuf {
    type Output = [u8];

    fn index(&self, index: RangeTo<usize>) -> &Self::Output {
        range_from_readonlybuf(self, index)
    }
}
213
// `buf[..=b]` — delegates to the shared range resolver.
impl Index<RangeToInclusive<usize>> for ReadonlyBuf {
    type Output = [u8];

    fn index(&self, index: RangeToInclusive<usize>) -> &Self::Output {
        range_from_readonlybuf(self, index)
    }
}
221
/// An owning handle to a heap-boxed libuv `uv_buf_t` plus its storage.
///
/// `Clone`/`Copy` duplicate only the pointer, so exactly one copy must
/// eventually call `destroy()` to release the container and storage.
#[derive(Clone, Copy)]
pub struct Buf {
    buf: *mut uv_buf_t,
}
227
impl Buf {
    /// Allocates `size` bytes of storage; `ENOMEM` when the layout is
    /// invalid or the allocator returns null.
    fn alloc(size: usize) -> crate::Result<*mut std::os::raw::c_char> {
        let layout = layout(size)?;
        let ptr = unsafe { std::alloc::alloc(layout) as *mut std::os::raw::c_char };
        if ptr.is_null() {
            Err(crate::Error::ENOMEM)
        } else {
            Ok(ptr)
        }
    }

    /// Creates a Buf holding a NUL-terminated copy of `s`.
    pub fn new(s: &str) -> Result<Buf, Box<dyn std::error::Error>> {
        Buf::new_from_bytes(s.as_bytes())
    }

    /// Creates a Buf holding a copy of `bytes` plus a trailing NUL.
    ///
    /// Note the recorded `len` is `bytes.len() + 1`, i.e. it INCLUDES the
    /// NUL terminator.
    pub fn new_from_bytes(bytes: &[u8]) -> Result<Buf, Box<dyn std::error::Error>> {
        let len = bytes.len();
        let buflen = len + 1;
        let base = Buf::alloc(buflen)?;
        unsafe {
            base.copy_from_nonoverlapping(bytes.as_ptr() as _, len);
            // Terminator so as_c_str()/to_string_lossy() on the readonly
            // view are sound.
            base.add(len).write(0);
        }

        // The uv_buf_t container is boxed then leaked into a raw pointer;
        // destroy_container()/destroy() reclaims it.
        let buf = Box::new(unsafe { uv_buf_init(base, buflen as _) });
        Ok(Box::into_raw(buf).into_inner())
    }

    /// Creates a Buf with `size` bytes of UNINITIALIZED storage (no NUL
    /// terminator is written).
    pub fn with_capacity(size: usize) -> crate::Result<Buf> {
        let base = Buf::alloc(size)?;
        let buf = Box::new(unsafe { uv_buf_init(base, size as _) });
        Ok(Box::into_raw(buf).into_inner())
    }

    /// Creates a Buf sized like `other` (or `size`, when given) and copies
    /// `other`'s contents into it.
    ///
    /// When `other` is unallocated: returns a fresh `size`-byte buffer if a
    /// size was given, otherwise a Buf whose CONTAINER pointer is null.
    /// NOTE(review): that null-container Buf would make is_allocated()/
    /// resize() dereference null — confirm callers never operate on it.
    pub fn new_from(other: &impl BufTrait, size: Option<usize>) -> crate::Result<Self> {
        let other = other.readonly();
        if !other.is_allocated() {
            if let Some(s) = size {
                return Buf::with_capacity(s);
            }
            return Ok(Buf {
                buf: std::ptr::null_mut(),
            });
        }

        let len = if let Some(s) = size {
            s
        } else {
            unsafe { (*other.buf).len as _ }
        };

        let mut buf = Buf::with_capacity(len)?;
        buf.copy_from(&other)?;
        Ok(buf)
    }

    /// Returns `true` when the wrapped `uv_buf_t` has allocated storage.
    /// NOTE(review): dereferences `self.buf` unchecked — UB if the
    /// container pointer is null (see `new_from`).
    pub fn is_allocated(&self) -> bool {
        unsafe { !(*self.buf).base.is_null() }
    }

    /// Grows or shrinks the storage to `size` bytes, allocating fresh
    /// storage when the buffer was unallocated.
    pub fn resize(&mut self, size: usize) -> crate::Result<()> {
        if self.is_allocated() {
            let len = unsafe { (*self.buf).len as _ };
            if len != size {
                let (alloc_size, _) = calc_alloc_size_alignment(size)?;
                // realloc requires the layout of the EXISTING allocation,
                // hence layout(len) with the old length.
                let layout = layout(len)?;
                let ptr = unsafe { std::alloc::realloc((*self.buf).base as _, layout, alloc_size) };
                if ptr.is_null() {
                    return Err(crate::Error::ENOMEM);
                }
                unsafe {
                    (*self.buf).base = ptr as _;
                    (*self.buf).len = alloc_size as _;
                }
            }
        } else {
            // No storage yet: allocate and record the new length in place.
            let base = Buf::alloc(size)?;
            unsafe {
                (*self.buf).base = base as _;
                (*self.buf).len = size as _;
            }
        }
        Ok(())
    }

    /// Copies as many bytes as fit from `other` into this buffer.
    ///
    /// No-op when `other` is unallocated. If `self` is unallocated it is
    /// first resized to match. Copies min(self.len, other.len) bytes, so
    /// data is silently truncated when `self` is smaller.
    pub fn copy_from(&mut self, other: &impl BufTrait) -> crate::Result<()> {
        let other = other.readonly();
        if !other.is_allocated() {
            return Ok(());
        }

        let other_len = unsafe { (*other.buf).len as _ };
        if !self.is_allocated() {
            self.resize(other_len)?;
        }

        let my_len = unsafe { (*self.buf).len as usize };
        let len = my_len.min(other_len);
        unsafe {
            (*self.buf)
                .base
                .copy_from_nonoverlapping((*other.buf).base, len)
        };

        Ok(())
    }

    /// Frees the storage (if any) and resets `base`/`len` so the buffer
    /// reads as unallocated afterwards.
    pub fn dealloc(&mut self) {
        unsafe {
            if self.is_allocated() {
                let len = (*self.buf).len as _;
                // If layout computation fails the storage is silently
                // leaked and base/len keep their stale values.
                if let Ok(layout) = layout(len) {
                    std::alloc::dealloc((*self.buf).base as _, layout);
                    (*self.buf).base = std::ptr::null_mut();
                    (*self.buf).len = 0;
                }
            }
        }
    }

    /// Reclaims the boxed uv_buf_t container created by the constructors;
    /// `self.buf` dangles afterwards.
    pub(crate) fn destroy_container(&mut self) {
        std::mem::drop(unsafe { Box::from_raw(self.buf) });
    }

    /// Frees both the storage and the container. The Buf (and any copies
    /// of it) must not be used after this call.
    pub fn destroy(&mut self) {
        self.dealloc();
        self.destroy_container();
    }
}
370
impl FromInner<*mut uv_buf_t> for Buf {
    // Adopts a raw uv_buf_t container pointer (see Box::into_raw in the
    // constructors).
    fn from_inner(buf: *mut uv_buf_t) -> Buf {
        Buf { buf }
    }
}
376
impl Inner<*mut uv_buf_t> for Buf {
    // Exposes the wrapped raw pointer mutably.
    fn inner(&self) -> *mut uv_buf_t {
        self.buf
    }
}
382
impl Inner<*const uv_buf_t> for Buf {
    // Exposes the wrapped raw pointer as const (implicit *mut -> *const).
    fn inner(&self) -> *const uv_buf_t {
        self.buf
    }
}
388
impl From<Buf> for ReadonlyBuf {
    // Downgrades an owning Buf to a read-only view of the same container.
    fn from(buf: Buf) -> ReadonlyBuf {
        ReadonlyBuf { buf: buf.buf }
    }
}
394
impl std::convert::TryFrom<&str> for Buf {
    type Error = Box<dyn std::error::Error>;

    // Fallible conversion mirroring Buf::new (allocation may fail).
    fn try_from(s: &str) -> Result<Self, Self::Error> {
        Buf::new(s)
    }
}
402
/// Common interface for buffer handles: anything that can yield a
/// read-only view of its `uv_buf_t`.
pub trait BufTrait {
    fn readonly(&self) -> ReadonlyBuf;
}
406
impl BufTrait for ReadonlyBuf {
    // A read-only view of itself (pointer copy).
    fn readonly(&self) -> ReadonlyBuf {
        ReadonlyBuf { buf: self.buf }
    }
}
412
impl BufTrait for Buf {
    // A read-only view over the same container (implicit *mut -> *const).
    fn readonly(&self) -> ReadonlyBuf {
        ReadonlyBuf { buf: self.buf }
    }
}
418
419impl<T> FromInner<&[T]> for (*mut uv_buf_t, usize, usize)
420where
421 T: BufTrait,
422{
423 fn from_inner(bufs: &[T]) -> (*mut uv_buf_t, usize, usize) {
424 let mut bufs: std::mem::ManuallyDrop<Vec<uv::uv_buf_t>> = std::mem::ManuallyDrop::new(
429 bufs.iter()
430 .map(|b| unsafe { *b.readonly().inner() }.clone())
431 .collect(),
432 );
433 let bufs_ptr = bufs.as_mut_ptr();
434 let bufs_len = bufs.len();
435 let bufs_capacity = bufs.capacity();
436 (bufs_ptr, bufs_len, bufs_capacity)
437 }
438}