use std::fmt::Debug;
use std::io;
use std::io::Error;
use std::ops::Deref;
use std::{
    alloc::{dealloc, Layout},
    borrow::Borrow,
    cell::RefCell,
    cmp, hash,
    io::Read,
    io::Result,
    rc::Rc,
    slice,
    sync::atomic::{AtomicUsize, Ordering},
};

use super::Bt;

static EMPTY_ARRAY: &[u8] = &[];
const STATIC_TYPE: u8 = 1;
const SHARED_TYPE: u8 = 2;

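/// A reference-counted byte buffer with a movable read cursor.
///
/// A `Binary` either borrows a `'static` slice (no allocation, no counting)
/// or owns a heap buffer shared through a reference count; which case applies
/// is decided by the vtable installed at construction time.
///
/// A minimal usage sketch (illustrative; assumes `Binary` and the `Bt` trait
/// are in scope):
///
/// ```ignore
/// let mut bin = Binary::from(vec![1u8, 2, 3]);
/// assert_eq!(bin.remaining(), 3);
/// bin.advance(1);
/// assert_eq!(bin.chunk(), &[2, 3]);
/// ```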
pub struct Binary {
    ptr: *const u8,
    /// Shared reference count; stays at zero for the static (uncounted) case.
    counter: Rc<RefCell<AtomicUsize>>,
    /// Bytes the cursor has advanced past the start of the allocation.
    cursor: usize,
    /// Marked position within the consumed region; kept at or behind `cursor`.
    mark: usize,
    /// Readable bytes remaining from `ptr`.
    len: usize,
    vtable: &'static Vtable,
}

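// Note: `Rc<RefCell<_>>` is not itself thread-safe; these impls rest on the
// assumption that a `Binary` and its clones stay on one thread at a time.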
unsafe impl Sync for Binary {}

unsafe impl Send for Binary {}

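/// Dispatch table that selects static (borrowed) or shared (owned,
/// reference-counted) behavior for a `Binary`.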
pub struct Vtable {
    pub clone: unsafe fn(bin: &Binary) -> Binary,
    pub to_vec: unsafe fn(bin: &Binary) -> Vec<u8>,
    pub drop: unsafe fn(bin: &mut Binary),
    pub vtype: fn() -> u8,
}

const STATIC_VTABLE: Vtable = Vtable {
    clone: static_clone,
    to_vec: static_to_vec,
    drop: static_drop,
    vtype: || STATIC_TYPE,
};

unsafe fn static_clone(bin: &Binary) -> Binary {
    let slice = slice::from_raw_parts(bin.ptr, bin.len);
    Binary::from_static(slice)
}

unsafe fn static_to_vec(bin: &Binary) -> Vec<u8> {
    let slice = slice::from_raw_parts(bin.ptr, bin.len);
    slice.to_vec()
}

// Static data is borrowed for `'static`, so dropping is a no-op.
unsafe fn static_drop(_bin: &mut Binary) {}

const SHARED_VTABLE: Vtable = Vtable {
    clone: shared_clone,
    to_vec: shared_to_vec,
    drop: shared_drop,
    vtype: || SHARED_TYPE,
};

unsafe fn shared_clone(bin: &Binary) -> Binary {
    bin.counter.borrow_mut().fetch_add(1, Ordering::Relaxed);
    Binary {
        ptr: bin.ptr,
        counter: bin.counter.clone(),
        cursor: bin.cursor,
        mark: bin.mark,
        len: bin.len,
        vtable: bin.vtable,
    }
}

unsafe fn shared_to_vec(bin: &Binary) -> Vec<u8> {
    let slice = slice::from_raw_parts(bin.ptr, bin.len);
    slice.to_vec()
}

unsafe fn shared_drop(bin: &mut Binary) {
    // Last owner: rewind by `cursor` to recover the allocation's start, then
    // free it with the layout it was allocated with (`cursor + len` always
    // equals the original length).
    if (*bin.counter).borrow_mut().fetch_sub(1, Ordering::Release) == 1 {
        let ori = bin.ptr.sub(bin.cursor);
        dealloc(
            ori as *mut u8,
            Layout::from_size_align(bin.cursor + bin.len, 1).unwrap(),
        );
    }
}

impl Binary {
    pub fn new() -> Binary {
        Binary::from_static(EMPTY_ARRAY)
    }

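    /// Wraps a `'static` byte slice without allocating; the reference count
    /// starts (and stays) at zero because nothing ever needs to be freed.
    ///
    /// A usage sketch (illustrative; assumes `Binary` is in scope):
    ///
    /// ```ignore
    /// let bin = Binary::from_static(b"hello");
    /// assert_eq!(bin.len(), 5);
    /// assert_eq!(bin.get_refs(), 0);
    /// ```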
    pub fn from_static(val: &'static [u8]) -> Binary {
        Binary {
            ptr: val.as_ptr(),
            counter: Rc::new(RefCell::new(AtomicUsize::new(0))),
            cursor: 0,
            mark: 0,
            len: val.len(),
            vtable: &STATIC_VTABLE,
        }
    }

    pub fn len(&self) -> usize {
        self.len
    }

    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len == 0
    }

    #[inline]
    pub fn to_vec(&self) -> Vec<u8> {
        unsafe { (self.vtable.to_vec)(self) }
    }

    pub fn get_refs(&self) -> usize {
        (*self.counter).borrow().load(Ordering::SeqCst)
    }

    #[inline]
    fn as_slice(&self) -> &[u8] {
        unsafe { slice::from_raw_parts(self.ptr, self.len) }
    }

    #[inline]
    unsafe fn inc_start(&mut self, by: usize) {
        if by == 0 {
            return;
        }
        debug_assert!(self.len >= by, "internal: inc_start out of bounds");
        self.len -= by;
        self.ptr = self.ptr.add(by);
        self.cursor += by;
    }

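    /// Rewinds the cursor back to the start of the buffer, making every
    /// previously consumed byte readable again; despite the name, no data
    /// is discarded.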
    #[inline]
    pub fn clear(&mut self) {
        unsafe { self.sub_start(self.cursor) }
    }

    #[inline]
    unsafe fn sub_start(&mut self, by: usize) {
        debug_assert!(self.cursor >= by, "internal: sub_start out of bounds");
        self.len += by;
        self.ptr = self.ptr.sub(by);
        self.cursor -= by;
        self.mark = cmp::min(self.mark, self.cursor);
    }

    pub fn copy_from_slice(data: &[u8]) -> Self {
        data.to_vec().into()
    }

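    /// Copies out the *entire* backing buffer, including bytes already
    /// consumed by the cursor, while `into_slice` below copies only the
    /// unread remainder. A sketch of the difference (illustrative; assumes
    /// `Binary` and the `Bt` trait are in scope):
    ///
    /// ```ignore
    /// let mut bin = Binary::from(vec![1u8, 2, 3, 4]);
    /// bin.advance(2);
    /// assert_eq!(bin.into_slice_all(), vec![1, 2, 3, 4]);
    /// assert_eq!(bin.into_slice(), vec![3, 4]);
    /// ```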
    #[inline]
    pub fn into_slice_all(&self) -> Vec<u8> {
        unsafe { slice::from_raw_parts(self.ptr.sub(self.cursor), self.cursor + self.len) }
            .to_vec()
    }

    /// Copies out only the unread remainder of the buffer.
    #[inline]
    pub fn into_slice(&self) -> Vec<u8> {
        self.to_vec()
    }
}

impl Clone for Binary {
    fn clone(&self) -> Self {
        unsafe { (self.vtable.clone)(self) }
    }
}

impl Drop for Binary {
    fn drop(&mut self) {
        unsafe { (self.vtable.drop)(self) }
    }
}

impl From<&'static str> for Binary {
    fn from(value: &'static str) -> Self {
        Binary::from_static(value.as_bytes())
    }
}

impl From<&'static [u8]> for Binary {
    fn from(value: &'static [u8]) -> Self {
        Binary::from_static(value)
    }
}

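// Ownership transfer without copying: the box is released via `Box::into_raw`
// and reclaimed in `shared_drop` once the reference count drops to zero.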
impl From<Box<[u8]>> for Binary {
    fn from(value: Box<[u8]>) -> Self {
        if value.is_empty() {
            return Binary::new();
        }
        let len = value.len();
        let ptr = Box::into_raw(value) as *mut u8;
        Binary {
            ptr,
            len,
            mark: 0,
            cursor: 0,
            counter: Rc::new(RefCell::new(AtomicUsize::new(1))),
            vtable: &SHARED_VTABLE,
        }
    }
}

impl From<Vec<u8>> for Binary {
    fn from(value: Vec<u8>) -> Self {
        Binary::from(value.into_boxed_slice())
    }
}

impl Bt for Binary {
    fn remaining(&self) -> usize {
        self.len
    }

    fn chunk(&self) -> &[u8] {
        self.as_slice()
    }

    fn advance_chunk(&mut self, n: usize) -> &[u8] {
        // Materialize the slice from raw parts first: the bytes stay alive
        // after `advance`, which only moves the cursor forward.
        let ret = &unsafe { slice::from_raw_parts(self.ptr, self.len) }[..n];
        self.advance(n);
        ret
    }

    fn advance(&mut self, n: usize) {
        unsafe {
            self.inc_start(n);
        }
    }

    fn into_binary(self) -> Binary {
        self
    }
}

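// Non-blocking flavored `Read`: an exhausted buffer (or an empty destination)
// reports `WouldBlock` rather than the conventional `Ok(0)`.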
impl Read for Binary {
    #[inline(always)]
    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
        let left = self.remaining();
        if left == 0 || buf.is_empty() {
            return Err(Error::new(io::ErrorKind::WouldBlock, ""));
        }
        let read = cmp::min(left, buf.len());
        buf[..read].copy_from_slice(&self.chunk()[..read]);
        self.advance(read);
        Ok(read)
    }
}

impl Iterator for Binary {
    type Item = u8;

    #[inline]
    fn next(&mut self) -> Option<u8> {
        self.get_next()
    }
}

impl Deref for Binary {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl Debug for Binary {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Binary")
            .field("ptr", &self.ptr)
            .field("counter", &self.counter)
            .field("cursor", &self.cursor)
            .field("mark", &self.mark)
            .field("len", &self.len)
            .finish()
    }
}

impl AsRef<[u8]> for Binary {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl hash::Hash for Binary {
    fn hash<H>(&self, state: &mut H)
    where
        H: hash::Hasher,
    {
        self.as_slice().hash(state);
    }
}

impl Borrow<[u8]> for Binary {
    fn borrow(&self) -> &[u8] {
        self.as_slice()
    }
}

impl PartialEq for Binary {
    fn eq(&self, other: &Binary) -> bool {
        self.as_slice() == other.as_slice()
    }
}

impl PartialOrd for Binary {
    fn partial_cmp(&self, other: &Binary) -> Option<cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}

impl Ord for Binary {
    fn cmp(&self, other: &Binary) -> cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}

impl Eq for Binary {}

impl PartialEq<[u8]> for Binary {
    fn eq(&self, other: &[u8]) -> bool {
        self.as_slice() == other
    }
}

impl PartialOrd<[u8]> for Binary {
    fn partial_cmp(&self, other: &[u8]) -> Option<cmp::Ordering> {
        self.as_slice().partial_cmp(other)
    }
}

impl PartialEq<Binary> for [u8] {
    fn eq(&self, other: &Binary) -> bool {
        *other == *self
    }
}

impl PartialOrd<Binary> for [u8] {
    fn partial_cmp(&self, other: &Binary) -> Option<cmp::Ordering> {
        <[u8] as PartialOrd<[u8]>>::partial_cmp(self, other)
    }
}

impl PartialEq<str> for Binary {
    fn eq(&self, other: &str) -> bool {
        self.as_slice() == other.as_bytes()
    }
}

impl PartialOrd<str> for Binary {
    fn partial_cmp(&self, other: &str) -> Option<cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_bytes())
    }
}

impl PartialEq<Binary> for str {
    fn eq(&self, other: &Binary) -> bool {
        *other == *self
    }
}

impl PartialOrd<Binary> for str {
    fn partial_cmp(&self, other: &Binary) -> Option<cmp::Ordering> {
        <[u8] as PartialOrd<[u8]>>::partial_cmp(self.as_bytes(), other)
    }
}

impl PartialEq<Vec<u8>> for Binary {
    fn eq(&self, other: &Vec<u8>) -> bool {
        *self == other[..]
    }
}

impl PartialOrd<Vec<u8>> for Binary {
    fn partial_cmp(&self, other: &Vec<u8>) -> Option<cmp::Ordering> {
        self.as_slice().partial_cmp(&other[..])
    }
}

impl PartialEq<Binary> for Vec<u8> {
    fn eq(&self, other: &Binary) -> bool {
        *other == *self
    }
}

impl PartialOrd<Binary> for Vec<u8> {
    fn partial_cmp(&self, other: &Binary) -> Option<cmp::Ordering> {
        <[u8] as PartialOrd<[u8]>>::partial_cmp(self, other)
    }
}

impl PartialEq<String> for Binary {
    fn eq(&self, other: &String) -> bool {
        *self == other[..]
    }
}

impl PartialOrd<String> for Binary {
    fn partial_cmp(&self, other: &String) -> Option<cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_bytes())
    }
}

impl PartialEq<Binary> for String {
    fn eq(&self, other: &Binary) -> bool {
        *other == *self
    }
}

impl PartialOrd<Binary> for String {
    fn partial_cmp(&self, other: &Binary) -> Option<cmp::Ordering> {
        <[u8] as PartialOrd<[u8]>>::partial_cmp(self.as_bytes(), other)
    }
}

impl PartialEq<Binary> for &[u8] {
    fn eq(&self, other: &Binary) -> bool {
        *other == *self
    }
}

impl PartialOrd<Binary> for &[u8] {
    fn partial_cmp(&self, other: &Binary) -> Option<cmp::Ordering> {
        <[u8] as PartialOrd<[u8]>>::partial_cmp(self, other)
    }
}

impl PartialEq<Binary> for &str {
    fn eq(&self, other: &Binary) -> bool {
        *other == *self
    }
}

impl PartialOrd<Binary> for &str {
    fn partial_cmp(&self, other: &Binary) -> Option<cmp::Ordering> {
        <[u8] as PartialOrd<[u8]>>::partial_cmp(self.as_bytes(), other)
    }
}

impl<'a, T: ?Sized> PartialEq<&'a T> for Binary
where
    Binary: PartialEq<T>,
{
    fn eq(&self, other: &&'a T) -> bool {
        *self == **other
    }
}

impl<'a, T: ?Sized> PartialOrd<&'a T> for Binary
where
    Binary: PartialOrd<T>,
{
    fn partial_cmp(&self, other: &&'a T) -> Option<cmp::Ordering> {
        self.partial_cmp(&**other)
    }
}

impl Default for Binary {
    #[inline]
    fn default() -> Binary {
        Binary::new()
    }
}

#[cfg(test)]
mod tests {
    use super::Binary;

    #[test]
    fn binary_refs() {
        {
            let s = Binary::from("aaaa");
            let s1 = s.clone();
            assert!(s1.get_refs() == 0);
            drop(s1);
            assert!(s.get_refs() == 0);
        }
        {
            let b = Binary::from(vec![1]);
            let b1 = b.clone();
            assert!(b1.get_refs() == 2);
            drop(b1);
            assert!(b.get_refs() == 1);
        }
    }
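
    // A complementary sketch of a test (not in the original suite): `advance`
    // from the `Bt` trait moves the cursor, and `Read` drains the remainder,
    // reporting `WouldBlock` once the buffer is empty.
    #[test]
    fn binary_advance_and_read() {
        use super::Bt;
        use std::io::Read;

        let mut b = Binary::from(vec![1u8, 2, 3, 4]);
        b.advance(1);
        assert_eq!(b.chunk(), &[2, 3, 4]);

        let mut buf = [0u8; 8];
        let n = b.read(&mut buf).unwrap();
        assert_eq!(&buf[..n], &[2, 3, 4]);
        assert!(b.read(&mut buf).is_err());
    }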
}