// algorithm/buf/binary_mut.rs

use std::{
    cmp,
    fmt::{self, Debug},
    hash,
    io::{self, Error, Read, Result, Write},
    mem::MaybeUninit,
    ops::{Deref, DerefMut},
    ptr, slice,
};

use super::{Binary, Bt, BtMut};

/// Default threshold (in bytes) at which `try_resort_memory` starts
/// considering compaction of already-consumed data.
const RESORT_MEMORY_SIZE: usize = 102400;

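/// A growable byte buffer backed by a `Vec<u8>`, with an independent read
/// cursor (`cursor`) and write position (`wpos`), so data can be consumed
/// from the front while more bytes are appended at the back.
///
/// A minimal usage sketch (illustrative only and marked `ignore`, since the
/// exact crate paths of `BinaryMut`, `Binary` and the `Bt` trait depend on
/// how this module is re-exported):
///
/// ```ignore
/// let mut buf = BinaryMut::new();
/// buf.put_slice(b"hello world");
/// assert_eq!(buf.as_slice(), b"hello world");
///
/// buf.advance(6); // `Bt::advance`: drop "hello " from the front
/// assert_eq!(buf.as_slice(), b"world");
///
/// let frozen: Binary = buf.freeze(); // read-only copy of the unread bytes
/// ```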
pub struct BinaryMut {
    /// Backing storage.
    vec: Vec<u8>,
    /// Read position: bytes before `cursor` have already been consumed.
    cursor: usize,
    /// Write position: bytes in `cursor..wpos` are readable.
    wpos: usize,
    /// Saved cursor for `mark`/`rewind_mark`; `usize::MAX` means no mark is set.
    mark: usize,
    /// Size threshold used by `try_resort_memory`.
    resort: usize,
}

impl Clone for BinaryMut {
    fn clone(&self) -> Self {
        Self {
            vec: self.vec.clone(),
            cursor: self.cursor,
            wpos: self.wpos,
            mark: self.mark,
            resort: self.resort,
        }
    }
}

impl BinaryMut {
    #[inline]
    pub fn with_capacity(n: usize) -> BinaryMut {
        BinaryMut::from_vec(Vec::with_capacity(n))
    }

    #[inline]
    pub fn new() -> BinaryMut {
        BinaryMut::with_capacity(0)
    }

    #[inline]
    pub(crate) fn from_vec(vec: Vec<u8>) -> BinaryMut {
        BinaryMut {
            wpos: vec.len(),
            vec,
            cursor: 0,
            mark: usize::MAX,
            resort: RESORT_MEMORY_SIZE,
        }
    }

    /// Consumes the buffer and returns the unread bytes as a `Vec<u8>`.
    #[inline]
    pub fn into_slice_all(self) -> Vec<u8> {
        self.vec[self.cursor..self.wpos].into()
    }

    /// Unread bytes as a shared slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        &self.vec[self.cursor..self.wpos]
    }

    #[inline]
    fn as_slice_mut(&mut self) -> &mut [u8] {
        &mut self.vec[self.cursor..self.wpos]
    }

    #[inline]
    fn inc_start(&mut self, by: usize) {
        debug_assert!(self.remaining() >= by, "internal: inc_start out of bounds");
        self.cursor += by;
    }

    /// Number of unread bytes.
    #[inline]
    pub fn len(&self) -> usize {
        self.wpos - self.cursor
    }

    #[inline]
    pub fn cursor(&self) -> usize {
        self.cursor
    }

    /// Resets the cursor and write position; the allocation is kept.
    #[inline]
    pub fn clear(&mut self) {
        self.cursor = 0;
        self.wpos = 0;
    }

    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    #[inline]
    pub fn capacity(&self) -> usize {
        self.vec.capacity()
    }

    /// Ensures at least `additional` bytes of spare capacity in the backing vector.
    pub fn reserve(&mut self, additional: usize) {
        let len = self.vec.len();
        let rem = self.vec.capacity() - len;
        if rem >= additional {
            return;
        }
        self.vec.reserve(additional)
    }

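    /// Appends every remaining byte of another readable source.
    ///
    /// A small sketch (marked `ignore`, crate paths assumed); any `Bt`
    /// implementor can be the source, including another `BinaryMut`:
    ///
    /// ```ignore
    /// let mut dst = BinaryMut::from("hello ");
    /// dst.put(BinaryMut::from("world"));
    /// assert_eq!(dst.as_slice(), b"hello world");
    /// ```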
    pub fn put<T: Bt>(&mut self, mut src: T) {
        while src.has_remaining() {
            let s = src.chunk();
            let l = s.len();
            self.extend_from_slice(s);
            src.advance(l);
        }
    }

    /// Appends `src` and returns how many bytes were written.
    pub fn put_slice(&mut self, src: &[u8]) -> usize {
        self.extend_from_slice(src);
        src.len()
    }

    /// Consumes the buffer, converting the unread bytes into a read-only `Binary`.
    #[inline]
    pub fn freeze(self) -> Binary {
        Binary::from(self.into_slice_all())
    }

    /// Copies the unread bytes into a new `Binary` and marks them as consumed.
    pub fn copy_to_binary(&mut self) -> Binary {
        let binary = Binary::from(self.chunk().to_vec());
        self.advance_all();
        binary
    }

    /// Appends `extend` to the buffer, growing the backing vector as needed.
    #[inline]
    pub fn extend_from_slice(&mut self, extend: &[u8]) {
        let cnt = extend.len();
        self.reserve(cnt);

        unsafe {
            let dst = self.chunk_mut();
            debug_assert!(dst.len() >= cnt);

            ptr::copy_nonoverlapping(extend.as_ptr(), dst.as_mut_ptr().cast(), cnt);
        }

        unsafe {
            self.advance_mut(cnt);
        }
    }

    pub fn get_resort(&self) -> usize {
        self.resort
    }

    pub fn set_resort(&mut self, resort: usize) {
        self.resort = resort;
    }

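    /// Saves the current read cursor so it can be restored later with
    /// `rewind_mark`.
    ///
    /// A sketch of the mark/rewind pattern (marked `ignore`, crate paths
    /// assumed):
    ///
    /// ```ignore
    /// let mut buf = BinaryMut::from("abcdef");
    /// buf.mark();                  // remember cursor = 0
    /// buf.advance(3);              // speculatively consume "abc"
    /// assert_eq!(buf.as_slice(), b"def");
    ///
    /// assert!(buf.rewind_mark());  // roll the cursor back to the mark
    /// assert_eq!(buf.as_slice(), b"abcdef");
    /// ```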
    pub fn mark(&mut self) {
        self.mark = self.cursor;
    }

    /// Forgets any saved mark.
    #[inline(always)]
    pub fn clear_mark(&mut self) {
        self.mark = usize::MAX;
    }

    /// Restores the cursor to the saved mark. Returns `false` if no mark is
    /// set, or if the mark is stale (past the write position), in which case
    /// the stale mark is cleared.
    pub fn rewind_mark(&mut self) -> bool {
        if self.mark == usize::MAX {
            false
        } else if self.mark > self.wpos {
            self.clear_mark();
            false
        } else {
            self.cursor = self.mark;
            true
        }
    }

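    /// Exposes writable space past the current write position, resizing the
    /// backing vector so at least 128 bytes are available. Bytes written here
    /// only become readable after a matching `advance_mut`.
    ///
    /// A sketch of writing directly into that space (marked `ignore`, crate
    /// paths assumed):
    ///
    /// ```ignore
    /// let mut buf = BinaryMut::new();
    /// let n = {
    ///     let dst = buf.data_mut();
    ///     dst[..5].copy_from_slice(b"hello");
    ///     5
    /// };
    /// unsafe { buf.advance_mut(n) }; // commit the bytes just written
    /// assert_eq!(buf.as_slice(), b"hello");
    /// ```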
    pub fn data_mut(&mut self) -> &mut [u8] {
        if self.wpos + 128 > self.vec.len() {
            self.vec.resize(self.wpos + 128, 0);
        }
        &mut self.vec[self.wpos..]
    }

    /// Compacts the buffer by moving the unread bytes back to the front of the
    /// backing vector. Compaction only happens once the vector has grown past
    /// the resort threshold, a sizeable prefix has been consumed, and no more
    /// than half of the vector is still unread.
    #[inline]
    pub unsafe fn try_resort_memory(&mut self) {
        if self.vec.len() < self.resort || self.cursor < self.resort / 2 {
            return;
        }
        let left = self.remaining();
        if left * 2 > self.vec.len() {
            return;
        }
        if left == 0 {
            self.clear();
        } else {
            std::ptr::copy(
                self.vec.as_ptr().add(self.cursor),
                self.vec.as_mut_ptr(),
                left,
            );
            self.cursor = 0;
            self.wpos = left;
            self.clear_mark();
        }
    }
}

impl From<Vec<u8>> for BinaryMut {
    fn from(value: Vec<u8>) -> Self {
        BinaryMut::from_vec(value)
    }
}

impl Drop for BinaryMut {
    fn drop(&mut self) {}
}

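/// Read-side implementation: `chunk` exposes the unread bytes and `advance`
/// consumes them.
///
/// A sketch (marked `ignore`; crate paths, including the `Bt` trait import,
/// are assumed):
///
/// ```ignore
/// let mut buf = BinaryMut::from("hello world");
/// assert_eq!(buf.chunk(), b"hello world");
///
/// let head = buf.advance_chunk(6).to_vec(); // consume and return "hello "
/// assert_eq!(head, b"hello ");
/// assert_eq!(buf.remaining(), 5);
/// ```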
impl Bt for BinaryMut {
    fn remaining(&self) -> usize {
        std::cmp::min(self.wpos, self.vec.len()) - self.cursor
    }

    fn chunk(&self) -> &[u8] {
        self.as_slice()
    }

    fn advance(&mut self, n: usize) {
        self.inc_start(n);
    }

    fn advance_chunk(&mut self, n: usize) -> &[u8] {
        let cursor = self.cursor;
        self.inc_start(n);
        let end = std::cmp::min(self.wpos, cursor + n);
        // Panics like a slice index if fewer than `n` readable bytes remain.
        &self.vec[cursor..end][..n]
    }

    fn into_binary(self) -> Binary {
        Binary::from(self.chunk().to_vec())
    }
}

unsafe impl BtMut for BinaryMut {
    fn remaining_mut(&self) -> usize {
        usize::MAX - self.len()
    }

    unsafe fn advance_mut(&mut self, cnt: usize) {
        self.wpos += cnt;
        if self.wpos > self.vec.len() {
            // Safety: the caller guarantees the bytes up to the new `wpos`
            // were initialized through `chunk_mut`/`data_mut`.
            self.vec.set_len(self.wpos);
        }
        self.try_resort_memory();
    }

    fn chunk_mut(&mut self) -> &mut [MaybeUninit<u8>] {
        if self.wpos == self.vec.capacity() {
            self.reserve(128);
        }
        unsafe {
            slice::from_raw_parts_mut(
                self.vec.as_mut_ptr().add(self.wpos) as *mut MaybeUninit<u8>,
                self.vec.capacity() - self.wpos,
            )
        }
    }
}

impl AsRef<[u8]> for BinaryMut {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl Deref for BinaryMut {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_ref()
    }
}

impl AsMut<[u8]> for BinaryMut {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_slice_mut()
    }
}

impl DerefMut for BinaryMut {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut()
    }
}

impl<'a> From<&'a [u8]> for BinaryMut {
    fn from(src: &'a [u8]) -> BinaryMut {
        BinaryMut::from_vec(src.to_vec())
    }
}

impl<'a> From<&'a str> for BinaryMut {
    fn from(src: &'a str) -> BinaryMut {
        BinaryMut::from(src.as_bytes())
    }
}

impl From<String> for BinaryMut {
    fn from(src: String) -> BinaryMut {
        BinaryMut::from_vec(src.into_bytes())
    }
}

impl From<BinaryMut> for Binary {
    fn from(src: BinaryMut) -> Binary {
        src.freeze()
    }
}

impl From<Binary> for BinaryMut {
    fn from(src: Binary) -> BinaryMut {
        BinaryMut::from(src.into_slice())
    }
}

impl PartialEq for BinaryMut {
    fn eq(&self, other: &BinaryMut) -> bool {
        self.as_slice() == other.as_slice()
    }
}

impl PartialOrd for BinaryMut {
    fn partial_cmp(&self, other: &BinaryMut) -> Option<cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}

impl Ord for BinaryMut {
    fn cmp(&self, other: &BinaryMut) -> cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}

impl Eq for BinaryMut {}

impl Default for BinaryMut {
    #[inline]
    fn default() -> BinaryMut {
        BinaryMut::new()
    }
}

impl hash::Hash for BinaryMut {
    fn hash<H>(&self, state: &mut H)
    where
        H: hash::Hasher,
    {
        let s: &[u8] = self.as_ref();
        s.hash(state);
    }
}

impl Iterator for BinaryMut {
    type Item = u8;
    #[inline]
    fn next(&mut self) -> Option<u8> {
        self.get_next()
    }
}

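/// `fmt::Write` support, so `write!`/`writeln!` can format text straight into
/// the buffer as UTF-8.
///
/// A sketch (marked `ignore`, crate paths assumed):
///
/// ```ignore
/// use std::fmt::Write as _;
///
/// let mut buf = BinaryMut::new();
/// write!(buf, "id={}", 42).unwrap();
/// assert_eq!(buf.as_slice(), b"id=42");
/// ```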
impl fmt::Write for BinaryMut {
    #[inline]
    fn write_str(&mut self, s: &str) -> fmt::Result {
        if self.remaining_mut() >= s.len() {
            self.put_slice(s.as_bytes());
            Ok(())
        } else {
            Err(fmt::Error)
        }
    }

    #[inline]
    fn write_fmt(&mut self, args: fmt::Arguments<'_>) -> fmt::Result {
        fmt::write(self, args)
    }
}

impl TryInto<String> for BinaryMut {
    type Error = io::Error;

    fn try_into(self) -> std::result::Result<String, Self::Error> {
        // Lossy UTF-8 conversion never fails, so this currently always returns `Ok`.
        Ok(String::from_utf8_lossy(self.chunk()).to_string())
    }
}

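/// `std::io::Read` over the unread bytes. Note that an empty buffer (or an
/// empty destination slice) yields a `WouldBlock` error rather than `Ok(0)`.
///
/// A sketch (marked `ignore`, crate paths assumed):
///
/// ```ignore
/// use std::io::{Read, Write};
///
/// let mut buf = BinaryMut::new();
/// buf.write_all(b"ping").unwrap();
///
/// let mut out = [0u8; 4];
/// let n = buf.read(&mut out).unwrap();
/// assert_eq!(&out[..n], b"ping");
/// ```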
impl Read for BinaryMut {
    #[inline(always)]
    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
        let left = self.remaining();
        if left == 0 || buf.is_empty() {
            return Err(Error::new(io::ErrorKind::WouldBlock, ""));
        }
        let read = std::cmp::min(left, buf.len());
        buf[..read].copy_from_slice(&self.chunk()[..read]);
        self.advance(read);
        Ok(read)
    }
}

impl Write for BinaryMut {
    #[inline(always)]
    fn write(&mut self, buf: &[u8]) -> Result<usize> {
        self.put_slice(buf);
        Ok(buf.len())
    }

    fn flush(&mut self) -> Result<()> {
        Ok(())
    }
}

impl Debug for BinaryMut {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("BinaryMut")
            .field("vec", &self.vec)
            .field("cursor", &self.cursor)
            .field("wpos", &self.wpos)
            .field("mark", &self.mark)
            .finish()
    }
}

unsafe impl Sync for BinaryMut {}

unsafe impl Send for BinaryMut {}

#[cfg(test)]
mod tests {}