use super::common::{Interned, RawInterned, UnsafeLock};
use bumpalo::Bump;
use hashbrown::{HashTable, hash_table::Entry};
use std::{
    alloc::Layout,
    fmt::{self, Display, Write},
    hash::{BuildHasher, Hash},
    mem::MaybeUninit,
    ptr::{self, NonNull},
    slice,
};

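/// Types that can be interned by their raw bytes: compared, hashed, and
/// reconstructed purely from a byte slice.
///
/// Implementors must not need `Drop`: interned values are copied into a bump
/// arena byte-for-byte and are never dropped.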
pub trait Dropless {
    fn as_byte_ptr(&self) -> NonNull<u8> {
        NonNull::from_ref(self).cast::<u8>()
    }

    fn layout(&self) -> Layout {
        Layout::for_value(self)
    }

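    /// Reinterprets `bytes` as a reference to `Self`.
    ///
    /// # Safety
    ///
    /// `bytes` must point to a valid, properly aligned value of `Self`.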
    unsafe fn from_bytes(bytes: &[u8]) -> &Self;

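    /// Hashes the value stored in `bytes` with `build_hasher`.
    ///
    /// # Safety
    ///
    /// `bytes` must hold a valid value of `Self`, as for [`Dropless::from_bytes`].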
    unsafe fn hash<S: BuildHasher>(build_hasher: &S, bytes: &[u8]) -> u64;

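    /// Compares the values stored in `a` and `b` for equality.
    ///
    /// # Safety
    ///
    /// Both `a` and `b` must hold valid values of `Self`.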
    unsafe fn eq(a: &[u8], b: &[u8]) -> bool;
}

macro_rules! simple_impl_dropless_fn {
    (from_bytes) => {
        unsafe fn from_bytes(bytes: &[u8]) -> &Self {
            let ptr = bytes.as_ptr().cast::<Self>();
            unsafe { ptr.as_ref().unwrap_unchecked() }
        }
    };
    (hash) => {
        unsafe fn hash<S: BuildHasher>(build_hasher: &S, bytes: &[u8]) -> u64 {
            let this = unsafe { Self::from_bytes(bytes) };
            build_hasher.hash_one(this)
        }
    };
    (eq) => {
        unsafe fn eq(a: &[u8], b: &[u8]) -> bool {
            unsafe { Self::from_bytes(a) == Self::from_bytes(b) }
        }
    };
}

macro_rules! impl_dropless {
    ($($ty:ty)*) => {
        $(
            impl Dropless for $ty {
                simple_impl_dropless_fn!(from_bytes);
                simple_impl_dropless_fn!(hash);
                simple_impl_dropless_fn!(eq);
            }
        )*
    };
}

impl_dropless!(i8 i16 i32 i64 i128 isize u8 u16 u32 u64 u128 usize bool char);

impl<T: Dropless + Hash + Eq> Dropless for [T] {
    unsafe fn from_bytes(bytes: &[u8]) -> &Self {
        let ptr = bytes.as_ptr().cast::<T>();
        let stride = Layout::new::<T>().pad_to_align().size();
        let num_elems = bytes.len() / stride;
        unsafe { slice::from_raw_parts(ptr, num_elems) }
    }
    simple_impl_dropless_fn!(hash);
    simple_impl_dropless_fn!(eq);
}

impl<T: Dropless + Hash + Eq, const N: usize> Dropless for [T; N] {
    simple_impl_dropless_fn!(from_bytes);
    simple_impl_dropless_fn!(hash);
    simple_impl_dropless_fn!(eq);
}

impl Dropless for str {
    unsafe fn from_bytes(bytes: &[u8]) -> &Self {
        unsafe { str::from_utf8_unchecked(bytes) }
    }
    simple_impl_dropless_fn!(hash);
    simple_impl_dropless_fn!(eq);
}

impl<T: Dropless + Hash + Eq> Dropless for Option<T> {
    simple_impl_dropless_fn!(from_bytes);
    simple_impl_dropless_fn!(hash);
    simple_impl_dropless_fn!(eq);
}

impl<T: Dropless + Hash + Eq, E: Dropless + Hash + Eq> Dropless for Result<T, E> {
    simple_impl_dropless_fn!(from_bytes);
    simple_impl_dropless_fn!(hash);
    simple_impl_dropless_fn!(eq);
}

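/// A deduplicating interner for [`Dropless`] values.
///
/// Interning equal values returns references into a single allocation, so
/// pointer identity can stand in for value equality. Interior mutability is
/// provided by an [`UnsafeLock`], so interning only needs `&self`.
///
/// A usage sketch, mirroring the tests at the bottom of this file (`ignore`d
/// since it relies on crate internals):
///
/// ```ignore
/// let interner = DroplessInterner::new();
/// let a = interner.intern("apple");
/// let b = interner.intern(&*String::from("apple"));
/// // Equal values share one allocation.
/// assert!(a.erased_raw() == b.erased_raw());
/// ```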
#[derive(Debug)]
pub struct DroplessInterner<S = fxhash::FxBuildHasher> {
    inner: UnsafeLock<DroplessInternSet<S>>,
}

impl DroplessInterner {
    pub fn new() -> Self {
        Self::default()
    }
}

impl<S: BuildHasher> DroplessInterner<S> {
    pub fn with_hasher(hash_builder: S) -> Self {
        let inner = unsafe { UnsafeLock::new(DroplessInternSet::with_hasher(hash_builder)) };
        Self { inner }
    }

    pub fn len(&self) -> usize {
        self.with_inner(|set| set.len())
    }

    pub fn is_empty(&self) -> bool {
        self.with_inner(|set| set.is_empty())
    }

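    /// Interns `value`, returning a reference to the canonical copy.
    ///
    /// Equal values always resolve to the same allocation, so the returned
    /// reference's address identifies the value for the interner's lifetime.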
    pub fn intern<K: Dropless + ?Sized>(&self, value: &K) -> Interned<'_, K> {
        self.with_inner(|set| set.intern(value))
    }

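    /// Formats `value` with [`Display`] and interns the resulting string.
    ///
    /// `upper_size` is an upper bound on the formatted length in bytes; if
    /// the output exceeds it, `Err(fmt::Error)` is returned.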
    pub fn intern_formatted_str<K: Display + ?Sized>(
        &self,
        value: &K,
        upper_size: usize,
    ) -> Result<Interned<'_, str>, fmt::Error> {
        self.with_inner(|set| set.intern_formatted_str(value, upper_size))
    }

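    /// Looks up a previously interned value without inserting it.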
    pub fn get<K: Dropless + ?Sized>(&self, value: &K) -> Option<Interned<'_, K>> {
        self.with_inner(|set| set.get(value))
    }

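    /// Removes all interned values, invalidating previously returned
    /// references; requires `&mut self`, so none can still be borrowed.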
    pub fn clear(&mut self) {
        self.with_inner(|set| set.clear())
    }

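    /// Runs `f` with exclusive access to the inner intern set.
    ///
    /// The lock is held only for the duration of `f`, while the `'this`
    /// bounds let the closure return references into the set that outlive
    /// the unlock; this is sound because interned data is never moved or
    /// freed while the interner is alive.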
    fn with_inner<'this, F, R>(&'this self, f: F) -> R
    where
        F: FnOnce(&'this mut DroplessInternSet<S>) -> R,
        R: 'this,
    {
        unsafe {
            let set = self.inner.lock().as_mut();
            let ret = f(set);
            self.inner.unlock();
            ret
        }
    }
}

impl<S: Default> Default for DroplessInterner<S> {
    fn default() -> Self {
        let inner = unsafe { UnsafeLock::new(DroplessInternSet::default()) };
        Self { inner }
    }
}

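/// The single-threaded core of [`DroplessInterner`]: a bump arena for raw
/// bytes, a string buffer for formatted strings, and a hash table of
/// type-erased entries pointing into them.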
#[derive(Debug, Default)]
pub struct DroplessInternSet<S = fxhash::FxBuildHasher> {
    bump: Bump,
    str_buf: StringBuffer,
    set: HashTable<DynInternEntry<S>>,
    hash_builder: S,
}

impl DroplessInternSet {
    pub fn new() -> Self {
        Self::default()
    }
}

impl<S: BuildHasher> DroplessInternSet<S> {
    pub fn with_hasher(hash_builder: S) -> Self {
        Self {
            bump: Bump::new(),
            str_buf: StringBuffer::default(),
            set: HashTable::new(),
            hash_builder,
        }
    }

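    /// Interns `value` by its raw bytes.
    ///
    /// Zero-sized values are never stored: a dangling, well-aligned pointer
    /// is returned instead. Otherwise the bytes are hashed, looked up by
    /// layout and byte-wise equality, and copied into the bump arena on a
    /// miss.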
    pub fn intern<K: Dropless + ?Sized>(&mut self, value: &K) -> Interned<'_, K> {
        let src = value.as_byte_ptr();
        let layout = value.layout();

        unsafe {
            if layout.size() == 0 {
                let ptr = value as *const K;
                let ptr = ptr.with_addr(layout.align());
                return Interned::unique(&*ptr);
            }

            let bytes = slice::from_raw_parts(src.as_ptr(), layout.size());
            let hash = <K as Dropless>::hash(&self.hash_builder, bytes);
            let eq = Self::table_eq::<K>(bytes, layout);
            let hasher = Self::table_hasher(&self.hash_builder);
            let ref_ = match self.set.entry(hash, eq, hasher) {
                Entry::Occupied(entry) => Self::ref_from_entry(entry.get()),
                Entry::Vacant(entry) => {
                    let dst = self.bump.alloc_layout(layout);
                    ptr::copy_nonoverlapping(src.as_ptr(), dst.as_ptr(), layout.size());

                    let occupied = entry.insert(DynInternEntry {
                        data: RawInterned(dst).cast(),
                        layout,
                        hash: <K as Dropless>::hash,
                    });

                    Self::ref_from_entry(occupied.get())
                }
            };
            Interned::unique(ref_)
        }
    }

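    /// Formats `value` directly into the string buffer and interns the
    /// result, avoiding a temporary `String`.
    ///
    /// The bytes are written speculatively; the buffer space is only
    /// committed when the string turns out to be new. Fails with
    /// `fmt::Error` if the output exceeds `upper_size` bytes.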
    pub fn intern_formatted_str<K: Display + ?Sized>(
        &mut self,
        value: &K,
        upper_size: usize,
    ) -> Result<Interned<'_, str>, fmt::Error> {
        let mut write_buf = self.str_buf.speculative_alloc(upper_size);
        write!(write_buf, "{value}")?;
        let bytes = write_buf.as_bytes();

        unsafe {
            let value = str::from_utf8_unchecked(bytes);
            let layout = Layout::from_size_align_unchecked(bytes.len(), 1);

            if bytes.is_empty() {
                let ptr = value as *const str;
                let ptr = ptr.with_addr(layout.align());
                return Ok(Interned::unique(&*ptr));
            }

            let hash = <str as Dropless>::hash(&self.hash_builder, bytes);
            let eq = Self::table_eq::<str>(bytes, layout);
            let hasher = Self::table_hasher(&self.hash_builder);
            let ref_ = match self.set.entry(hash, eq, hasher) {
                Entry::Occupied(entry) => {
                    Self::ref_from_entry(entry.get())
                }
                Entry::Vacant(entry) => {
                    let ptr = NonNull::new_unchecked(bytes.as_ptr().cast_mut());

                    write_buf.commit();

                    let occupied = entry.insert(DynInternEntry {
                        data: RawInterned(ptr).cast(),
                        layout,
                        hash: <str as Dropless>::hash,
                    });

                    Self::ref_from_entry(occupied.get())
                }
            };
            Ok(Interned::unique(ref_))
        }
    }

    pub fn get<K: Dropless + ?Sized>(&self, value: &K) -> Option<Interned<'_, K>> {
        let ptr = value.as_byte_ptr();
        let layout = value.layout();
        unsafe {
            let bytes = slice::from_raw_parts(ptr.as_ptr(), layout.size());
            let hash = <K as Dropless>::hash(&self.hash_builder, bytes);
            let eq = Self::table_eq::<K>(bytes, layout);
            let entry = self.set.find(hash, eq)?;
            let ref_ = Self::ref_from_entry(entry);
            Some(Interned::unique(ref_))
        }
    }

    pub fn len(&self) -> usize {
        self.set.len()
    }

    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    pub fn clear(&mut self) {
        self.bump.reset();
        self.set.clear();
    }

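    /// Builds the equality closure used for table lookups: an entry matches
    /// only if its layout and raw bytes compare equal under `K`'s notion of
    /// equality.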
    #[inline]
    fn table_eq<K: Dropless + ?Sized>(
        key: &[u8],
        layout: Layout,
    ) -> impl FnMut(&DynInternEntry<S>) -> bool {
        move |entry: &DynInternEntry<S>| unsafe {
            if layout == entry.layout {
                let entry_bytes =
                    slice::from_raw_parts(entry.data.cast::<u8>().as_ptr(), entry.layout.size());
                <K as Dropless>::eq(key, entry_bytes)
            } else {
                false
            }
        }
    }

    #[inline]
    fn table_hasher(hash_builder: &S) -> impl Fn(&DynInternEntry<S>) -> u64 {
        |entry: &DynInternEntry<S>| unsafe {
            let entry_bytes =
                slice::from_raw_parts(entry.data.cast::<u8>().as_ptr(), entry.layout.size());
            (entry.hash)(hash_builder, entry_bytes)
        }
    }

    #[inline]
    unsafe fn ref_from_entry<'a, K: Dropless + ?Sized>(entry: &DynInternEntry<S>) -> &'a K {
        unsafe {
            let bytes =
                slice::from_raw_parts(entry.data.cast::<u8>().as_ptr(), entry.layout.size());
            K::from_bytes(bytes)
        }
    }
}

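/// An append-only, chunked byte buffer backing
/// [`DroplessInternSet::intern_formatted_str`].
///
/// Space is handed out speculatively: a writer borrows the tail of the last
/// chunk and only advances `last_chunk_start` on
/// [`StringWriteBuffer::commit`], so discarded writes (duplicates, format
/// errors) cost nothing.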
#[derive(Debug, Default)]
struct StringBuffer {
    chunks: Vec<Box<[MaybeUninit<u8>]>>,
    last_chunk_start: usize,
}

const INIT_CHUNK_SIZE: usize = 1 << 5;
const GROW_MAX_CHUNK_SIZE: usize = 1 << 12;

impl StringBuffer {
    fn speculative_alloc(&mut self, upper_size: usize) -> StringWriteBuffer<'_> {
        match self.chunks.last() {
            None => {
                let chunk_size = INIT_CHUNK_SIZE.max(upper_size.next_power_of_two());
                self.append_new_chunk(chunk_size)
            }
            Some(last_chunk) if last_chunk.len() - self.last_chunk_start < upper_size => {
                let chunk_size = (last_chunk.len() * 2)
                    .min(GROW_MAX_CHUNK_SIZE)
                    .max(upper_size.next_power_of_two());
                self.append_new_chunk(chunk_size);
            }
            _ => {}
        }

        let last_chunk = unsafe { self.chunks.last_mut().unwrap_unchecked() };

        let start = self.last_chunk_start;
        let end = self.last_chunk_start + upper_size;
        let buf = &mut last_chunk[start..end];
        StringWriteBuffer {
            buf,
            last_chunk_start: &mut self.last_chunk_start,
            written: 0,
        }
    }

    #[inline]
    fn append_new_chunk(&mut self, chunk_size: usize) {
        let chunk = Box::new_uninit_slice(chunk_size);
        self.chunks.push(chunk);
        self.last_chunk_start = 0;
    }
}

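/// A borrowed slice of a [`StringBuffer`] chunk to format into.
///
/// Dropping the writer discards the bytes; [`StringWriteBuffer::commit`]
/// makes them permanent by bumping the owning buffer's cursor.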
struct StringWriteBuffer<'a> {
    buf: &'a mut [MaybeUninit<u8>],
    last_chunk_start: &'a mut usize,
    written: usize,
}

impl<'a> StringWriteBuffer<'a> {
    #[inline]
    fn as_bytes(&self) -> &[u8] {
        let ptr = self.buf.as_ptr().cast::<u8>();
        unsafe { slice::from_raw_parts(ptr, self.written) }
    }

    #[inline]
    fn commit(self) {
        *self.last_chunk_start += self.written;
    }
}

impl Write for StringWriteBuffer<'_> {
    fn write_str(&mut self, s: &str) -> std::fmt::Result {
        let size = s.len();

        if self.buf.len() - self.written >= size {
            let src = s.as_ptr();

            let buf = unsafe { self.buf.get_unchecked_mut(self.written..) };
            let dst = buf.as_mut_ptr().cast::<u8>();

            unsafe { ptr::copy_nonoverlapping(src, dst, size) };

            self.written += size;
            Ok(())
        } else {
            Err(std::fmt::Error)
        }
    }
}

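/// A type-erased entry in the intern table.
///
/// `data` points into the bump arena or string buffer; `layout` and the
/// stored `hash` function stand in for the erased key type, while equality
/// is supplied per lookup by the querying key.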
#[derive(Debug)]
struct DynInternEntry<S> {
    data: RawInterned,
    layout: Layout,
    hash: unsafe fn(&S, &[u8]) -> u64,
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::common;
    use std::num::NonZeroU32;

    #[test]
    fn test_dropless_interner() {
        test_dropless_interner_int();
        test_dropless_interner_str();
        test_dropless_interner_bytes();
        test_dropless_interner_mixed();
        test_dropless_interner_many();
        test_dropless_interner_alignment_handling();
        test_dropless_interner_complex_display_type();
    }

    fn test_dropless_interner_int() {
        let interner = DroplessInterner::new();

        let a = interner.intern(&0_u32).erased_raw();
        let b = interner.intern(&0_u32).erased_raw();
        let c = interner.intern(&1_u32).erased_raw();
        let d = interner.intern(&1_i32).erased_raw();

        let groups: [&[RawInterned]; _] = [&[a, b], &[c, d]];
        common::assert_group_addr_eq(&groups);
    }

    fn test_dropless_interner_str() {
        let interner = DroplessInterner::new();

        let a = interner.intern("apple").erased_raw();
        let b = interner.intern(*Box::new("apple")).erased_raw();
        let c = interner.intern(&*String::from("apple")).erased_raw();

        let d = interner.intern("banana").erased_raw();

        let e = interner.intern("").erased_raw();
        let f = interner.intern(&*String::from("")).erased_raw();

        let g = interner.intern("42").erased_raw();
        let h = interner.intern_formatted_str(&42, 2).unwrap().erased_raw();

        let i = interner
            .intern_formatted_str(&NonZeroU32::new(43).unwrap(), 2)
            .unwrap()
            .erased_raw();
        let j = interner.intern(&*43.to_string()).erased_raw();

        let groups: [&[RawInterned]; _] = [&[a, b, c], &[d], &[e, f], &[g, h], &[i, j]];
        common::assert_group_addr_eq(&groups);
    }

    fn test_dropless_interner_bytes() {
        let interner = DroplessInterner::new();

        let a = interner.intern(&[0, 1]).erased_raw();
        let boxed: Box<[i32]> = Box::new([0, 1]);
        let b = interner.intern(&*boxed).erased_raw();
        let c = interner.intern(&*vec![0, 1]).erased_raw();
        let d = interner.intern(&[2, 3]).erased_raw();

        let groups: [&[RawInterned]; _] = [&[a, b, c], &[d]];
        common::assert_group_addr_eq(&groups);
    }

    fn test_dropless_interner_mixed() {
        let interner = DroplessInterner::new();

        interner.intern("apple");
        interner.intern(&1_u32);
        interner.intern(&[2, 3]);
        interner.intern_formatted_str(&42, 10).unwrap();

        assert_eq!(interner.get("apple").as_deref(), Some("apple"));
        assert!(interner.get("banana").is_none());

        assert_eq!(interner.get(&1_u32).as_deref(), Some(&1_u32));
        assert!(interner.get(&2_u32).is_none());

        assert_eq!(interner.get(&[2, 3]).as_deref(), Some(&[2, 3]));
        assert!(interner.get(&[2]).is_none());

        assert_eq!(interner.get("42").as_deref(), Some("42"));
    }

    fn test_dropless_interner_many() {
        let interner = DroplessInterner::new();

        let mut interned_int = Vec::new();
        let mut interned_str = Vec::new();
        let mut interned_bytes = Vec::new();

        const N: usize = 1000;

        let strs = (0..N).map(|i| (i * 10_000).to_string()).collect::<Vec<_>>();

        for i in 0..N {
            let int = &(i as u32);
            let str_ = &*strs[i];
            let bytes = &[i as u16];
            interned_int.push(interner.intern(int).erased_raw());
            interned_str.push(interner.intern(str_).erased_raw());
            interned_bytes.push(interner.intern(bytes).erased_raw());
        }

        interned_int.sort_unstable();
        interned_int.dedup();
        interned_str.sort_unstable();
        interned_str.dedup();
        interned_bytes.sort_unstable();
        interned_bytes.dedup();
        assert_eq!(interned_int.len(), N);
        assert_eq!(interned_str.len(), N);
        assert_eq!(interned_bytes.len(), N);
        let whole = interned_int
            .iter()
            .chain(&interned_str)
            .chain(&interned_bytes)
            .cloned()
            .collect::<fxhash::FxHashSet<_>>();
        assert_eq!(whole.len(), N * 3);

        for i in 0..N {
            let int = &(i as u32);
            let str_ = &*strs[i];
            let bytes = &[i as u16];
            assert_eq!(interner.get(int).as_deref(), Some(int));
            assert_eq!(interner.get(str_).as_deref(), Some(str_));
            assert_eq!(interner.get(bytes).as_deref(), Some(bytes));
        }
    }

    #[rustfmt::skip]
    fn test_dropless_interner_alignment_handling() {
        #[derive(PartialEq, Eq, Hash, Debug)] #[repr(C, align(4))] struct T4 (u16, [u8; 2]);
        #[derive(PartialEq, Eq, Hash, Debug)] #[repr(C, align(8))] struct T8 (u16, [u8; 6]);
        #[derive(PartialEq, Eq, Hash, Debug)] #[repr(C, align(16))] struct T16 (u16, [u8; 14]);

        macro_rules! impl_for_first_2bytes {
            () => {
                unsafe fn hash<S: BuildHasher>(hash_builder: &S, bytes: &[u8]) -> u64 {
                    hash_builder.hash_one(&bytes[0..2])
                }
                unsafe fn eq(a: &[u8], b: &[u8]) -> bool {
                    a[0..2] == b[0..2]
                }
            };
        }

        impl Dropless for T4 { simple_impl_dropless_fn!(from_bytes); impl_for_first_2bytes!(); }
        impl Dropless for T8 { simple_impl_dropless_fn!(from_bytes); impl_for_first_2bytes!(); }
        impl Dropless for T16 { simple_impl_dropless_fn!(from_bytes); impl_for_first_2bytes!(); }

        let interner = DroplessInterner::new();

        let mut interned_4 = Vec::new();
        let mut interned_8 = Vec::new();
        let mut interned_16 = Vec::new();

        const N: usize = 1000;

        for i in 0..N {
            let t4 = T4(i as u16, [4; _]);
            let t8 = T8(i as u16, [8; _]);
            let t16 = T16(i as u16, [16; _]);
            interned_4.push(interner.intern(&t4).erased_raw());
            interned_8.push(interner.intern(&t8).erased_raw());
            interned_16.push(interner.intern(&t16).erased_raw());
        }

        for i in 0..N {
            let t4 = T4(i as u16, [4; _]);
            let t8 = T8(i as u16, [8; _]);
            let t16 = T16(i as u16, [16; _]);
            unsafe {
                assert_eq!(*<Interned<'_, T4>>::from_erased_raw(interned_4[i]), t4);
                assert_eq!(*<Interned<'_, T8>>::from_erased_raw(interned_8[i]), t8);
                assert_eq!(*<Interned<'_, T16>>::from_erased_raw(interned_16[i]), t16);
            }
        }
    }

    fn test_dropless_interner_complex_display_type() {
        let interner = DroplessInterner::new();

        #[allow(unused)]
        #[derive(Debug)]
        struct A<'a> {
            int: i32,
            float: f32,
            text: &'a str,
            bytes: &'a [u8],
        }

        impl Display for A<'_> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                fmt::Debug::fmt(self, f)
            }
        }

        let a = A {
            int: 123,
            float: 456.789,
            text: "this is a text",
            bytes: &[1, 2, 3, 4, 5, 6, 7, 8, 9, 0],
        };

        let interned = interner.intern_formatted_str(&a, 1_000).unwrap();

        let mut s = String::new();
        write!(&mut s, "{a}").unwrap();
        assert_eq!(&*interned, s.as_str());
    }

    #[test]
    fn test_string_buffer() {
        test_string_buffer_chunk();
        test_string_buffer_discard();
        test_string_buffer_insufficient_upper_size();
        test_string_buffer_long_string();
    }

    fn test_string_buffer_chunk() {
        let mut buf = StringBuffer::default();
        assert_eq!(buf.chunks.len(), 0);

        let s = "a".repeat(INIT_CHUNK_SIZE);
        let mut write_buf = buf.speculative_alloc(s.len());
        write_buf.write_str(&s).unwrap();
        assert_eq!(write_buf.as_bytes(), s.as_bytes());
        write_buf.commit();

        assert_eq!(buf.chunks.len(), 1);
        assert_eq!(buf.last_chunk_start, s.len());

        let mut write_buf = buf.speculative_alloc(1);
        write_buf.write_str("a").unwrap();
        assert_eq!(write_buf.as_bytes(), b"a");
        write_buf.commit();

        assert_eq!(buf.chunks.len(), 2);
        assert_eq!(buf.last_chunk_start, 1);

        let mut write_buf = buf.speculative_alloc(GROW_MAX_CHUNK_SIZE);
        write_buf.write_str("aa").unwrap();
        assert_eq!(write_buf.as_bytes(), b"aa");
        write_buf.commit();

        assert_eq!(buf.chunks.len(), 3);
        assert_eq!(buf.last_chunk_start, 2);
    }

    fn test_string_buffer_discard() {
        let mut buf = StringBuffer::default();

        for _ in 0..10 {
            let s = "a".repeat(INIT_CHUNK_SIZE);
            let mut write_buf = buf.speculative_alloc(s.len());
            write_buf.write_str(&s).unwrap();
            assert_eq!(write_buf.as_bytes(), s.as_bytes());
            drop(write_buf);

            assert_eq!(buf.chunks.len(), 1);
            assert_eq!(buf.last_chunk_start, 0);
        }
    }

    fn test_string_buffer_insufficient_upper_size() {
        let mut buf = StringBuffer::default();

        for _ in 0..10 {
            let mut write_buf = buf.speculative_alloc(5);
            let res = write_buf.write_str("this is longer than 5");
            assert!(res.is_err());
            write_buf.commit();

            assert_eq!(buf.chunks.len(), 1);
            assert_eq!(buf.last_chunk_start, 0);
        }
    }

    fn test_string_buffer_long_string() {
        let mut buf = StringBuffer::default();

        let s = "a".repeat(GROW_MAX_CHUNK_SIZE * 10);
        let mut write_buf = buf.speculative_alloc(s.len());
        write_buf.write_str(&s).unwrap();
        assert_eq!(write_buf.as_bytes(), s.as_bytes());
        write_buf.commit();

        assert_eq!(buf.chunks.len(), 1);
        assert_eq!(buf.last_chunk_start, s.len());
    }
}