1use std::{
36 mem::{replace, take, MaybeUninit},
37 ptr::NonNull,
38 str::from_utf8_unchecked,
39};
40
41use crate::{
42 arena::EntryIndex,
43 lexis::{ByteIndex, Length},
44 mem::{array_copy_to, array_shift},
45 report::{ld_assert, ld_unreachable},
46 syntax::Node,
47 units::{
48 storage::{
49 branch::BranchRef,
50 child::{ChildCount, ChildCursor, ChildIndex},
51 item::{Item, ItemRef, ItemRefVariant, Split},
52 nesting::PageLayer,
53 refs::TreeRefs,
54 string::PageString,
55 Cache,
56 PAGE_B,
57 PAGE_CAP,
58 },
59 Watcher,
60 },
61};
62
/// A leaf of the storage tree.
///
/// A page stores up to [`PAGE_CAP`] token chunks in parallel arrays: for each
/// occupied slot there is a span, a token, a chunk entry index, and an
/// optional cache. The chunk text is kept in a single [`PageString`]. Pages
/// are additionally linked into a doubly-linked list via `previous`/`next`.
///
/// Invariant: only slots `0..occupied` of `spans`, `tokens`, `chunks`, and
/// `caches` are initialized/meaningful.
pub(super) struct Page<N: Node> {
    /// Back-reference to the slot in the parent branch that owns this page.
    /// Set to a dangling cursor on creation (see `Page::new`).
    pub(super) parent: ChildCursor<N>,

    /// The previous page in the leaf-level linked list, if any.
    pub(super) previous: Option<PageRef<N>>,

    /// The next page in the leaf-level linked list, if any.
    pub(super) next: Option<PageRef<N>>,

    /// The number of occupied slots.
    pub(super) occupied: ChildCount,

    /// Per-slot token spans.
    pub(super) spans: [Length; PAGE_CAP],

    /// Text storage shared by all occupied slots.
    pub(super) string: PageString,

    /// Per-slot tokens; only `0..occupied` are initialized.
    pub(super) tokens: [MaybeUninit<N::Token>; PAGE_CAP],

    /// Per-slot indices into the tree's chunk reference registry.
    pub(super) chunks: [EntryIndex; PAGE_CAP],

    /// Per-slot caches; only `0..occupied` are initialized.
    pub(super) caches: [MaybeUninit<Option<Box<Cache>>>; PAGE_CAP],
}
74
impl<N: Node> Item for Page<N> {
    // Minimal occupancy of a (non-root) page — the tree's balance factor.
    const B: ChildCount = PAGE_B;
    // Maximal number of chunk slots per page.
    const CAP: ChildCount = PAGE_CAP;

    type Node = N;

    #[inline(always)]
    fn occupied(&self) -> ChildCount {
        self.occupied
    }

    /// Copies `count` occupied slots starting at `source` into `to`,
    /// starting at `destination`.
    ///
    /// All parallel arrays (spans, text, tokens, chunk indices, caches) are
    /// copied. Tokens and caches are copied bitwise, so the caller must
    /// ensure each initialized value keeps exactly one logical owner
    /// afterwards.
    #[inline(always)]
    unsafe fn copy_to(
        &self,
        to: &mut Self,
        source: ChildCount,
        destination: ChildCount,
        count: ChildCount,
    ) {
        ld_assert!(
            source + count <= self.occupied,
            "An attempt to copy non occupied data in Page.",
        );

        unsafe { array_copy_to(&self.spans, &mut to.spans, source, destination, count) };

        unsafe {
            self.string.copy_to(
                self.occupied,
                &mut to.string,
                to.occupied,
                source,
                destination,
                count,
            )
        }

        unsafe { array_copy_to(&self.tokens, &mut to.tokens, source, destination, count) };

        unsafe { array_copy_to(&self.chunks, &mut to.chunks, source, destination, count) };

        unsafe { array_copy_to(&self.caches, &mut to.caches, source, destination, count) };
    }

    /// Opens a gap of `count` slots at index `from`, shifting the tail
    /// (`from..occupied`) to the right.
    ///
    /// The opened slots keep stale contents — the caller must initialize
    /// them before use. `occupied` grows by `count`.
    #[inline(always)]
    unsafe fn inflate(&mut self, from: ChildIndex, count: ChildCount) {
        ld_assert!(
            from <= self.occupied,
            "An attempt to inflate from out of bounds child in Page."
        );
        ld_assert!(
            count + self.occupied <= Self::CAP,
            "An attempt to inflate with overflow in Page."
        );
        ld_assert!(count > 0, "An attempt to inflate of empty range in Page.");

        // Shift only when there is a non-empty tail to preserve.
        if from < self.occupied {
            unsafe { array_shift(&mut self.spans, from, from + count, self.occupied - from) };
            unsafe { array_shift(&mut self.tokens, from, from + count, self.occupied - from) };
            unsafe { array_shift(&mut self.chunks, from, from + count, self.occupied - from) };
            unsafe { array_shift(&mut self.caches, from, from + count, self.occupied - from) };
        }

        unsafe { self.string.inflate(self.occupied, from, count) };

        self.occupied += count;
    }

    /// Removes `count` slots starting at `from`, shifting the tail left to
    /// close the gap.
    ///
    /// The removed tokens and caches are NOT dropped here; the caller is
    /// responsible for dropping or moving them out beforehand.
    ///
    /// Returns `true` if the page still satisfies the minimal occupancy
    /// requirement (`occupied >= B`) after deflation.
    #[inline(always)]
    unsafe fn deflate(&mut self, from: ChildIndex, count: ChildCount) -> bool {
        ld_assert!(
            from < self.occupied,
            "An attempt to deflate from non occupied child in Page."
        );
        ld_assert!(
            from + count <= self.occupied,
            "An attempt to deflate with overflow in Page."
        );
        ld_assert!(count > 0, "An attempt to deflate of empty range.");

        unsafe { self.string.deflate(self.occupied, from, count) };

        // Close the gap by shifting the tail left (if a tail exists).
        if from + count < self.occupied {
            unsafe {
                array_shift(
                    &mut self.spans,
                    from + count,
                    from,
                    self.occupied - from - count,
                )
            };
            unsafe {
                array_shift(
                    &mut self.tokens,
                    from + count,
                    from,
                    self.occupied - from - count,
                )
            };
            unsafe {
                array_shift(
                    &mut self.chunks,
                    from + count,
                    from,
                    self.occupied - from - count,
                )
            };
            unsafe {
                array_shift(
                    &mut self.caches,
                    from + count,
                    from,
                    self.occupied - from - count,
                )
            };
        }

        self.occupied -= count;

        self.occupied >= Self::B
    }
}
197
impl<N: Node> Page<N> {
    /// Allocates a new page on the heap with `occupied` slots reserved, and
    /// returns a raw reference to it.
    ///
    /// The reserved slots (spans, tokens, chunk indices, caches) are NOT
    /// initialized; the caller must fill all `occupied` slots before they
    /// are read through any path that assumes initialization.
    #[inline(always)]
    pub(super) fn new(occupied: ChildCount) -> PageRef<N> {
        ld_assert!(
            occupied > 0,
            "An attempt to create Page with zero occupied values."
        );

        ld_assert!(
            occupied <= Self::CAP,
            "An attempt to create Page with occupied value exceeding capacity."
        );

        let page = Self {
            parent: ChildCursor::dangling(),
            previous: None,
            next: None,
            occupied,
            spans: Default::default(),
            string: PageString::default(),
            // SAFETY: an array of `MaybeUninit` does not require
            // initialization.
            tokens: unsafe { MaybeUninit::uninit().assume_init() },
            chunks: Default::default(),
            // SAFETY: an array of `MaybeUninit` does not require
            // initialization.
            caches: unsafe { MaybeUninit::uninit().assume_init() },
        };

        // SAFETY: `Box::into_raw` never returns a null pointer.
        let pointer = unsafe { NonNull::new_unchecked(Box::into_raw(Box::new(page))) };

        PageRef { pointer }
    }

    /// Moves this page's lexical data out into flat vectors.
    ///
    /// Appends the slot spans to `spans`, moves the tokens into `tokens`,
    /// pushes each chunk's byte index (offset by the current `text` length)
    /// into `indices`, and appends the page text to `text`. Slot caches are
    /// dropped. Leaves the page with `occupied == 0`.
    pub(super) fn take_lexis(
        &mut self,
        spans: &mut Vec<Length>,
        tokens: &mut Vec<N::Token>,
        indices: &mut Vec<ByteIndex>,
        text: &mut String,
    ) {
        for index in 0..self.occupied {
            spans.push({
                let span = *unsafe { self.spans.get_unchecked(index) };
                span
            });

            tokens.push({
                let token = unsafe { self.tokens.get_unchecked(index) };
                // SAFETY: slots below `occupied` are initialized; `occupied`
                // is reset to zero below, so each token is moved, not
                // duplicated.
                unsafe { token.assume_init_read() }
            });

            {
                let byte_index = unsafe { self.string.get_byte_index(index) };
                indices.push(text.len() + byte_index);
            }

            // Drop the slot's cache (if any) by swapping in `None`.
            let _ = take(unsafe { self.caches.get_unchecked_mut(index).assume_init_mut() });
        }

        // SAFETY: the page string bytes are expected to be valid UTF-8
        // (required by `from_utf8_unchecked`) — upheld by `PageString`'s
        // write paths.
        let string = unsafe { from_utf8_unchecked(self.string.bytes()) };

        text.push_str(string);

        self.occupied = 0;
    }

    /// Releases every occupied slot of this page: drops the tokens, removes
    /// the chunk entries from `refs`, and frees the slot caches (reporting
    /// to `watcher`).
    ///
    /// Returns the number of slots that were occupied.
    pub(super) unsafe fn free_subtree(
        mut self,
        refs: &mut TreeRefs<N>,
        watcher: &mut impl Watcher,
    ) -> ChildCount {
        for index in 0..self.occupied {
            let token = unsafe { self.tokens.get_unchecked_mut(index) };

            // SAFETY: slots below `occupied` are initialized.
            unsafe { token.assume_init_drop() };

            let chunk_index = *unsafe { self.chunks.get_unchecked(index) };

            let _ = unsafe { refs.chunks.remove_unchecked(chunk_index) };

            let cache = take(unsafe { self.caches.get_unchecked_mut(index).assume_init_mut() });

            if let Some(cache) = cache {
                cache.free(refs, watcher);
            }
        }

        self.occupied
    }

    /// Drops the tokens and caches of every occupied slot without touching
    /// any external registry (contrast with `free_subtree`).
    pub(super) unsafe fn free(mut self) {
        for index in 0..self.occupied {
            let token = unsafe { self.tokens.get_unchecked_mut(index) };

            // SAFETY: slots below `occupied` are initialized.
            unsafe { token.assume_init_drop() };

            // Drop the slot's cache (if any) by swapping in `None`.
            let _ = take(unsafe { self.caches.get_unchecked_mut(index).assume_init_mut() });
        }
    }
}
297
/// A raw, non-owning reference to a heap-allocated [`Page`].
///
/// `#[repr(transparent)]`: guaranteed to have the same layout as the
/// underlying `NonNull` pointer.
#[repr(transparent)]
pub(super) struct PageRef<N: Node> {
    pointer: NonNull<Page<N>>,
}
302
303impl<N: Node> Clone for PageRef<N> {
304 #[inline(always)]
305 fn clone(&self) -> Self {
306 *self
307 }
308}
309
310impl<N: Node> Copy for PageRef<N> {}
311
312impl<N: Node> PartialEq for PageRef<N> {
313 #[inline(always)]
314 fn eq(&self, other: &Self) -> bool {
315 self.pointer == other.pointer
316 }
317}
318
319impl<N: Node> Eq for PageRef<N> {}
320
321impl<N: Node> ItemRef<(), N> for PageRef<N> {
322 type SelfLayer = PageLayer;
323
324 type Item = Page<N>;
325
326 #[inline(always)]
327 fn dangling() -> Self {
328 Self {
329 pointer: NonNull::dangling(),
330 }
331 }
332
333 #[inline(always)]
334 unsafe fn as_ref(&self) -> &Self::Item {
335 unsafe { self.pointer.as_ref() }
336 }
337
338 #[inline(always)]
339 unsafe fn as_mut(&mut self) -> &mut Self::Item {
340 unsafe { self.pointer.as_mut() }
341 }
342
343 #[inline(always)]
344 unsafe fn into_variant(self) -> ItemRefVariant<N> {
345 ItemRefVariant::from_page(self)
346 }
347
348 #[inline(always)]
349 unsafe fn into_owned(self) -> Box<Self::Item> {
350 unsafe { Box::from_raw(self.pointer.as_ptr()) }
351 }
352
353 #[inline(always)]
354 unsafe fn calculate_length(&self) -> Length {
355 let page = unsafe { self.as_ref() };
356
357 let mut length = 0;
358
359 for index in 0..page.occupied {
360 length += unsafe { page.spans.get_unchecked(index) };
361 }
362
363 length
364 }
365
366 #[inline(always)]
367 unsafe fn parent(&self) -> &ChildCursor<N> {
368 unsafe { &self.as_ref().parent }
369 }
370
371 #[inline(always)]
372 unsafe fn set_parent(&mut self, parent: ChildCursor<N>) {
373 unsafe { self.as_mut().parent = parent };
374 }
375
376 #[inline(always)]
377 unsafe fn parent_mut(&mut self) -> &mut BranchRef<Self::SelfLayer, N> {
378 let parent_entry_index = unsafe { &mut self.as_mut().parent };
379
380 ld_assert!(
381 !parent_entry_index.is_dangling(),
382 "An attempt to get parent from root.",
383 );
384
385 unsafe { parent_entry_index.item.as_branch_mut() }
386 }
387
388 unsafe fn update_children(
389 &mut self,
390 refs: &mut TreeRefs<N>,
391 from: ChildIndex,
392 count: ChildCount,
393 ) -> Length {
394 let self_variant = self.into_variant();
395
396 let page = unsafe { self.as_mut() };
397
398 ld_assert!(
399 from + count <= page.occupied,
400 "An attempt to update refs in non occupied data in Page.",
401 );
402
403 let mut length = 0;
404
405 for index in from..(from + count) {
406 length += *unsafe { page.spans.get_unchecked(index) };
407
408 {
409 let chunk_index = *unsafe { page.chunks.get_unchecked(index) };
410 let chunk_ref = unsafe { refs.chunks.get_unchecked_mut(chunk_index) };
411
412 chunk_ref.item = self_variant;
413 chunk_ref.index = index;
414 }
415 }
416
417 length
418 }
419
420 #[inline]
421 unsafe fn split(
422 &mut self,
423 refs: &mut TreeRefs<N>,
424 _children_split: Split<N>,
425 length: Length,
426 from: ChildIndex,
427 ) -> Split<N> {
428 let mut parent_split = Split::dangling();
429
430 let occupied = unsafe { self.as_ref().occupied };
431
432 ld_assert!(from < occupied, "Split at position out of bounds.",);
433
434 match from == 0 {
435 true => {
436 parent_split.right_span = length;
437 parent_split.right_item = unsafe { self.into_variant() };
438
439 parent_split.left_span = 0;
440 }
441
442 false => {
443 let left = unsafe { self.as_mut() };
444 let mut right_ref = Page::new(occupied - from);
445
446 match &mut left.next {
447 None => (),
448
449 Some(next) => {
450 unsafe { PageRef::interconnect(&mut right_ref, next) };
451
452 left.next = None;
453 }
454 };
455
456 unsafe { left.copy_to(right_ref.as_mut(), from, 0, occupied - from) };
457 unsafe { left.string.deflate(left.occupied, from, occupied - from) };
458 left.occupied = from;
459
460 parent_split.right_span =
461 unsafe { right_ref.update_children(refs, 0, occupied - from) };
462 parent_split.right_item = unsafe { right_ref.into_variant() };
463
464 parent_split.left_span = length - parent_split.right_span;
465 parent_split.left_item = unsafe { self.into_variant() };
466 }
467 }
468
469 parent_split
470 }
471}
472
impl<N: Node> PageRef<N> {
    /// Links `left` and `right` as neighbors in the leaf-level linked list.
    #[inline(always)]
    pub(super) unsafe fn interconnect(left: &mut Self, right: &mut Self) {
        unsafe {
            left.as_mut().next = Some(*right);
        }

        unsafe {
            right.as_mut().previous = Some(*left);
        }
    }

    /// Clears this page's `previous` link.
    #[inline(always)]
    pub(super) unsafe fn disconnect_left(&mut self) {
        unsafe {
            self.as_mut().previous = None;
        }
    }

    /// Clears this page's `next` link.
    #[inline(always)]
    pub(super) unsafe fn disconnect_right(&mut self) {
        unsafe {
            self.as_mut().next = None;
        }
    }

    /// Reborrows the page with a caller-chosen lifetime.
    ///
    /// The caller must guarantee that the page outlives `'a` and that no
    /// conflicting mutable access occurs during `'a`.
    #[inline(always)]
    pub(super) unsafe fn as_external_ref<'a>(&self) -> &'a Page<N> {
        unsafe { self.pointer.as_ref() }
    }

    /// Mutably reborrows the page with a caller-chosen lifetime from a
    /// shared `PageRef`.
    ///
    /// The caller must guarantee exclusive access to the page during `'a`.
    #[inline(always)]
    pub(super) unsafe fn as_external_mut<'a>(&self) -> &'a mut Page<N> {
        let mut pointer = self.pointer;

        unsafe { pointer.as_mut() }
    }

    /// Rewrites `count` already-occupied slots starting at `from` with new
    /// spans, tokens, and text in place.
    ///
    /// `indices` supplies the byte indices of the new chunks within `text`;
    /// the consumed prefix (`count` entries) is stripped from the slice.
    /// Old tokens are dropped, old caches are freed (reported to
    /// `watcher`), and each slot's chunk entry is upgraded in `refs`
    /// (NOTE(review): presumably invalidating previously issued chunk
    /// references — confirm against the `TreeRefs` implementation).
    ///
    /// Returns `(dec, inc)`: the total span length removed and added.
    #[inline]
    pub(super) unsafe fn rewrite(
        &mut self,
        refs: &mut TreeRefs<N>,
        watcher: &mut impl Watcher,
        from: ChildIndex,
        count: ChildCount,
        spans: &mut impl Iterator<Item = Length>,
        indices: &mut &[ByteIndex],
        tokens: &mut impl Iterator<Item = N::Token>,
        text: &str,
    ) -> (Length, Length) {
        let page = unsafe { self.as_mut() };

        ld_assert!(
            from < page.occupied,
            "An attempt to rewrite from non occupied child in Page."
        );
        ld_assert!(
            from + count <= page.occupied,
            "An attempt to rewrite with overflow in Page."
        );
        ld_assert!(count > 0, "An attempt to rewrite of empty range.");

        // Running totals of the removed (dec) and added (inc) span lengths.
        let mut dec = 0;
        let mut inc = 0;

        refs.chunks.commit(true);

        unsafe {
            page.string
                .rewrite(page.occupied, from, text.as_bytes(), indices, count)
        };

        // Strip the consumed byte indices from the input slice.
        *indices = unsafe { &indices[count..] };

        for index in from..(from + count) {
            ld_assert!(index < Page::<N>::CAP, "Chunk index is out of bounds.",);

            let new_span = match spans.next() {
                Some(span) => span,
                None => unsafe { ld_unreachable!("Spans iterator exceeded.") },
            };

            ld_assert!(new_span > 0, "Zero input span.");

            let new_token = match tokens.next() {
                Some(token) => token,
                None => unsafe { ld_unreachable!("Tokens iterator exceeded.") },
            };

            let span = unsafe { page.spans.get_unchecked_mut(index) };
            let token = unsafe { page.tokens.get_unchecked_mut(index).assume_init_mut() };
            let chunk_index = unsafe { *page.chunks.get_unchecked(index) };
            // Detach the old cache (if any); freed below.
            let cache = take(unsafe { page.caches.get_unchecked_mut(index).assume_init_mut() });

            dec += *span;
            inc += new_span;

            *span = new_span;
            // Drops the old token in place and stores the new one.
            let _ = replace(token, new_token);

            unsafe { refs.chunks.upgrade(chunk_index) };

            if let Some(cache) = cache {
                cache.free(refs, watcher);
            }
        }

        (dec, inc)
    }

    /// Removes `count` occupied slots starting at `from`.
    ///
    /// Drops the removed tokens, removes their chunk entries from `refs`,
    /// frees their caches (reported to `watcher`), shifts the tail left,
    /// and re-points the shifted chunk entries at their new in-page
    /// indices.
    ///
    /// Returns the total span length removed.
    #[inline]
    pub(super) unsafe fn remove(
        &mut self,
        refs: &mut TreeRefs<N>,
        watcher: &mut impl Watcher,
        from: ChildIndex,
        count: ChildCount,
    ) -> Length {
        let page = unsafe { self.as_mut() };

        ld_assert!(
            from < page.occupied,
            "An attempt to remove from non occupied child in Page."
        );
        ld_assert!(
            from + count <= page.occupied,
            "An attempt to remove with overflow in Page."
        );
        ld_assert!(count > 0, "An attempt to remove of empty range.");

        let mut length = 0;

        // Release the removed slots before shifting the tail over them.
        for index in from..(from + count) {
            let span = unsafe { *page.spans.get_unchecked(index) };

            unsafe { page.tokens.get_unchecked_mut(index).assume_init_drop() };

            let chunk_index = unsafe { *page.chunks.get_unchecked(index) };

            let _ = unsafe { refs.chunks.remove_unchecked(chunk_index) };

            let cache = take(unsafe { page.caches.get_unchecked_mut(index).assume_init_mut() });

            if let Some(cache) = cache {
                cache.free(refs, watcher);
            }

            length += span;
        }

        // Close the gap by shifting the tail left (if a tail exists).
        if from + count < page.occupied {
            unsafe {
                array_shift(
                    &mut page.spans,
                    from + count,
                    from,
                    page.occupied - from - count,
                )
            };
            unsafe {
                array_shift(
                    &mut page.tokens,
                    from + count,
                    from,
                    page.occupied - from - count,
                )
            };
            unsafe {
                array_shift(
                    &mut page.chunks,
                    from + count,
                    from,
                    page.occupied - from - count,
                )
            };
            unsafe {
                array_shift(
                    &mut page.caches,
                    from + count,
                    from,
                    page.occupied - from - count,
                )
            };

            // The shifted slots moved to new indices; update their chunk
            // entries accordingly.
            for index in from..(page.occupied - count) {
                {
                    let chunk_index = *unsafe { page.chunks.get_unchecked(index) };
                    let chunk_ref = unsafe { refs.chunks.get_unchecked_mut(chunk_index) };

                    chunk_ref.index = index;
                }
            }
        }

        unsafe { page.string.deflate(page.occupied, from, count) };
        page.occupied -= count;

        length
    }

    /// Inserts `count` new slots at `from`, filling them from the `spans`,
    /// `tokens`, and `text`/`indices` inputs, and registers a chunk entry
    /// in `refs` for each new slot.
    ///
    /// The consumed prefix (`count` entries) is stripped from `indices`.
    /// Chunk entries of the shifted tail are re-pointed at their new
    /// in-page indices.
    ///
    /// Returns the total span length inserted.
    #[inline]
    pub(super) unsafe fn insert(
        &mut self,
        refs: &mut TreeRefs<N>,
        from: ChildIndex,
        count: ChildCount,
        spans: &mut impl Iterator<Item = Length>,
        indices: &mut &[ByteIndex],
        tokens: &mut impl Iterator<Item = N::Token>,
        text: &str,
    ) -> Length {
        let self_ref_variant = unsafe { self.into_variant() };

        let page = unsafe { self.as_mut() };

        ld_assert!(
            from <= page.occupied,
            "An attempt to insert from non occupied child in Page."
        );
        ld_assert!(
            from + count <= Page::<N>::CAP,
            "An attempt to insert with overflow in Page."
        );
        ld_assert!(count > 0, "An attempt to insert of empty range.");

        // Open an uninitialized gap of `count` slots at `from`.
        unsafe { page.inflate(from, count) };

        unsafe {
            page.string
                .rewrite(page.occupied, from, text.as_bytes(), indices, count)
        };

        // Strip the consumed byte indices from the input slice.
        *indices = unsafe { &indices[count..] };

        let mut length = 0;

        for index in from..(from + count) {
            ld_assert!(index < Page::<N>::CAP, "Chunk index is out of bounds.",);

            let new_span = match spans.next() {
                Some(span) => span,

                None => unsafe { ld_unreachable!("Spans iterator exceeded.") },
            };

            ld_assert!(new_span > 0, "Zero input span.");

            let new_token = match tokens.next() {
                Some(token) => token,
                None => unsafe { ld_unreachable!("Tokens iterator exceeded.") },
            };

            length += new_span;

            unsafe {
                *page.spans.get_unchecked_mut(index) = new_span;
            }

            unsafe {
                page.tokens.get_unchecked_mut(index).write(new_token);
            }

            // Register a chunk entry pointing back at this page and slot.
            unsafe {
                *page.chunks.get_unchecked_mut(index) = refs.chunks.insert_raw(ChildCursor {
                    item: self_ref_variant,
                    index,
                })
            }

            unsafe {
                page.caches.get_unchecked_mut(index).write(None);
            }
        }

        // The tail moved right by `count`; update its chunk entries.
        for index in (from + count)..page.occupied {
            {
                let chunk_index = *unsafe { page.chunks.get_unchecked(index) };
                let chunk_ref = unsafe { refs.chunks.get_unchecked_mut(chunk_index) };

                chunk_ref.index = index;
            }
        }

        length
    }
}
788
/// A run of leaf pages referenced by its first and last page.
pub(super) struct PageList<N: Node> {
    /// The first page of the list.
    pub(super) first: PageRef<N>,
    /// The last page of the list.
    pub(super) last: PageRef<N>,
}
793
794impl<N: Node> Clone for PageList<N> {
795 #[inline(always)]
796 fn clone(&self) -> Self {
797 *self
798 }
799}
800
801impl<N: Node> Copy for PageList<N> {}
802
803impl<N: Node> PageList<N> {
804 #[inline(always)]
805 pub(super) fn dangling() -> Self {
806 Self {
807 first: PageRef::dangling(),
808 last: PageRef::dangling(),
809 }
810 }
811}