1use crate::{Finalize, pointer_alignment_padding, type_hash::TypeHash};
2use smallvec::SmallVec;
3use std::{
4 alloc::Layout,
5 collections::{HashMap, hash_map::Entry},
6 ops::Range,
7};
8
/// Type-erased destructor plus the padded layout of the value it finalizes.
#[derive(Debug, Copy, Clone)]
struct DataStackFinalizer {
    // Drop-in-place callback; receives a pointer to the value bytes.
    callback: unsafe fn(*mut ()),
    // Padded layout of the stored value.
    layout: Layout,
}
14
/// Header written above every register slot.
///
/// On-stack layout of one register, low to high address:
/// `[padding][value][tag][TypeHash]` — the stored tag offset points at
/// the tag, and the value sits at `tag_offset - layout.size()`.
#[derive(Debug, Copy, Clone)]
struct DataStackRegisterTag {
    type_hash: TypeHash,
    layout: Layout,
    // `Some` while the slot holds a live value; `None` when empty.
    finalizer: Option<unsafe fn(*mut ())>,
    // Alignment padding bytes inserted before the value.
    padding: u8,
}
22
/// Opaque snapshot of a stack position, produced by [`DataStack::store`].
pub struct DataStackToken(usize);
24
impl DataStackToken {
    /// Creates a token for an arbitrary position.
    ///
    /// # Safety
    /// `position` must be a boundary between whole entries of the stack
    /// this token is later used with; otherwise unwinding misreads memory.
    pub unsafe fn new(position: usize) -> Self {
        Self(position)
    }
}
31
/// Borrowed handle to a single register slot inside a [`DataStack`].
pub struct DataStackRegisterAccess<'a> {
    stack: &'a mut DataStack,
    // Byte offset of this register's tag within the stack memory.
    position: usize,
}
36
37impl<'a> DataStackRegisterAccess<'a> {
    /// Reads the type hash stored in this register's tag.
    ///
    /// The tag lives at `position`; the value sits just below it at
    /// `position - layout.size()`.
    pub fn type_hash(&self) -> TypeHash {
        unsafe {
            self.stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned()
                .type_hash
        }
    }

    /// Reads the padded layout recorded for this register's value slot.
    pub fn layout(&self) -> Layout {
        unsafe {
            self.stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned()
                .layout
        }
    }

    /// Reads both type hash and layout in a single tag read.
    pub fn type_hash_layout(&self) -> (TypeHash, Layout) {
        unsafe {
            let tag = self
                .stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned();
            (tag.type_hash, tag.layout)
        }
    }

    /// Returns `true` when the register currently holds a live value
    /// (a tag with `finalizer == None` marks an empty slot).
    pub fn has_value(&self) -> bool {
        unsafe {
            self.stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned()
                .finalizer
                .is_some()
        }
    }
87
    /// Borrows the stored value as `&T`.
    ///
    /// Returns `None` when the register holds a different type or is empty.
    pub fn read<T: 'static>(&'a self) -> Option<&'a T> {
        unsafe {
            let tag = self
                .stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned();
            if tag.type_hash == TypeHash::of::<T>() && tag.finalizer.is_some() {
                // The value bytes sit directly below the tag.
                self.stack
                    .memory
                    .as_ptr()
                    .add(self.position - tag.layout.size())
                    .cast::<T>()
                    .as_ref()
            } else {
                None
            }
        }
    }
109
    /// Borrows the stored value as `&mut T`.
    ///
    /// Returns `None` when the register holds a different type or is empty.
    pub fn write<T: 'static>(&'a mut self) -> Option<&'a mut T> {
        unsafe {
            let tag = self
                .stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned();
            if tag.type_hash == TypeHash::of::<T>() && tag.finalizer.is_some() {
                self.stack
                    .memory
                    .as_mut_ptr()
                    .add(self.position - tag.layout.size())
                    .cast::<T>()
                    .as_mut()
            } else {
                None
            }
        }
    }
131
    /// Moves the value out of the register, leaving the slot empty.
    ///
    /// Returns `None` on type mismatch or when the slot is empty.
    pub fn take<T: 'static>(&mut self) -> Option<T> {
        unsafe {
            let mut tag = self
                .stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned();
            if tag.type_hash == TypeHash::of::<T>() && tag.finalizer.is_some() {
                // Clear the finalizer first: ownership moves to the caller.
                tag.finalizer = None;
                self.stack
                    .memory
                    .as_mut_ptr()
                    .add(self.position)
                    .cast::<DataStackRegisterTag>()
                    .write_unaligned(tag);
                Some(
                    self.stack
                        .memory
                        .as_ptr()
                        .add(self.position - tag.layout.size())
                        .cast::<T>()
                        .read_unaligned(),
                )
            } else {
                None
            }
        }
    }
162
    /// Finalizes the stored value in place and marks the register empty.
    ///
    /// Returns `true` if there was a value to free.
    pub fn free(&mut self) -> bool {
        unsafe {
            let mut tag = self
                .stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned();
            if let Some(finalizer) = tag.finalizer {
                // Run the destructor on the value bytes below the tag.
                (finalizer)(
                    self.stack
                        .memory
                        .as_mut_ptr()
                        .add(self.position - tag.layout.size())
                        .cast::<()>(),
                );
                tag.finalizer = None;
                self.stack
                    .memory
                    .as_mut_ptr()
                    .add(self.position)
                    .cast::<DataStackRegisterTag>()
                    .write_unaligned(tag);
                true
            } else {
                false
            }
        }
    }
193
    /// Stores `value` in the register.
    ///
    /// Silently does nothing when `T` does not match the register's
    /// declared type. Any previous value is finalized first; when the slot
    /// was empty, `T::finalize_raw` is installed as the finalizer.
    pub fn set<T: Finalize + 'static>(&mut self, value: T) {
        unsafe {
            let mut tag = self
                .stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned();
            if tag.type_hash == TypeHash::of::<T>() {
                if let Some(finalizer) = tag.finalizer {
                    // Occupied: destroy the old value before overwriting.
                    (finalizer)(
                        self.stack
                            .memory
                            .as_mut_ptr()
                            .add(self.position - tag.layout.size())
                            .cast::<()>(),
                    );
                } else {
                    // Empty: the slot now owns a value of type T.
                    tag.finalizer = Some(T::finalize_raw);
                }
                self.stack
                    .memory
                    .as_mut_ptr()
                    .add(self.position - tag.layout.size())
                    .cast::<T>()
                    .write_unaligned(value);
                self.stack
                    .memory
                    .as_mut_ptr()
                    .add(self.position)
                    .cast::<DataStackRegisterTag>()
                    .write_unaligned(tag);
            }
        }
    }
230
231 pub fn move_to(&mut self, other: &mut Self) {
232 if self.position == other.position {
233 return;
234 }
235 unsafe {
236 let mut tag = self
237 .stack
238 .memory
239 .as_ptr()
240 .add(self.position)
241 .cast::<DataStackRegisterTag>()
242 .read_unaligned();
243 let other_tag = other
244 .stack
245 .memory
246 .as_ptr()
247 .add(self.position)
248 .cast::<DataStackRegisterTag>()
249 .read_unaligned();
250 if tag.type_hash == other_tag.type_hash && tag.layout == other_tag.layout {
251 if let Some(finalizer) = other_tag.finalizer {
252 (finalizer)(
253 self.stack
254 .memory
255 .as_mut_ptr()
256 .add(other.position - other_tag.layout.size())
257 .cast::<()>(),
258 );
259 }
260 tag.finalizer = None;
261 let source = self
262 .stack
263 .memory
264 .as_ptr()
265 .add(self.position - tag.layout.size());
266 let target = self
267 .stack
268 .memory
269 .as_mut_ptr()
270 .add(other.position - other_tag.layout.size());
271 target.copy_from(source, tag.layout.size());
272 self.stack
273 .memory
274 .as_mut_ptr()
275 .add(self.position)
276 .cast::<DataStackRegisterTag>()
277 .write_unaligned(tag);
278 }
279 }
280 }
281}
282
/// Determines which kinds of entries a [`DataStack`] accepts.
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)]
pub enum DataStackMode {
    /// Plain values only.
    Values,
    /// Registers only.
    Registers,
    /// Both values and registers.
    #[default]
    Mixed,
}
290
291impl DataStackMode {
292 pub fn allows_values(self) -> bool {
293 matches!(self, Self::Values | Self::Mixed)
294 }
295
296 pub fn allows_registers(self) -> bool {
297 matches!(self, Self::Registers | Self::Mixed)
298 }
299}
300
/// Item reported to the callback of [`DataStack::visit`].
pub enum DataStackVisitedItem<'a> {
    /// A plain value entry.
    Value {
        type_hash: TypeHash,
        layout: Layout,
        // Raw bytes of the value.
        data: &'a [u8],
        // Byte range of the value within the stack memory.
        range: Range<usize>,
    },
    /// A register entry.
    Register {
        type_hash: TypeHash,
        layout: Layout,
        data: &'a [u8],
        range: Range<usize>,
        // Whether the register currently holds a live value.
        valid: bool,
    },
}
316
/// Byte-buffer backed stack storing type-erased values and registers.
pub struct DataStack {
    memory: Vec<u8>,
    // Top of the stack; bytes below this offset are live.
    position: usize,
    // Which entry kinds (values/registers) this stack accepts.
    mode: DataStackMode,
    // Known destructors keyed by type, used when popping/dropping values.
    finalizers: HashMap<TypeHash, DataStackFinalizer>,
    // Tag offsets of registers, in push order.
    registers: Vec<usize>,
    // When false (see `prevent_drop`), contents are leaked on drop.
    drop: bool,
}
325
impl Drop for DataStack {
    fn drop(&mut self) {
        // Finalize all remaining contents unless `prevent_drop` was called
        // (e.g. after the contents were moved into another stack).
        if self.drop {
            self.restore(DataStackToken(0));
        }
    }
}
333
334impl DataStack {
335 pub fn new(mut capacity: usize, mode: DataStackMode) -> Self {
336 capacity = capacity.next_power_of_two();
337 Self {
338 memory: vec![0; capacity],
339 position: 0,
340 mode,
341 finalizers: Default::default(),
342 registers: vec![],
343 drop: true,
344 }
345 }
346
    /// Current top-of-stack byte offset.
    pub fn position(&self) -> usize {
        self.position
    }

    /// Total capacity of the backing buffer in bytes.
    pub fn size(&self) -> usize {
        self.memory.len()
    }

    /// Remaining free bytes above the current position.
    pub fn available(&self) -> usize {
        self.size().saturating_sub(self.position)
    }

    /// Raw view of the live portion of the stack memory.
    pub fn as_bytes(&self) -> &[u8] {
        &self.memory[0..self.position]
    }
362
    /// Walks the stack from the top downwards, reporting every value and
    /// register until `f` returns `false` or the walk would run past the
    /// start of the stack.
    ///
    /// NOTE(review): values whose type hash has no registered finalizer are
    /// skipped by only their type-hash header, which can desync the walk on
    /// unknown types — confirm this is intended.
    pub fn visit(&self, mut f: impl FnMut(DataStackVisitedItem) -> bool) {
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        let tag_layout = Layout::new::<DataStackRegisterTag>().pad_to_align();
        let mut position = self.position;
        while position > 0 {
            if position < type_layout.size() {
                return;
            }
            position -= type_layout.size();
            // Every entry ends with its type hash at the top.
            let type_hash = unsafe {
                self.memory
                    .as_ptr()
                    .add(position)
                    .cast::<TypeHash>()
                    .read_unaligned()
            };
            if type_hash == TypeHash::of::<DataStackRegisterTag>() {
                // Register entry: [padding][value][tag][TypeHash].
                if position < tag_layout.size() {
                    return;
                }
                position -= tag_layout.size();
                let tag = unsafe {
                    self.memory
                        .as_ptr()
                        .add(position)
                        .cast::<DataStackRegisterTag>()
                        .read_unaligned()
                };
                if position < tag.layout.size() {
                    return;
                }
                position -= tag.layout.size();
                let range = position..(position + tag.layout.size());
                let status = f(DataStackVisitedItem::Register {
                    type_hash: tag.type_hash,
                    layout: tag.layout,
                    data: &self.memory[range.clone()],
                    range,
                    valid: tag.finalizer.is_some(),
                });
                if !status {
                    return;
                }
                // Skip the alignment padding inserted before the value.
                position -= tag.padding as usize;
            } else if let Some(finalizer) = self.finalizers.get(&type_hash) {
                // Plain value entry: [value][TypeHash].
                if position < finalizer.layout.size() {
                    return;
                }
                position -= finalizer.layout.size();
                let range = position..(position + finalizer.layout.size());
                let status = f(DataStackVisitedItem::Value {
                    type_hash,
                    layout: finalizer.layout,
                    data: &self.memory[range.clone()],
                    range,
                });
                if !status {
                    return;
                }
            }
        }
    }
425
    /// Pushes `value` onto the stack, recording its finalizer for later
    /// drops/pops. Entry layout: `[value][TypeHash]`.
    ///
    /// Returns `false` when this mode forbids values or there is not
    /// enough space.
    pub fn push<T: Finalize + Sized + 'static>(&mut self, value: T) -> bool {
        if !self.mode.allows_values() {
            return false;
        }
        let value_layout = Layout::new::<T>().pad_to_align();
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        if self.position + value_layout.size() + type_layout.size() > self.size() {
            return false;
        }
        let type_hash = TypeHash::of::<T>();
        // Remember how to destroy a T popped/dropped later.
        self.finalizers
            .entry(type_hash)
            .or_insert(DataStackFinalizer {
                callback: T::finalize_raw,
                layout: value_layout,
            });
        unsafe {
            self.memory
                .as_mut_ptr()
                .add(self.position)
                .cast::<T>()
                .write_unaligned(value);
            self.position += value_layout.size();
            self.memory
                .as_mut_ptr()
                .add(self.position)
                .cast::<TypeHash>()
                .write_unaligned(type_hash);
            self.position += type_layout.size();
        }
        true
    }
458
459 pub unsafe fn push_raw(
461 &mut self,
462 layout: Layout,
463 type_hash: TypeHash,
464 finalizer: unsafe fn(*mut ()),
465 data: &[u8],
466 ) -> bool {
467 if !self.mode.allows_values() {
468 return false;
469 }
470 let value_layout = layout.pad_to_align();
471 let type_layout = Layout::new::<TypeHash>().pad_to_align();
472 if data.len() != value_layout.size()
473 && self.position + value_layout.size() + type_layout.size() > self.size()
474 {
475 return false;
476 }
477 self.finalizers
478 .entry(type_hash)
479 .or_insert(DataStackFinalizer {
480 callback: finalizer,
481 layout: value_layout,
482 });
483 self.memory[self.position..(self.position + value_layout.size())].copy_from_slice(data);
484 self.position += value_layout.size();
485 unsafe {
486 self.memory
487 .as_mut_ptr()
488 .add(self.position)
489 .cast::<TypeHash>()
490 .write_unaligned(type_hash)
491 };
492 self.position += type_layout.size();
493 true
494 }
495
    /// Allocates an empty register slot for `T` and returns its index.
    pub fn push_register<T: Finalize + 'static>(&mut self) -> Option<usize> {
        unsafe { self.push_register_raw(TypeHash::of::<T>(), Layout::new::<T>().pad_to_align()) }
    }
499
500 pub fn push_register_value<T: Finalize + 'static>(&mut self, value: T) -> Option<usize> {
501 let result = self.push_register::<T>()?;
502 let mut access = self.access_register(result)?;
503 access.set(value);
504 Some(result)
505 }
506
    /// Allocates an empty register slot described by `type_hash` and
    /// `value_layout`, returning its index into the registers list.
    ///
    /// Writes `[padding][<uninitialized value>][tag][TypeHash]` and
    /// records the tag offset in `self.registers`.
    ///
    /// # Safety
    /// `value_layout` must be the padded layout of the type identified by
    /// `type_hash`.
    pub unsafe fn push_register_raw(
        &mut self,
        type_hash: TypeHash,
        value_layout: Layout,
    ) -> Option<usize> {
        if !self.mode.allows_registers() {
            return None;
        }
        let tag_layout = Layout::new::<DataStackRegisterTag>().pad_to_align();
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        // Padding so the value starts at a properly aligned address.
        let padding = unsafe { self.alignment_padding(value_layout.align()) };
        if self.position + padding + value_layout.size() + tag_layout.size() + type_layout.size()
            > self.size()
        {
            return None;
        }
        unsafe {
            // Skip past padding and the (uninitialized) value slot; the tag
            // is written at this position and the value lives just below it.
            self.position += padding + value_layout.size();
            let position = self.position;
            self.memory
                .as_mut_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .write_unaligned(DataStackRegisterTag {
                    type_hash,
                    layout: value_layout,
                    finalizer: None,
                    padding: padding as u8,
                });
            self.position += tag_layout.size();
            self.memory
                .as_mut_ptr()
                .add(self.position)
                .cast::<TypeHash>()
                .write_unaligned(TypeHash::of::<DataStackRegisterTag>());
            self.position += type_layout.size();
            self.registers.push(position);
            Some(self.registers.len() - 1)
        }
    }
548
549 pub fn push_stack(&mut self, mut other: Self) -> Result<(), Self> {
550 if self.available() < other.position {
551 return Err(other);
552 }
553 self.memory[self.position..(self.position + other.position)]
554 .copy_from_slice(&other.memory[0..other.position]);
555 self.position += other.position;
556 self.finalizers
557 .extend(other.finalizers.iter().map(|(key, value)| {
558 (
559 *key,
560 DataStackFinalizer {
561 callback: value.callback,
562 layout: value.layout,
563 },
564 )
565 }));
566 unsafe { other.prevent_drop() };
567 Ok(())
568 }
569
    /// Copies the register's value onto this stack as a plain value entry,
    /// leaving the source register empty.
    ///
    /// Registers the value's finalizer under its type hash when not known
    /// yet. Returns `false` when values are not allowed or space is
    /// insufficient.
    ///
    /// NOTE(review): the value bytes are copied even when the source
    /// register is empty (`finalizer == None`) — confirm callers only pass
    /// occupied registers.
    pub fn push_from_register(&mut self, register: &mut DataStackRegisterAccess) -> bool {
        if !self.mode.allows_values() {
            return false;
        }
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        let mut tag = unsafe {
            register
                .stack
                .memory
                .as_ptr()
                .add(register.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned()
        };
        if self.position + tag.layout.size() + type_layout.size() > self.size() {
            return false;
        }
        // Learn this type's finalizer from the register if we don't know it.
        if let Entry::Vacant(e) = self.finalizers.entry(tag.type_hash)
            && let Some(finalizer) = tag.finalizer
        {
            e.insert(DataStackFinalizer {
                callback: finalizer,
                layout: tag.layout,
            });
        }
        // Ownership transfers to this stack; mark the register empty.
        tag.finalizer = None;
        unsafe {
            let source = register
                .stack
                .memory
                .as_ptr()
                .add(register.position - tag.layout.size());
            let target = self.memory.as_mut_ptr().add(self.position);
            target.copy_from(source, tag.layout.size());
            self.position += tag.layout.size();
            self.memory
                .as_mut_ptr()
                .add(self.position)
                .cast::<TypeHash>()
                .write_unaligned(tag.type_hash);
            self.position += type_layout.size();
            register
                .stack
                .memory
                .as_mut_ptr()
                .add(register.position)
                .cast::<DataStackRegisterTag>()
                .write_unaligned(tag);
        }
        true
    }
621
    /// Pops the top value as `T`.
    ///
    /// Returns `None` when values are not allowed, the stack is too short,
    /// or the top entry is not a `T` (register entries are never popped
    /// here). The caller takes ownership; no finalizer runs.
    pub fn pop<T: Sized + 'static>(&mut self) -> Option<T> {
        if !self.mode.allows_values() {
            return None;
        }
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        let value_layout = Layout::new::<T>().pad_to_align();
        if self.position < type_layout.size() + value_layout.size() {
            return None;
        }
        let type_hash = unsafe {
            self.memory
                .as_mut_ptr()
                .add(self.position - type_layout.size())
                .cast::<TypeHash>()
                .read_unaligned()
        };
        if type_hash != TypeHash::of::<T>() || type_hash == TypeHash::of::<DataStackRegisterTag>() {
            return None;
        }
        self.position -= type_layout.size();
        let result = unsafe {
            self.memory
                .as_ptr()
                .add(self.position - value_layout.size())
                .cast::<T>()
                .read_unaligned()
        };
        self.position -= value_layout.size();
        Some(result)
    }
652
    /// Pops the top value in type-erased form:
    /// `(layout, type_hash, finalizer, bytes)`.
    ///
    /// Returns `None` for register entries or types without a registered
    /// finalizer.
    ///
    /// # Safety
    /// The caller becomes responsible for the value: the returned bytes
    /// must eventually be finalized (or pushed back via [`Self::push_raw`]),
    /// otherwise the value leaks.
    #[allow(clippy::type_complexity)]
    pub unsafe fn pop_raw(&mut self) -> Option<(Layout, TypeHash, unsafe fn(*mut ()), Vec<u8>)> {
        if !self.mode.allows_values() {
            return None;
        }
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        if self.position < type_layout.size() {
            return None;
        }
        let type_hash = unsafe {
            self.memory
                .as_mut_ptr()
                .add(self.position - type_layout.size())
                .cast::<TypeHash>()
                .read_unaligned()
        };
        if type_hash == TypeHash::of::<DataStackRegisterTag>() {
            return None;
        }
        let finalizer = self.finalizers.get(&type_hash)?;
        if self.position < type_layout.size() + finalizer.layout.size() {
            return None;
        }
        self.position -= type_layout.size();
        let data = self.memory[(self.position - finalizer.layout.size())..self.position].to_vec();
        self.position -= finalizer.layout.size();
        Some((finalizer.layout, type_hash, finalizer.callback, data))
    }
682
683 pub fn drop(&mut self) -> bool {
684 if !self.mode.allows_values() {
685 return false;
686 }
687 let type_layout = Layout::new::<TypeHash>().pad_to_align();
688 self.position -= type_layout.size();
689 let type_hash = unsafe {
690 self.memory
691 .as_ptr()
692 .add(self.position)
693 .cast::<TypeHash>()
694 .read_unaligned()
695 };
696 if type_hash == TypeHash::of::<DataStackRegisterTag>() {
697 return false;
698 }
699 if let Some(finalizer) = self.finalizers.get(&type_hash) {
700 self.position -= finalizer.layout.size();
701 unsafe {
702 (finalizer.callback)(self.memory.as_mut_ptr().add(self.position).cast::<()>());
703 }
704 }
705 true
706 }
707
708 pub fn drop_register(&mut self) -> bool {
709 if !self.mode.allows_registers() {
710 return false;
711 }
712 let tag_layout = Layout::new::<DataStackRegisterTag>().pad_to_align();
713 let type_layout = Layout::new::<TypeHash>().pad_to_align();
714 unsafe {
715 let type_hash = self
716 .memory
717 .as_mut_ptr()
718 .add(self.position - type_layout.size())
719 .cast::<TypeHash>()
720 .read_unaligned();
721 if type_hash != TypeHash::of::<DataStackRegisterTag>() {
722 return false;
723 }
724 self.position -= type_layout.size();
725 self.position -= tag_layout.size();
726 let tag = self
727 .memory
728 .as_ptr()
729 .add(self.position)
730 .cast::<DataStackRegisterTag>()
731 .read_unaligned();
732 self.position -= tag.layout.size() - tag.padding as usize;
733 if let Some(finalizer) = tag.finalizer {
734 (finalizer)(self.memory.as_mut_ptr().add(self.position).cast::<()>());
735 }
736 self.registers.pop();
737 }
738 true
739 }
740
    /// Splits off the top `data_count` value entries into a new stack.
    ///
    /// The new stack receives a copy of the bytes plus the finalizers
    /// needed to destroy them; this stack's position is rewound past the
    /// moved data. `capacity` optionally sizes the new stack (clamped to at
    /// least the moved size).
    ///
    /// NOTE(review): register entries are not understood by this walk — a
    /// register within the popped range would desync the size accounting;
    /// confirm this is only used on value data (as in the tests).
    pub fn pop_stack(&mut self, mut data_count: usize, capacity: Option<usize>) -> Self {
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        let mut size = 0;
        let mut position = self.position;
        let mut finalizers = HashMap::new();
        while data_count > 0 && position > 0 {
            data_count -= 1;
            position -= type_layout.size();
            size += type_layout.size();
            let type_hash = unsafe {
                self.memory
                    .as_mut_ptr()
                    .add(position)
                    .cast::<TypeHash>()
                    .read_unaligned()
            };
            if let Some(finalizer) = self.finalizers.get(&type_hash) {
                position -= finalizer.layout.size();
                size += finalizer.layout.size();
                // The new stack needs this type's destructor too.
                finalizers.insert(
                    type_hash,
                    DataStackFinalizer {
                        callback: finalizer.callback,
                        layout: finalizer.layout,
                    },
                );
            }
        }
        let mut result = Self::new(capacity.unwrap_or(size).max(size), self.mode);
        result.memory[0..size].copy_from_slice(&self.memory[position..self.position]);
        result.finalizers.extend(finalizers);
        self.position = position;
        result.position = size;
        result
    }
776
    /// Pops the top value off this stack directly into `register`.
    ///
    /// Requires matching type hashes and a finalizer registered for the
    /// type; any value already in the register is finalized first. Returns
    /// `false` when any precondition fails, leaving both stacks untouched.
    pub fn pop_to_register(&mut self, register: &mut DataStackRegisterAccess) -> bool {
        if !self.mode.allows_values() {
            return false;
        }
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        if self.position < type_layout.size() {
            return false;
        }
        let type_hash = unsafe {
            self.memory
                .as_mut_ptr()
                .add(self.position - type_layout.size())
                .cast::<TypeHash>()
                .read_unaligned()
        };
        let mut tag = unsafe {
            register
                .stack
                .memory
                .as_ptr()
                .add(register.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned()
        };
        if type_hash != tag.type_hash || type_hash == TypeHash::of::<DataStackRegisterTag>() {
            return false;
        }
        if self.position < type_layout.size() + tag.layout.size() {
            return false;
        }
        let finalizer = match self.finalizers.get(&type_hash) {
            Some(finalizer) => finalizer.callback,
            None => return false,
        };
        unsafe {
            // Destroy the register's current value, if any.
            if let Some(finalizer) = tag.finalizer {
                (finalizer)(
                    register
                        .stack
                        .memory
                        .as_mut_ptr()
                        .add(register.position - tag.layout.size())
                        .cast::<()>(),
                );
            }
            // The register now owns the popped value.
            tag.finalizer = Some(finalizer);
            let source = self
                .memory
                .as_ptr()
                .add(self.position - type_layout.size() - tag.layout.size());
            let target = register
                .stack
                .memory
                .as_mut_ptr()
                .add(register.position - tag.layout.size());
            target.copy_from(source, tag.layout.size());
            register
                .stack
                .memory
                .as_mut_ptr()
                .add(register.position)
                .cast::<DataStackRegisterTag>()
                .write_unaligned(tag);
        }
        self.position -= type_layout.size();
        self.position -= tag.layout.size();
        true
    }
845
    /// Captures the current position so it can later be passed to
    /// [`Self::restore`] or [`Self::reverse`].
    pub fn store(&self) -> DataStackToken {
        DataStackToken(self.position)
    }
849
    /// Unwinds the stack down to `token`, finalizing every value and every
    /// occupied register above it.
    pub fn restore(&mut self, token: DataStackToken) {
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        let tag_layout = Layout::new::<DataStackRegisterTag>().pad_to_align();
        let tag_type_hash = TypeHash::of::<DataStackRegisterTag>();
        while self.position > token.0 {
            self.position -= type_layout.size();
            let type_hash = unsafe {
                self.memory
                    .as_ptr()
                    .add(self.position)
                    .cast::<TypeHash>()
                    .read_unaligned()
            };
            if type_hash == tag_type_hash {
                // Register entry: finalize the value (if any), then skip
                // the alignment padding.
                unsafe {
                    let tag = self
                        .memory
                        .as_ptr()
                        .add(self.position - tag_layout.size())
                        .cast::<DataStackRegisterTag>()
                        .read_unaligned();
                    self.position -= tag_layout.size();
                    self.position -= tag.layout.size();
                    if let Some(finalizer) = tag.finalizer {
                        (finalizer)(self.memory.as_mut_ptr().add(self.position).cast::<()>());
                    }
                    self.position -= tag.padding as usize;
                    self.registers.pop();
                }
            } else if let Some(finalizer) = self.finalizers.get(&type_hash) {
                // Plain value entry: finalize in place.
                self.position -= finalizer.layout.size();
                unsafe {
                    (finalizer.callback)(self.memory.as_mut_ptr().add(self.position).cast::<()>());
                }
            }
        }
    }
887
888 pub fn reverse(&mut self, token: DataStackToken) {
889 let size = self.position.saturating_sub(token.0);
890 let mut meta_data = SmallVec::<[_; 8]>::with_capacity(8);
891 let mut meta_registers = 0;
892 let type_layout = Layout::new::<TypeHash>().pad_to_align();
893 let tag_layout = Layout::new::<DataStackRegisterTag>().pad_to_align();
894 let tag_type_hash = TypeHash::of::<DataStackRegisterTag>();
895 let mut position = self.position;
896 while position > token.0 {
897 position -= type_layout.size();
898 let type_hash = unsafe {
899 self.memory
900 .as_mut_ptr()
901 .add(position)
902 .cast::<TypeHash>()
903 .read_unaligned()
904 };
905 if type_hash == tag_type_hash {
906 unsafe {
907 let tag = self
908 .memory
909 .as_ptr()
910 .add(self.position - tag_layout.size())
911 .cast::<DataStackRegisterTag>()
912 .read_unaligned();
913 position -= tag_layout.size();
914 position -= tag.layout.size();
915 meta_data.push((
916 position - token.0,
917 type_layout.size() + tag_layout.size() + tag.layout.size(),
918 ));
919 meta_registers += 1;
920 }
921 } else if let Some(finalizer) = self.finalizers.get(&type_hash) {
922 position -= finalizer.layout.size();
923 meta_data.push((
924 position - token.0,
925 type_layout.size() + finalizer.layout.size(),
926 ));
927 }
928 }
929 if meta_data.len() <= 1 {
930 return;
931 }
932 let mut memory = SmallVec::<[_; 256]>::new();
933 memory.resize(size, 0);
934 memory.copy_from_slice(&self.memory[token.0..self.position]);
935 for (source_position, size) in meta_data {
936 self.memory[position..(position + size)]
937 .copy_from_slice(&memory[source_position..(source_position + size)]);
938 position += size;
939 }
940 let start = self.registers.len() - meta_registers;
941 self.registers[start..].reverse();
942 }
943
    /// Returns the type hash of the topmost entry without popping it.
    ///
    /// NOTE(review): assumes a non-zero position always has a complete
    /// `TypeHash` header on top; a position smaller than the header size
    /// would underflow — confirm callers maintain that invariant.
    pub fn peek(&self) -> Option<TypeHash> {
        if self.position == 0 {
            return None;
        }
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        Some(unsafe {
            self.memory
                .as_ptr()
                .add(self.position - type_layout.size())
                .cast::<TypeHash>()
                .read_unaligned()
        })
    }
957
    /// Number of registers currently allocated on this stack.
    pub fn registers_count(&self) -> usize {
        self.registers.len()
    }
961
962 pub fn access_register(&'_ mut self, index: usize) -> Option<DataStackRegisterAccess<'_>> {
963 let position = *self.registers.get(index)?;
964 Some(DataStackRegisterAccess {
965 stack: self,
966 position,
967 })
968 }
969
    /// Returns simultaneous access handles to two distinct registers.
    ///
    /// Returns `None` for identical or out-of-range indices.
    ///
    /// SAFETY NOTE(review): both handles alias `self` mutably through raw
    /// pointers; soundness relies on the two register slots never
    /// overlapping and the handles not being used to resize the stack —
    /// confirm no API hands out duplicate register positions.
    pub fn access_registers_pair(
        &'_ mut self,
        a: usize,
        b: usize,
    ) -> Option<(DataStackRegisterAccess<'_>, DataStackRegisterAccess<'_>)> {
        if a == b {
            return None;
        }
        let position_a = *self.registers.get(a)?;
        let position_b = *self.registers.get(b)?;
        unsafe {
            Some((
                DataStackRegisterAccess {
                    stack: (self as *mut Self).as_mut()?,
                    position: position_a,
                },
                DataStackRegisterAccess {
                    stack: (self as *mut Self).as_mut()?,
                    position: position_b,
                },
            ))
        }
    }
993
    /// Disables finalization of remaining contents when this stack drops.
    ///
    /// # Safety
    /// The caller takes responsibility for the stored values (e.g. after
    /// moving the bytes into another stack); otherwise their destructors
    /// are leaked.
    pub unsafe fn prevent_drop(&mut self) {
        self.drop = false;
    }
998
    /// Number of padding bytes needed so the next write at `position` is
    /// aligned to `alignment`.
    #[inline]
    unsafe fn alignment_padding(&self, alignment: usize) -> usize {
        pointer_alignment_padding(
            unsafe { self.memory.as_ptr().add(self.position) },
            alignment,
        )
    }
1007}
1008
/// Packing/unpacking of Rust values (tuples) to and from a [`DataStack`].
pub trait DataStackPack: Sized {
    /// Pushes `self`'s components onto `stack` in declaration order.
    fn stack_push(self, stack: &mut DataStack);

    /// Pushes the components, then reverses them on the stack so
    /// [`Self::stack_pop`] can pop them back in declaration order.
    fn stack_push_reversed(self, stack: &mut DataStack) {
        let token = stack.store();
        self.stack_push(stack);
        stack.reverse(token);
    }

    /// Pops the components back off `stack`.
    fn stack_pop(stack: &mut DataStack) -> Self;

    /// Type hashes of the components, in declaration order.
    fn pack_types() -> Vec<TypeHash>;
}
1022
/// The empty pack: pushes and pops nothing.
impl DataStackPack for () {
    fn stack_push(self, _: &mut DataStack) {}

    fn stack_pop(_: &mut DataStack) -> Self {}

    fn pack_types() -> Vec<TypeHash> {
        vec![]
    }
}
1032
/// Implements [`DataStackPack`] for tuples of `'static` types.
///
/// Pushing writes the fields left-to-right; popping pops them in the same
/// declaration order, which only works when the values were pushed via
/// `stack_push_reversed` (the stack is LIFO).
macro_rules! impl_data_stack_tuple {
    ($($type:ident),+) => {
        impl<$($type: 'static),+> DataStackPack for ($($type,)+) {
            #[allow(non_snake_case)]
            fn stack_push(self, stack: &mut DataStack) {
                let ($( $type, )+) = self;
                $( stack.push($type); )+
            }

            #[allow(non_snake_case)]
            fn stack_pop(stack: &mut DataStack) -> Self {
                ($(
                    stack.pop::<$type>().unwrap_or_else(
                        || panic!("Could not pop data of type: {}", std::any::type_name::<$type>())
                    ),
                )+)
            }

            #[allow(non_snake_case)]
            fn pack_types() -> Vec<TypeHash> {
                vec![ $( TypeHash::of::<$type>() ),+ ]
            }
        }
    };
}
1058
// Provide DataStackPack for tuples of up to 16 elements.
impl_data_stack_tuple!(A);
impl_data_stack_tuple!(A, B);
impl_data_stack_tuple!(A, B, C);
impl_data_stack_tuple!(A, B, C, D);
impl_data_stack_tuple!(A, B, C, D, E);
impl_data_stack_tuple!(A, B, C, D, E, F);
impl_data_stack_tuple!(A, B, C, D, E, F, G);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K, L);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P);
1075
#[cfg(test)]
mod tests {
    use crate::{
        data_stack::{DataStack, DataStackMode},
        type_hash::TypeHash,
    };
    use std::{alloc::Layout, cell::RefCell, rc::Rc};

    #[test]
    fn test_data_stack() {
        // Sets its shared flag when dropped, so finalization is observable.
        struct Droppable(Rc<RefCell<bool>>);

        impl Drop for Droppable {
            fn drop(&mut self) {
                *self.0.borrow_mut() = true;
            }
        }

        let dropped = Rc::new(RefCell::new(false));
        let mut stack = DataStack::new(10240, DataStackMode::Values);
        // Capacity is rounded up to the next power of two.
        assert_eq!(stack.size(), 16384);
        assert_eq!(stack.position(), 0);
        stack.push(Droppable(dropped.clone()));
        // Expected positions differ per feature because TypeHash is larger
        // when it carries a debug name.
        assert_eq!(
            stack.position(),
            if cfg!(feature = "typehash_debug_name") {
                32
            } else {
                16
            }
        );
        let token = stack.store();
        stack.push(42_usize);
        assert_eq!(
            stack.position(),
            if cfg!(feature = "typehash_debug_name") {
                64
            } else {
                32
            }
        );
        stack.push(true);
        assert_eq!(
            stack.position(),
            if cfg!(feature = "typehash_debug_name") {
                89
            } else {
                41
            }
        );
        stack.push(4.2_f32);
        assert_eq!(
            stack.position(),
            if cfg!(feature = "typehash_debug_name") {
                117
            } else {
                53
            }
        );
        assert!(!*dropped.borrow());
        assert!(stack.pop::<()>().is_none());
        stack.push(());
        assert_eq!(
            stack.position(),
            if cfg!(feature = "typehash_debug_name") {
                141
            } else {
                61
            }
        );
        stack.reverse(token);
        let mut stack2 = stack.pop_stack(2, None);
        assert_eq!(
            stack.position(),
            if cfg!(feature = "typehash_debug_name") {
                84
            } else {
                36
            }
        );
        assert_eq!(
            stack2.size(),
            if cfg!(feature = "typehash_debug_name") {
                64
            } else {
                32
            }
        );
        assert_eq!(
            stack2.position(),
            if cfg!(feature = "typehash_debug_name") {
                57
            } else {
                25
            }
        );
        assert_eq!(stack2.pop::<usize>().unwrap(), 42_usize);
        assert_eq!(
            stack2.position(),
            if cfg!(feature = "typehash_debug_name") {
                25
            } else {
                9
            }
        );
        assert!(stack2.pop::<bool>().unwrap());
        assert_eq!(stack2.position(), 0);
        stack2.push(true);
        stack2.push(42_usize);
        stack.push_stack(stack2).ok().unwrap();
        assert_eq!(
            stack.position(),
            if cfg!(feature = "typehash_debug_name") {
                141
            } else {
                61
            }
        );
        assert_eq!(stack.pop::<usize>().unwrap(), 42_usize);
        assert_eq!(
            stack.position(),
            if cfg!(feature = "typehash_debug_name") {
                109
            } else {
                45
            }
        );
        assert!(stack.pop::<bool>().unwrap());
        assert_eq!(
            stack.position(),
            if cfg!(feature = "typehash_debug_name") {
                84
            } else {
                36
            }
        );
        assert_eq!(stack.pop::<f32>().unwrap(), 4.2_f32);
        assert_eq!(
            stack.position(),
            if cfg!(feature = "typehash_debug_name") {
                56
            } else {
                24
            }
        );
        stack.pop::<()>().unwrap();
        assert_eq!(
            stack.position(),
            if cfg!(feature = "typehash_debug_name") {
                32
            } else {
                16
            }
        );
        stack.push(42_usize);
        unsafe {
            // Round-trip through the raw (type-erased) pop/push API.
            let (layout, type_hash, finalizer, data) = stack.pop_raw().unwrap();
            assert_eq!(layout, Layout::new::<usize>().pad_to_align());
            assert_eq!(type_hash, TypeHash::of::<usize>());
            assert!(stack.push_raw(layout, type_hash, finalizer, &data));
            assert_eq!(
                stack.position(),
                if cfg!(feature = "typehash_debug_name") {
                    64
                } else {
                    32
                }
            );
            assert_eq!(stack.pop::<usize>().unwrap(), 42_usize);
            assert_eq!(
                stack.position(),
                if cfg!(feature = "typehash_debug_name") {
                    32
                } else {
                    16
                }
            );
        }
        drop(stack);
        // Dropping the stack finalizes the remaining Droppable.
        assert!(*dropped.borrow());

        // Register-mode stack: allocate, drop, fill, take, overwrite.
        let mut stack = DataStack::new(10240, DataStackMode::Registers);
        assert_eq!(stack.size(), 16384);
        stack.push_register::<bool>().unwrap();
        stack.drop_register();
        let a = stack.push_register_value(true).unwrap();
        assert!(*stack.access_register(a).unwrap().read::<bool>().unwrap());
        assert!(stack.access_register(a).unwrap().take::<bool>().unwrap());
        assert!(!stack.access_register(a).unwrap().has_value());
        let b = stack.push_register_value(0usize).unwrap();
        stack.access_register(b).unwrap().set(42usize);
        assert_eq!(
            *stack.access_register(b).unwrap().read::<usize>().unwrap(),
            42
        );
    }
}