1use crate::{Finalize, pointer_alignment_padding, type_hash::TypeHash};
2use smallvec::SmallVec;
3use std::{
4 alloc::Layout,
5 collections::{HashMap, hash_map::Entry},
6 ops::Range,
7};
8
9#[derive(Debug, Copy, Clone)]
10struct DataStackFinalizer {
11 callback: unsafe fn(*mut ()),
12 layout: Layout,
13}
14
15#[derive(Debug, Copy, Clone)]
16struct DataStackRegisterTag {
17 type_hash: TypeHash,
18 layout: Layout,
19 finalizer: Option<unsafe fn(*mut ())>,
20 padding: u8,
21}
22
23pub struct DataStackToken(usize);
24
25impl DataStackToken {
26 pub unsafe fn new(position: usize) -> Self {
28 Self(position)
29 }
30}
31
32pub struct DataStackRegisterAccess<'a> {
33 stack: &'a mut DataStack,
34 position: usize,
35}
36
37impl<'a> DataStackRegisterAccess<'a> {
38 pub fn type_hash(&self) -> TypeHash {
39 unsafe {
40 self.stack
41 .memory
42 .as_ptr()
43 .add(self.position)
44 .cast::<DataStackRegisterTag>()
45 .read_unaligned()
46 .type_hash
47 }
48 }
49
50 pub fn layout(&self) -> Layout {
51 unsafe {
52 self.stack
53 .memory
54 .as_ptr()
55 .add(self.position)
56 .cast::<DataStackRegisterTag>()
57 .read_unaligned()
58 .layout
59 }
60 }
61
62 pub fn type_hash_layout(&self) -> (TypeHash, Layout) {
63 unsafe {
64 let tag = self
65 .stack
66 .memory
67 .as_ptr()
68 .add(self.position)
69 .cast::<DataStackRegisterTag>()
70 .read_unaligned();
71 (tag.type_hash, tag.layout)
72 }
73 }
74
75 pub fn has_value(&self) -> bool {
76 unsafe {
77 self.stack
78 .memory
79 .as_ptr()
80 .add(self.position)
81 .cast::<DataStackRegisterTag>()
82 .read_unaligned()
83 .finalizer
84 .is_some()
85 }
86 }
87
88 pub fn read<T: 'static>(&'a self) -> Option<&'a T> {
89 unsafe {
90 let tag = self
91 .stack
92 .memory
93 .as_ptr()
94 .add(self.position)
95 .cast::<DataStackRegisterTag>()
96 .read_unaligned();
97 if tag.type_hash == TypeHash::of::<T>() && tag.finalizer.is_some() {
98 self.stack
99 .memory
100 .as_ptr()
101 .add(self.position - tag.layout.size())
102 .cast::<T>()
103 .as_ref()
104 } else {
105 None
106 }
107 }
108 }
109
110 pub fn write<T: 'static>(&'a mut self) -> Option<&'a mut T> {
111 unsafe {
112 let tag = self
113 .stack
114 .memory
115 .as_ptr()
116 .add(self.position)
117 .cast::<DataStackRegisterTag>()
118 .read_unaligned();
119 if tag.type_hash == TypeHash::of::<T>() && tag.finalizer.is_some() {
120 self.stack
121 .memory
122 .as_mut_ptr()
123 .add(self.position - tag.layout.size())
124 .cast::<T>()
125 .as_mut()
126 } else {
127 None
128 }
129 }
130 }
131
132 pub fn take<T: 'static>(&mut self) -> Option<T> {
133 unsafe {
134 let mut tag = self
135 .stack
136 .memory
137 .as_ptr()
138 .add(self.position)
139 .cast::<DataStackRegisterTag>()
140 .read_unaligned();
141 if tag.type_hash == TypeHash::of::<T>() && tag.finalizer.is_some() {
142 tag.finalizer = None;
143 self.stack
144 .memory
145 .as_mut_ptr()
146 .add(self.position)
147 .cast::<DataStackRegisterTag>()
148 .write_unaligned(tag);
149 Some(
150 self.stack
151 .memory
152 .as_ptr()
153 .add(self.position - tag.layout.size())
154 .cast::<T>()
155 .read_unaligned(),
156 )
157 } else {
158 None
159 }
160 }
161 }
162
163 pub fn free(&mut self) -> bool {
164 unsafe {
165 let mut tag = self
166 .stack
167 .memory
168 .as_ptr()
169 .add(self.position)
170 .cast::<DataStackRegisterTag>()
171 .read_unaligned();
172 if let Some(finalizer) = tag.finalizer {
173 (finalizer)(
174 self.stack
175 .memory
176 .as_mut_ptr()
177 .add(self.position - tag.layout.size())
178 .cast::<()>(),
179 );
180 tag.finalizer = None;
181 self.stack
182 .memory
183 .as_mut_ptr()
184 .add(self.position)
185 .cast::<DataStackRegisterTag>()
186 .write_unaligned(tag);
187 true
188 } else {
189 false
190 }
191 }
192 }
193
194 pub fn set<T: Finalize + 'static>(&mut self, value: T) {
195 unsafe {
196 let mut tag = self
197 .stack
198 .memory
199 .as_ptr()
200 .add(self.position)
201 .cast::<DataStackRegisterTag>()
202 .read_unaligned();
203 if tag.type_hash == TypeHash::of::<T>() {
204 if let Some(finalizer) = tag.finalizer {
205 (finalizer)(
206 self.stack
207 .memory
208 .as_mut_ptr()
209 .add(self.position - tag.layout.size())
210 .cast::<()>(),
211 );
212 } else {
213 tag.finalizer = Some(T::finalize_raw);
214 }
215 self.stack
216 .memory
217 .as_mut_ptr()
218 .add(self.position - tag.layout.size())
219 .cast::<T>()
220 .write_unaligned(value);
221 self.stack
222 .memory
223 .as_mut_ptr()
224 .add(self.position)
225 .cast::<DataStackRegisterTag>()
226 .write_unaligned(tag);
227 }
228 }
229 }
230
231 pub fn move_to(&mut self, other: &mut Self) {
232 if self.position == other.position {
233 return;
234 }
235 unsafe {
236 let mut tag = self
237 .stack
238 .memory
239 .as_ptr()
240 .add(self.position)
241 .cast::<DataStackRegisterTag>()
242 .read_unaligned();
243 let other_tag = other
244 .stack
245 .memory
246 .as_ptr()
247 .add(self.position)
248 .cast::<DataStackRegisterTag>()
249 .read_unaligned();
250 if tag.type_hash == other_tag.type_hash && tag.layout == other_tag.layout {
251 if let Some(finalizer) = other_tag.finalizer {
252 (finalizer)(
253 self.stack
254 .memory
255 .as_mut_ptr()
256 .add(other.position - other_tag.layout.size())
257 .cast::<()>(),
258 );
259 }
260 tag.finalizer = None;
261 let source = self
262 .stack
263 .memory
264 .as_ptr()
265 .add(self.position - tag.layout.size());
266 let target = self
267 .stack
268 .memory
269 .as_mut_ptr()
270 .add(other.position - other_tag.layout.size());
271 target.copy_from(source, tag.layout.size());
272 self.stack
273 .memory
274 .as_mut_ptr()
275 .add(self.position)
276 .cast::<DataStackRegisterTag>()
277 .write_unaligned(tag);
278 }
279 }
280 }
281}
282
283#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)]
284pub enum DataStackMode {
285 Values,
286 Registers,
287 #[default]
288 Mixed,
289}
290
291impl DataStackMode {
292 pub fn allows_values(self) -> bool {
293 matches!(self, Self::Values | Self::Mixed)
294 }
295
296 pub fn allows_registers(self) -> bool {
297 matches!(self, Self::Registers | Self::Mixed)
298 }
299}
300
301pub struct DataStack {
302 memory: Vec<u8>,
303 position: usize,
304 mode: DataStackMode,
305 finalizers: HashMap<TypeHash, DataStackFinalizer>,
306 registers: Vec<usize>,
307 drop: bool,
308}
309
310impl Drop for DataStack {
311 fn drop(&mut self) {
312 if self.drop {
313 self.restore(DataStackToken(0));
314 }
315 }
316}
317
318impl DataStack {
319 pub fn new(mut capacity: usize, mode: DataStackMode) -> Self {
320 capacity = capacity.next_power_of_two();
321 Self {
322 memory: vec![0; capacity],
323 position: 0,
324 mode,
325 finalizers: Default::default(),
326 registers: vec![],
327 drop: true,
328 }
329 }
330
331 pub fn position(&self) -> usize {
332 self.position
333 }
334
335 pub fn size(&self) -> usize {
336 self.memory.len()
337 }
338
339 pub fn available(&self) -> usize {
340 self.size().saturating_sub(self.position)
341 }
342
343 pub fn as_bytes(&self) -> &[u8] {
344 &self.memory[0..self.position]
345 }
346
347 pub fn visit(&self, mut f: impl FnMut(TypeHash, Layout, &[u8], Range<usize>, bool)) {
348 let type_layout = Layout::new::<TypeHash>().pad_to_align();
349 let tag_layout = Layout::new::<DataStackRegisterTag>().pad_to_align();
350 let mut position = self.position;
351 while position > 0 {
352 if position < type_layout.size() {
353 return;
354 }
355 position -= type_layout.size();
356 let type_hash = unsafe {
357 self.memory
358 .as_ptr()
359 .add(position)
360 .cast::<TypeHash>()
361 .read_unaligned()
362 };
363 if type_hash == TypeHash::of::<DataStackRegisterTag>() {
364 if position < tag_layout.size() {
365 return;
366 }
367 position -= tag_layout.size();
368 let tag = unsafe {
369 self.memory
370 .as_ptr()
371 .add(position)
372 .cast::<DataStackRegisterTag>()
373 .read_unaligned()
374 };
375 if position < tag.layout.size() {
376 return;
377 }
378 position -= tag.layout.size();
379 let range = position..(position + tag.layout.size());
380 f(
381 tag.type_hash,
382 tag.layout,
383 &self.memory[range.clone()],
384 range,
385 tag.finalizer.is_some(),
386 );
387 position -= tag.padding as usize;
388 } else if let Some(finalizer) = self.finalizers.get(&type_hash) {
389 if position < finalizer.layout.size() {
390 return;
391 }
392 position -= finalizer.layout.size();
393 let range = position..(position + finalizer.layout.size());
394 f(
395 type_hash,
396 finalizer.layout,
397 &self.memory[range.clone()],
398 range,
399 true,
400 );
401 }
402 }
403 }
404
405 pub fn push<T: Finalize + Sized + 'static>(&mut self, value: T) -> bool {
406 if !self.mode.allows_values() {
407 return false;
408 }
409 let value_layout = Layout::new::<T>().pad_to_align();
410 let type_layout = Layout::new::<TypeHash>().pad_to_align();
411 if self.position + value_layout.size() + type_layout.size() > self.size() {
412 return false;
413 }
414 let type_hash = TypeHash::of::<T>();
415 self.finalizers
416 .entry(type_hash)
417 .or_insert(DataStackFinalizer {
418 callback: T::finalize_raw,
419 layout: value_layout,
420 });
421 unsafe {
422 self.memory
423 .as_mut_ptr()
424 .add(self.position)
425 .cast::<T>()
426 .write_unaligned(value);
427 self.position += value_layout.size();
428 self.memory
429 .as_mut_ptr()
430 .add(self.position)
431 .cast::<TypeHash>()
432 .write_unaligned(type_hash);
433 self.position += type_layout.size();
434 }
435 true
436 }
437
438 pub unsafe fn push_raw(
440 &mut self,
441 layout: Layout,
442 type_hash: TypeHash,
443 finalizer: unsafe fn(*mut ()),
444 data: &[u8],
445 ) -> bool {
446 if !self.mode.allows_values() {
447 return false;
448 }
449 let value_layout = layout.pad_to_align();
450 let type_layout = Layout::new::<TypeHash>().pad_to_align();
451 if data.len() != value_layout.size()
452 && self.position + value_layout.size() + type_layout.size() > self.size()
453 {
454 return false;
455 }
456 self.finalizers
457 .entry(type_hash)
458 .or_insert(DataStackFinalizer {
459 callback: finalizer,
460 layout: value_layout,
461 });
462 self.memory[self.position..(self.position + value_layout.size())].copy_from_slice(data);
463 self.position += value_layout.size();
464 unsafe {
465 self.memory
466 .as_mut_ptr()
467 .add(self.position)
468 .cast::<TypeHash>()
469 .write_unaligned(type_hash)
470 };
471 self.position += type_layout.size();
472 true
473 }
474
475 pub fn push_register<T: Finalize + 'static>(&mut self) -> Option<usize> {
476 unsafe { self.push_register_raw(TypeHash::of::<T>(), Layout::new::<T>().pad_to_align()) }
477 }
478
479 pub fn push_register_value<T: Finalize + 'static>(&mut self, value: T) -> Option<usize> {
480 let result = self.push_register::<T>()?;
481 let mut access = self.access_register(result)?;
482 access.set(value);
483 Some(result)
484 }
485
486 pub unsafe fn push_register_raw(
488 &mut self,
489 type_hash: TypeHash,
490 value_layout: Layout,
491 ) -> Option<usize> {
492 if !self.mode.allows_registers() {
493 return None;
494 }
495 let tag_layout = Layout::new::<DataStackRegisterTag>().pad_to_align();
496 let type_layout = Layout::new::<TypeHash>().pad_to_align();
497 let padding = unsafe { self.alignment_padding(value_layout.align()) };
498 if self.position + padding + value_layout.size() + tag_layout.size() + type_layout.size()
499 > self.size()
500 {
501 return None;
502 }
503 unsafe {
504 self.position += padding + value_layout.size();
505 let position = self.position;
506 self.memory
507 .as_mut_ptr()
508 .add(self.position)
509 .cast::<DataStackRegisterTag>()
510 .write_unaligned(DataStackRegisterTag {
511 type_hash,
512 layout: value_layout,
513 finalizer: None,
514 padding: padding as u8,
515 });
516 self.position += tag_layout.size();
517 self.memory
518 .as_mut_ptr()
519 .add(self.position)
520 .cast::<TypeHash>()
521 .write_unaligned(TypeHash::of::<DataStackRegisterTag>());
522 self.position += type_layout.size();
523 self.registers.push(position);
524 Some(self.registers.len() - 1)
525 }
526 }
527
528 pub fn push_stack(&mut self, mut other: Self) -> Result<(), Self> {
529 if self.available() < other.position {
530 return Err(other);
531 }
532 self.memory[self.position..(self.position + other.position)]
533 .copy_from_slice(&other.memory[0..other.position]);
534 self.position += other.position;
535 self.finalizers
536 .extend(other.finalizers.iter().map(|(key, value)| {
537 (
538 *key,
539 DataStackFinalizer {
540 callback: value.callback,
541 layout: value.layout,
542 },
543 )
544 }));
545 unsafe { other.prevent_drop() };
546 Ok(())
547 }
548
549 pub fn push_from_register(&mut self, register: &mut DataStackRegisterAccess) -> bool {
550 if !self.mode.allows_values() {
551 return false;
552 }
553 let type_layout = Layout::new::<TypeHash>().pad_to_align();
554 let mut tag = unsafe {
555 register
556 .stack
557 .memory
558 .as_ptr()
559 .add(register.position)
560 .cast::<DataStackRegisterTag>()
561 .read_unaligned()
562 };
563 if self.position + tag.layout.size() + type_layout.size() > self.size() {
564 return false;
565 }
566 if let Entry::Vacant(e) = self.finalizers.entry(tag.type_hash) {
567 if let Some(finalizer) = tag.finalizer {
568 e.insert(DataStackFinalizer {
569 callback: finalizer,
570 layout: tag.layout,
571 });
572 }
573 }
574 tag.finalizer = None;
575 unsafe {
576 let source = register
577 .stack
578 .memory
579 .as_ptr()
580 .add(register.position - tag.layout.size());
581 let target = self.memory.as_mut_ptr().add(self.position);
582 target.copy_from(source, tag.layout.size());
583 self.position += tag.layout.size();
584 self.memory
585 .as_mut_ptr()
586 .add(self.position)
587 .cast::<TypeHash>()
588 .write_unaligned(tag.type_hash);
589 self.position += type_layout.size();
590 register
591 .stack
592 .memory
593 .as_mut_ptr()
594 .add(register.position)
595 .cast::<DataStackRegisterTag>()
596 .write_unaligned(tag);
597 }
598 true
599 }
600
601 pub fn pop<T: Sized + 'static>(&mut self) -> Option<T> {
602 if !self.mode.allows_values() {
603 return None;
604 }
605 let type_layout = Layout::new::<TypeHash>().pad_to_align();
606 let value_layout = Layout::new::<T>().pad_to_align();
607 if self.position < type_layout.size() + value_layout.size() {
608 return None;
609 }
610 let type_hash = unsafe {
611 self.memory
612 .as_mut_ptr()
613 .add(self.position - type_layout.size())
614 .cast::<TypeHash>()
615 .read_unaligned()
616 };
617 if type_hash != TypeHash::of::<T>() || type_hash == TypeHash::of::<DataStackRegisterTag>() {
618 return None;
619 }
620 self.position -= type_layout.size();
621 let result = unsafe {
622 self.memory
623 .as_ptr()
624 .add(self.position - value_layout.size())
625 .cast::<T>()
626 .read_unaligned()
627 };
628 self.position -= value_layout.size();
629 Some(result)
630 }
631
632 #[allow(clippy::type_complexity)]
634 pub unsafe fn pop_raw(&mut self) -> Option<(Layout, TypeHash, unsafe fn(*mut ()), Vec<u8>)> {
635 if !self.mode.allows_values() {
636 return None;
637 }
638 let type_layout = Layout::new::<TypeHash>().pad_to_align();
639 if self.position < type_layout.size() {
640 return None;
641 }
642 let type_hash = unsafe {
643 self.memory
644 .as_mut_ptr()
645 .add(self.position - type_layout.size())
646 .cast::<TypeHash>()
647 .read_unaligned()
648 };
649 if type_hash == TypeHash::of::<DataStackRegisterTag>() {
650 return None;
651 }
652 let finalizer = self.finalizers.get(&type_hash)?;
653 if self.position < type_layout.size() + finalizer.layout.size() {
654 return None;
655 }
656 self.position -= type_layout.size();
657 let data = self.memory[(self.position - finalizer.layout.size())..self.position].to_vec();
658 self.position -= finalizer.layout.size();
659 Some((finalizer.layout, type_hash, finalizer.callback, data))
660 }
661
662 pub fn drop(&mut self) -> bool {
663 if !self.mode.allows_values() {
664 return false;
665 }
666 let type_layout = Layout::new::<TypeHash>().pad_to_align();
667 self.position -= type_layout.size();
668 let type_hash = unsafe {
669 self.memory
670 .as_ptr()
671 .add(self.position)
672 .cast::<TypeHash>()
673 .read_unaligned()
674 };
675 if type_hash == TypeHash::of::<DataStackRegisterTag>() {
676 return false;
677 }
678 if let Some(finalizer) = self.finalizers.get(&type_hash) {
679 self.position -= finalizer.layout.size();
680 unsafe {
681 (finalizer.callback)(self.memory.as_mut_ptr().add(self.position).cast::<()>());
682 }
683 }
684 true
685 }
686
687 pub fn drop_register(&mut self) -> bool {
688 if !self.mode.allows_registers() {
689 return false;
690 }
691 let tag_layout = Layout::new::<DataStackRegisterTag>().pad_to_align();
692 let type_layout = Layout::new::<TypeHash>().pad_to_align();
693 unsafe {
694 let type_hash = self
695 .memory
696 .as_mut_ptr()
697 .add(self.position - type_layout.size())
698 .cast::<TypeHash>()
699 .read_unaligned();
700 if type_hash != TypeHash::of::<DataStackRegisterTag>() {
701 return false;
702 }
703 self.position -= type_layout.size();
704 self.position -= tag_layout.size();
705 let tag = self
706 .memory
707 .as_ptr()
708 .add(self.position)
709 .cast::<DataStackRegisterTag>()
710 .read_unaligned();
711 self.position -= tag.layout.size() - tag.padding as usize;
712 if let Some(finalizer) = tag.finalizer {
713 (finalizer)(self.memory.as_mut_ptr().add(self.position).cast::<()>());
714 }
715 self.registers.pop();
716 }
717 true
718 }
719
720 pub fn pop_stack(&mut self, mut data_count: usize, capacity: Option<usize>) -> Self {
721 let type_layout = Layout::new::<TypeHash>().pad_to_align();
722 let mut size = 0;
723 let mut position = self.position;
724 let mut finalizers = HashMap::new();
725 while data_count > 0 && position > 0 {
726 data_count -= 1;
727 position -= type_layout.size();
728 size += type_layout.size();
729 let type_hash = unsafe {
730 self.memory
731 .as_mut_ptr()
732 .add(position)
733 .cast::<TypeHash>()
734 .read_unaligned()
735 };
736 if let Some(finalizer) = self.finalizers.get(&type_hash) {
737 position -= finalizer.layout.size();
738 size += finalizer.layout.size();
739 finalizers.insert(
740 type_hash,
741 DataStackFinalizer {
742 callback: finalizer.callback,
743 layout: finalizer.layout,
744 },
745 );
746 }
747 }
748 let mut result = Self::new(capacity.unwrap_or(size).max(size), self.mode);
749 result.memory[0..size].copy_from_slice(&self.memory[position..self.position]);
750 result.finalizers.extend(finalizers);
751 self.position = position;
752 result.position = size;
753 result
754 }
755
756 pub fn pop_to_register(&mut self, register: &mut DataStackRegisterAccess) -> bool {
757 if !self.mode.allows_values() {
758 return false;
759 }
760 let type_layout = Layout::new::<TypeHash>().pad_to_align();
761 if self.position < type_layout.size() {
762 return false;
763 }
764 let type_hash = unsafe {
765 self.memory
766 .as_mut_ptr()
767 .add(self.position - type_layout.size())
768 .cast::<TypeHash>()
769 .read_unaligned()
770 };
771 let mut tag = unsafe {
772 register
773 .stack
774 .memory
775 .as_ptr()
776 .add(register.position)
777 .cast::<DataStackRegisterTag>()
778 .read_unaligned()
779 };
780 if type_hash != tag.type_hash || type_hash == TypeHash::of::<DataStackRegisterTag>() {
781 return false;
782 }
783 if self.position < type_layout.size() + tag.layout.size() {
784 return false;
785 }
786 let finalizer = match self.finalizers.get(&type_hash) {
787 Some(finalizer) => finalizer.callback,
788 None => return false,
789 };
790 unsafe {
791 if let Some(finalizer) = tag.finalizer {
792 (finalizer)(
793 register
794 .stack
795 .memory
796 .as_mut_ptr()
797 .add(register.position - tag.layout.size())
798 .cast::<()>(),
799 );
800 }
801 tag.finalizer = Some(finalizer);
802 let source = self
803 .memory
804 .as_ptr()
805 .add(self.position - type_layout.size() - tag.layout.size());
806 let target = register
807 .stack
808 .memory
809 .as_mut_ptr()
810 .add(register.position - tag.layout.size());
811 target.copy_from(source, tag.layout.size());
812 register
813 .stack
814 .memory
815 .as_mut_ptr()
816 .add(register.position)
817 .cast::<DataStackRegisterTag>()
818 .write_unaligned(tag);
819 }
820 self.position -= type_layout.size();
821 self.position -= tag.layout.size();
822 true
823 }
824
825 pub fn store(&self) -> DataStackToken {
826 DataStackToken(self.position)
827 }
828
829 pub fn restore(&mut self, token: DataStackToken) {
830 let type_layout = Layout::new::<TypeHash>().pad_to_align();
831 let tag_layout = Layout::new::<DataStackRegisterTag>().pad_to_align();
832 let tag_type_hash = TypeHash::of::<DataStackRegisterTag>();
833 while self.position > token.0 {
834 self.position -= type_layout.size();
835 let type_hash = unsafe {
836 self.memory
837 .as_ptr()
838 .add(self.position)
839 .cast::<TypeHash>()
840 .read_unaligned()
841 };
842 if type_hash == tag_type_hash {
843 unsafe {
844 let tag = self
845 .memory
846 .as_ptr()
847 .add(self.position - tag_layout.size())
848 .cast::<DataStackRegisterTag>()
849 .read_unaligned();
850 self.position -= tag_layout.size();
851 self.position -= tag.layout.size();
852 if let Some(finalizer) = tag.finalizer {
853 (finalizer)(self.memory.as_mut_ptr().add(self.position).cast::<()>());
854 }
855 self.position -= tag.padding as usize;
856 self.registers.pop();
857 }
858 } else if let Some(finalizer) = self.finalizers.get(&type_hash) {
859 self.position -= finalizer.layout.size();
860 unsafe {
861 (finalizer.callback)(self.memory.as_mut_ptr().add(self.position).cast::<()>());
862 }
863 }
864 }
865 }
866
867 pub fn reverse(&mut self, token: DataStackToken) {
868 let size = self.position.saturating_sub(token.0);
869 let mut meta_data = SmallVec::<[_; 8]>::with_capacity(8);
870 let mut meta_registers = 0;
871 let type_layout = Layout::new::<TypeHash>().pad_to_align();
872 let tag_layout = Layout::new::<DataStackRegisterTag>().pad_to_align();
873 let tag_type_hash = TypeHash::of::<DataStackRegisterTag>();
874 let mut position = self.position;
875 while position > token.0 {
876 position -= type_layout.size();
877 let type_hash = unsafe {
878 self.memory
879 .as_mut_ptr()
880 .add(position)
881 .cast::<TypeHash>()
882 .read_unaligned()
883 };
884 if type_hash == tag_type_hash {
885 unsafe {
886 let tag = self
887 .memory
888 .as_ptr()
889 .add(self.position - tag_layout.size())
890 .cast::<DataStackRegisterTag>()
891 .read_unaligned();
892 position -= tag_layout.size();
893 position -= tag.layout.size();
894 meta_data.push((
895 position - token.0,
896 type_layout.size() + tag_layout.size() + tag.layout.size(),
897 ));
898 meta_registers += 1;
899 }
900 } else if let Some(finalizer) = self.finalizers.get(&type_hash) {
901 position -= finalizer.layout.size();
902 meta_data.push((
903 position - token.0,
904 type_layout.size() + finalizer.layout.size(),
905 ));
906 }
907 }
908 if meta_data.len() <= 1 {
909 return;
910 }
911 let mut memory = SmallVec::<[_; 256]>::new();
912 memory.resize(size, 0);
913 memory.copy_from_slice(&self.memory[token.0..self.position]);
914 for (source_position, size) in meta_data {
915 self.memory[position..(position + size)]
916 .copy_from_slice(&memory[source_position..(source_position + size)]);
917 position += size;
918 }
919 let start = self.registers.len() - meta_registers;
920 self.registers[start..].reverse();
921 }
922
923 pub fn peek(&self) -> Option<TypeHash> {
924 if self.position == 0 {
925 return None;
926 }
927 let type_layout = Layout::new::<TypeHash>().pad_to_align();
928 Some(unsafe {
929 self.memory
930 .as_ptr()
931 .add(self.position - type_layout.size())
932 .cast::<TypeHash>()
933 .read_unaligned()
934 })
935 }
936
937 pub fn registers_count(&self) -> usize {
938 self.registers.len()
939 }
940
941 pub fn access_register(&mut self, index: usize) -> Option<DataStackRegisterAccess> {
942 let position = *self.registers.get(index)?;
943 Some(DataStackRegisterAccess {
944 stack: self,
945 position,
946 })
947 }
948
949 pub fn access_registers_pair(
950 &mut self,
951 a: usize,
952 b: usize,
953 ) -> Option<(DataStackRegisterAccess, DataStackRegisterAccess)> {
954 if a == b {
955 return None;
956 }
957 let position_a = *self.registers.get(a)?;
958 let position_b = *self.registers.get(b)?;
959 unsafe {
960 Some((
961 DataStackRegisterAccess {
962 stack: (self as *mut Self).as_mut()?,
963 position: position_a,
964 },
965 DataStackRegisterAccess {
966 stack: (self as *mut Self).as_mut()?,
967 position: position_b,
968 },
969 ))
970 }
971 }
972
973 pub unsafe fn prevent_drop(&mut self) {
975 self.drop = false;
976 }
977
978 #[inline]
980 unsafe fn alignment_padding(&self, alignment: usize) -> usize {
981 pointer_alignment_padding(
982 unsafe { self.memory.as_ptr().add(self.position) },
983 alignment,
984 )
985 }
986}
987
988pub trait DataStackPack: Sized {
989 fn stack_push(self, stack: &mut DataStack);
990
991 fn stack_push_reversed(self, stack: &mut DataStack) {
992 let token = stack.store();
993 self.stack_push(stack);
994 stack.reverse(token);
995 }
996
997 fn stack_pop(stack: &mut DataStack) -> Self;
998
999 fn pack_types() -> Vec<TypeHash>;
1000}
1001
1002impl DataStackPack for () {
1003 fn stack_push(self, _: &mut DataStack) {}
1004
1005 fn stack_pop(_: &mut DataStack) -> Self {}
1006
1007 fn pack_types() -> Vec<TypeHash> {
1008 vec![]
1009 }
1010}
1011
1012macro_rules! impl_data_stack_tuple {
1013 ($($type:ident),+) => {
1014 impl<$($type: 'static),+> DataStackPack for ($($type,)+) {
1015 #[allow(non_snake_case)]
1016 fn stack_push(self, stack: &mut DataStack) {
1017 let ($( $type, )+) = self;
1018 $( stack.push($type); )+
1019 }
1020
1021 #[allow(non_snake_case)]
1022 fn stack_pop(stack: &mut DataStack) -> Self {
1023 ($(
1024 stack.pop::<$type>().unwrap_or_else(
1025 || panic!("Could not pop data of type: {}", std::any::type_name::<$type>())
1026 ),
1027 )+)
1028 }
1029
1030 #[allow(non_snake_case)]
1031 fn pack_types() -> Vec<TypeHash> {
1032 vec![ $( TypeHash::of::<$type>() ),+ ]
1033 }
1034 }
1035 };
1036}
1037
1038impl_data_stack_tuple!(A);
1039impl_data_stack_tuple!(A, B);
1040impl_data_stack_tuple!(A, B, C);
1041impl_data_stack_tuple!(A, B, C, D);
1042impl_data_stack_tuple!(A, B, C, D, E);
1043impl_data_stack_tuple!(A, B, C, D, E, F);
1044impl_data_stack_tuple!(A, B, C, D, E, F, G);
1045impl_data_stack_tuple!(A, B, C, D, E, F, G, H);
1046impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I);
1047impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J);
1048impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K);
1049impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K, L);
1050impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M);
1051impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N);
1052impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O);
1053impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P);
1054
1055#[cfg(test)]
1056mod tests {
1057 use crate::{
1058 data_stack::{DataStack, DataStackMode},
1059 type_hash::TypeHash,
1060 };
1061 use std::{alloc::Layout, cell::RefCell, rc::Rc};
1062
1063 #[test]
1064 fn test_data_stack() {
1065 struct Droppable(Rc<RefCell<bool>>);
1066
1067 impl Drop for Droppable {
1068 fn drop(&mut self) {
1069 *self.0.borrow_mut() = true;
1070 }
1071 }
1072
1073 let dropped = Rc::new(RefCell::new(false));
1074 let mut stack = DataStack::new(10240, DataStackMode::Values);
1075 assert_eq!(stack.size(), 16384);
1076 assert_eq!(stack.position(), 0);
1077 stack.push(Droppable(dropped.clone()));
1078 assert_eq!(stack.position(), 16);
1079 let token = stack.store();
1080 stack.push(42_usize);
1081 assert_eq!(stack.position(), 32);
1082 stack.push(true);
1083 assert_eq!(stack.position(), 41);
1084 stack.push(4.2_f32);
1085 assert_eq!(stack.position(), 53);
1086 assert!(!*dropped.borrow());
1087 assert!(stack.pop::<()>().is_none());
1088 stack.push(());
1089 assert_eq!(stack.position(), 61);
1090 stack.reverse(token);
1091 let mut stack2 = stack.pop_stack(2, None);
1092 assert_eq!(stack.position(), 36);
1093 assert_eq!(stack2.size(), 32);
1094 assert_eq!(stack2.position(), 25);
1095 assert_eq!(stack2.pop::<usize>().unwrap(), 42_usize);
1096 assert_eq!(stack2.position(), 9);
1097 assert!(stack2.pop::<bool>().unwrap());
1098 assert_eq!(stack2.position(), 0);
1099 stack2.push(true);
1100 stack2.push(42_usize);
1101 stack.push_stack(stack2).ok().unwrap();
1102 assert_eq!(stack.position(), 61);
1103 assert_eq!(stack.pop::<usize>().unwrap(), 42_usize);
1104 assert_eq!(stack.position(), 45);
1105 assert!(stack.pop::<bool>().unwrap());
1106 assert_eq!(stack.position(), 36);
1107 assert_eq!(stack.pop::<f32>().unwrap(), 4.2_f32);
1108 assert_eq!(stack.position(), 24);
1109 stack.pop::<()>().unwrap();
1110 assert_eq!(stack.position(), 16);
1111 stack.push(42_usize);
1112 unsafe {
1113 let (layout, type_hash, finalizer, data) = stack.pop_raw().unwrap();
1114 assert_eq!(layout, Layout::new::<usize>().pad_to_align());
1115 assert_eq!(type_hash, TypeHash::of::<usize>());
1116 assert!(stack.push_raw(layout, type_hash, finalizer, &data));
1117 assert_eq!(stack.position(), 32);
1118 assert_eq!(stack.pop::<usize>().unwrap(), 42_usize);
1119 assert_eq!(stack.position(), 16);
1120 }
1121 drop(stack);
1122 assert!(*dropped.borrow());
1123
1124 let mut stack = DataStack::new(10240, DataStackMode::Registers);
1125 assert_eq!(stack.size(), 16384);
1126 stack.push_register::<bool>().unwrap();
1127 stack.drop_register();
1128 let a = stack.push_register_value(true).unwrap();
1129 assert!(*stack.access_register(a).unwrap().read::<bool>().unwrap());
1130 assert!(stack.access_register(a).unwrap().take::<bool>().unwrap());
1131 assert!(!stack.access_register(a).unwrap().has_value());
1132 let b = stack.push_register_value(0usize).unwrap();
1133 stack.access_register(b).unwrap().set(42usize);
1134 assert_eq!(
1135 *stack.access_register(b).unwrap().read::<usize>().unwrap(),
1136 42
1137 );
1138 }
1139}