scryer_prolog/machine/stack.rs

use core::marker::PhantomData;

use crate::raw_block::*;
use crate::types::*;

use std::mem;
use std::ops::{Index, IndexMut};
use std::ptr;

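// The stack lives in a single growable `RawBlock`. These trait methods fix the
// block's initial size (10 MiB) and align it for `HeapCellValue`, since every
// frame is laid out as a fixed prelude followed by a run of cells.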
impl RawBlockTraits for Stack {
    #[inline]
    fn init_size() -> usize {
        10 * 1024 * 1024
    }

    #[inline]
    fn align() -> usize {
        mem::align_of::<HeapCellValue>()
    }
}

#[inline(always)]
pub const fn prelude_size<Prelude>() -> usize {
    mem::size_of::<Prelude>()
}

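// The machine's frame stack: a bump-allocated region holding AND frames
// (environments) and OR frames (choice points). Every frame location handed
// out or accepted by this module is a byte offset from `buf.base`.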
#[derive(Debug)]
pub struct Stack {
    buf: RawBlock<Stack>,
    _marker: PhantomData<HeapCellValue>,
}

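// Fixed-size header of an AND frame. The frame's `num_cells` permanent
// variable cells follow it directly in memory; `e` and `cp` hold the
// continuation environment and continuation pointer.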
#[derive(Debug)]
pub(crate) struct AndFramePrelude {
    pub(crate) num_cells: usize,
    pub(crate) e: usize,
    pub(crate) cp: usize,
}

#[derive(Debug)]
pub(crate) struct AndFrame {
    pub(crate) prelude: AndFramePrelude,
}

impl AndFrame {
    pub(crate) fn size_of(num_cells: usize) -> usize {
        prelude_size::<AndFramePrelude>() + num_cells * mem::size_of::<HeapCellValue>()
    }
}

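// AND frame cells are 1-indexed, mirroring the WAM's Y1, Y2, ... numbering of
// permanent variables; the arithmetic skips the prelude and then steps in
// units of `HeapCellValue`.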
impl Index<usize> for AndFrame {
    type Output = HeapCellValue;

    fn index(&self, index: usize) -> &Self::Output {
        let prelude_offset = prelude_size::<AndFramePrelude>();
        let index_offset = (index - 1) * mem::size_of::<HeapCellValue>();

        unsafe {
            let ptr = self as *const crate::machine::stack::AndFrame as *const u8;
            let ptr = ptr as usize + prelude_offset + index_offset;

            &*(ptr as *const HeapCellValue)
        }
    }
}

impl IndexMut<usize> for AndFrame {
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        let prelude_offset = prelude_size::<AndFramePrelude>();
        let index_offset = (index - 1) * mem::size_of::<HeapCellValue>();

        unsafe {
            let ptr = self as *mut crate::machine::stack::AndFrame as *const u8;
            let ptr = ptr as usize + prelude_offset + index_offset;

            &mut *(ptr as *mut HeapCellValue)
        }
    }
}

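// Indexing the stack itself takes a raw byte offset from the base of the
// block (such as one produced by the `stack_loc!` macro) and reinterprets the
// memory there as a single cell.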
impl Index<usize> for Stack {
    type Output = HeapCellValue;

    #[inline]
    fn index(&self, index: usize) -> &Self::Output {
        unsafe {
            let ptr = self.buf.base as usize + index;
            &*(ptr as *const HeapCellValue)
        }
    }
}

impl IndexMut<usize> for Stack {
    #[inline]
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        unsafe {
            let ptr = self.buf.base as usize + index;
            &mut *(ptr as *mut HeapCellValue)
        }
    }
}

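// Fixed-size header of an OR frame (choice point), followed in memory by
// `num_cells` saved argument cells. Its fields record the state needed to
// resume on backtracking: the environment `e` and continuation pointer `cp`,
// the previous choice point `b`, the alternative to retry (`bp`, with the
// instruction offsets `boip` and `biip`), and the trail, heap, cut-barrier
// and attributed-variable-queue marks (`tr`, `h`, `b0`, `attr_var_queue_len`).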
#[derive(Debug)]
pub(crate) struct OrFramePrelude {
    pub(crate) num_cells: usize,
    pub(crate) e: usize,
    pub(crate) cp: usize,
    pub(crate) b: usize,
    pub(crate) bp: usize,
    pub(crate) boip: u32,
    pub(crate) biip: u32,
    pub(crate) tr: usize,
    pub(crate) h: usize,
    pub(crate) b0: usize,
    pub(crate) attr_var_queue_len: usize,
}

#[derive(Debug)]
pub(crate) struct OrFrame {
    pub(crate) prelude: OrFramePrelude,
}

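// Unlike AND frame cells, OR frame cells are 0-indexed; otherwise the layout
// arithmetic is the same: skip the prelude, then step by cell size.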
impl Index<usize> for OrFrame {
    type Output = HeapCellValue;

    #[inline]
    fn index(&self, index: usize) -> &Self::Output {
        let prelude_offset = prelude_size::<OrFramePrelude>();
        let index_offset = index * mem::size_of::<HeapCellValue>();

        unsafe {
            let ptr = self as *const crate::machine::stack::OrFrame as *const u8;
            let ptr = ptr as usize + prelude_offset + index_offset;

            &*(ptr as *const HeapCellValue)
        }
    }
}

impl IndexMut<usize> for OrFrame {
    #[inline]
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        let prelude_offset = prelude_size::<OrFramePrelude>();
        let index_offset = index * mem::size_of::<HeapCellValue>();

        unsafe {
            let ptr = self as *mut crate::machine::stack::OrFrame as *const u8;
            let ptr = ptr as usize + prelude_offset + index_offset;

            &mut *(ptr as *mut HeapCellValue)
        }
    }
}

impl OrFrame {
    pub(crate) fn size_of(num_cells: usize) -> usize {
        prelude_size::<OrFramePrelude>() + num_cells * mem::size_of::<HeapCellValue>()
    }
}

impl Stack {
    pub(crate) fn new() -> Self {
        Stack {
            buf: RawBlock::new(),
            _marker: PhantomData,
        }
    }

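    // Bump-allocates `frame_size` bytes from the underlying block, growing
    // the block and retrying whenever the request does not fit.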
    #[inline(always)]
    unsafe fn alloc(&mut self, frame_size: usize) -> *mut u8 {
        loop {
            let ptr = self.buf.alloc(frame_size);

            if ptr.is_null() {
                self.buf.grow();
            } else {
                return ptr;
            }
        }
    }

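    // Allocates an AND frame with `num_cells` cells at the top of the stack
    // and returns its byte offset. Each cell is initialized to a stack cell
    // referring to its own location, i.e. an unbound variable; only
    // `num_cells` is written to the prelude, the caller fills in `e` and `cp`.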
    pub(crate) fn allocate_and_frame(&mut self, num_cells: usize) -> usize {
        let frame_size = AndFrame::size_of(num_cells);

        unsafe {
            let e = (*self.buf.ptr.get_mut()) as usize - self.buf.base as usize;
            let new_ptr = self.alloc(frame_size);
            let mut offset = prelude_size::<AndFramePrelude>();

            for idx in 0..num_cells {
                ptr::write(
                    new_ptr.add(offset) as *mut HeapCellValue,
                    stack_loc_as_cell!(AndFrame, e, idx + 1),
                );

                offset += mem::size_of::<HeapCellValue>();
            }

            let and_frame = self.index_and_frame_mut(e);
            and_frame.prelude.num_cells = num_cells;

            e
        }
    }

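    // Byte offset of the current top of the stack, i.e. the next free
    // location.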
    pub(crate) fn top(&self) -> usize {
        unsafe { (*self.buf.ptr.get()) as usize - self.buf.base as usize }
    }

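    // Allocates an OR frame with `num_cells` argument cells at the top of the
    // stack and returns its byte offset. As with AND frames, only `num_cells`
    // is written to the prelude here; the remaining fields are filled in by
    // the caller.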
    pub(crate) fn allocate_or_frame(&mut self, num_cells: usize) -> usize {
        let frame_size = OrFrame::size_of(num_cells);

        unsafe {
            let b = (*self.buf.ptr.get_mut()) as usize - self.buf.base as usize;
            let new_ptr = self.alloc(frame_size);
            let mut offset = prelude_size::<OrFramePrelude>();

            for idx in 0..num_cells {
                ptr::write(
                    (new_ptr as usize + offset) as *mut HeapCellValue,
                    stack_loc_as_cell!(OrFrame, b, idx),
                );

                offset += mem::size_of::<HeapCellValue>();
            }

            let or_frame = self.index_or_frame_mut(b);
            or_frame.prelude.num_cells = num_cells;

            b
        }
    }

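    // The `index_*_frame` accessors reinterpret a byte offset from the base
    // of the block as a frame reference; they are only meaningful for offsets
    // at which a frame of the corresponding kind was actually allocated.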
    #[inline(always)]
    pub(crate) fn index_and_frame(&self, e: usize) -> &AndFrame {
        unsafe {
            let ptr = self.buf.base as usize + e;
            &*(ptr as *const AndFrame)
        }
    }

    #[inline(always)]
    pub(crate) fn index_and_frame_mut(&mut self, e: usize) -> &mut AndFrame {
        unsafe {
            let ptr = self.buf.base.add(e);
            &mut *(ptr as *mut AndFrame)
        }
    }

    #[inline(always)]
    pub(crate) fn index_or_frame(&self, b: usize) -> &OrFrame {
        unsafe {
            let ptr = self.buf.base as usize + b;
            &*(ptr as *const OrFrame)
        }
    }

    #[inline(always)]
    pub(crate) fn index_or_frame_mut(&mut self, b: usize) -> &mut OrFrame {
        unsafe {
            let ptr = self.buf.base as usize + b;
            &mut *(ptr as *mut OrFrame)
        }
    }

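    // Resets the top of the stack to byte offset `b`, discarding every frame
    // above it; a no-op if `b` is at or above the current top.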
    #[inline(always)]
    pub(crate) fn truncate(&mut self, b: usize) {
        let base = self.buf.base as usize + b;

        if base < (*self.buf.ptr.get_mut()) as usize {
            *self.buf.ptr.get_mut() = base as *mut _;
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    use crate::machine::mock_wam::*;

    #[test]
    #[cfg_attr(miri, ignore)]
    fn stack_tests() {
        let mut wam = MockWAM::new();

        let e = wam.machine_st.stack.allocate_and_frame(10);
        let and_frame = wam.machine_st.stack.index_and_frame_mut(e);

        assert_eq!(e, 0);

        assert_eq!(and_frame.prelude.num_cells, 10);

        for idx in 0..10 {
            assert_eq!(and_frame[idx + 1], stack_loc_as_cell!(AndFrame, e, idx + 1));
        }

        and_frame[5] = empty_list_as_cell!();

        assert_eq!(and_frame[5], empty_list_as_cell!());

        let b = wam.machine_st.stack.allocate_or_frame(5);

        let or_frame = wam.machine_st.stack.index_or_frame_mut(b);

        for idx in 0..5 {
            assert_eq!(or_frame[idx], stack_loc_as_cell!(OrFrame, b, idx));
        }

        let next_e = wam.machine_st.stack.allocate_and_frame(9);
        let and_frame = wam.machine_st.stack.index_and_frame_mut(next_e);

        for idx in 0..9 {
            assert_eq!(
                and_frame[idx + 1],
                stack_loc_as_cell!(AndFrame, next_e, idx + 1)
            );
        }

        let and_frame = wam.machine_st.stack.index_and_frame(e);
        assert_eq!(and_frame[5], empty_list_as_cell!());

        assert_eq!(
            wam.machine_st.stack[stack_loc!(AndFrame, e, 5)],
            empty_list_as_cell!()
        );
    }
}