// gnostr_xq/compile/compiler.rs

1use std::{
2    cell::RefCell,
3    collections::{HashMap, HashSet},
4    fmt::{Debug, Formatter},
5    rc::Rc,
6    slice::from_ref,
7};
8
9use itertools::Itertools;
10use thiserror::Error;
11use xq_lang::ast::{
12    self, BinaryArithmeticOp, BinaryOp, BindPattern, ConstantPrimitive, FuncArg, FuncDef,
13    Identifier, ObjectBindPatternEntry, Query, StringFragment, Suffix, Term, UpdateOp,
14};
15
16use crate::{
17    data_structure::PHashMap,
18    intrinsic,
19    module_loader::{ModuleLoadError, ModuleLoader},
20    value::Array,
21    vm::{
22        bytecode::{ClosureAddress, NamedFn0, NamedFn1, NamedFn2},
23        Address, ByteCode, Program, ScopeId, ScopedSlot,
24    },
25    Number, Value,
26};
27
28/// # Function calling convention
29/// ## Caller
30/// for `func(arg0, arg1, closure2)`
31/// - push arg0
32/// - push arg1
33/// - push_closure { (copy of the current scope), (start address) }
34/// - call (&func, return address)
35///
36/// ## Callee
37/// for `func($arg0, $arg1, closure2)`
38/// - new_frame { scope id, return address }
39/// - pop_closure
40/// - pop slot_1
41/// - pop slot_0
42/// - (function body here)
43/// - return // pop frame
44///
45/// # Important note
/// Things are compiled backwards. This makes handling jump-ish code much easier, since most of
/// the jumps (or, to be precise, references to the address of a piece of code) point forward, so we
/// don't have to do something like "acquire a placeholder, then replace it with a jump once the
/// jump target address is determined".
50
/// Errors that can be produced while compiling a parsed query into bytecode.
#[derive(Debug, Error)]
pub enum CompileError {
    /// A `$variable` was referenced without being bound in any visible scope.
    #[error("Use of unknown variable `{0:}`")]
    UnknownVariable(Identifier),
    /// A function with this name/arity combination is not declared, not a
    /// closure argument, and not an intrinsic.
    #[error("Use of unknown function `{0:}` that takes {1:} arguments")]
    UnknownFunction(Identifier, usize),
    /// A destructuring pattern binds the same variable name more than once.
    #[error("Bind pattern has the same variable `{0:}`")]
    SameVariableInPattern(Identifier),
    /// A `break $label`-style reference to a label that is not in scope.
    #[error("Unknown label `{0:}`")]
    UnknownLabel(Identifier),
    /// Propagated failure from the module loader.
    #[error(transparent)]
    ModuleLoadError(#[from] ModuleLoadError),
    /// An `@formatter` string format that the runtime does not provide.
    #[error("Unknown string formatter `{0:?}`")]
    UnknownStringFormatter(Identifier),
}
66
67type Result<T, E = CompileError> = std::result::Result<T, E>;
68
/// Identifies a function by its name together with its arity, since the same
/// name may be declared with different argument counts.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub(crate) struct FunctionIdentifier(pub(crate) Identifier, pub(crate) usize);
/// Compile-time record of a user-declared function: its entry points (normal
/// call plus the two tail-call variants) and the kinds of its arguments.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub(crate) struct DeclaredFunction {
    /// The address for normal function call. [ByteCode::NewFrame] will be there.
    address: Address,
    /// The address for tail call that doesn't require caller's frame anymore.
    tail_call_discard_frame: Address,
    /// The address for tail call in case the caller's frame has to be preserved.
    /// Compiles either to a [ByteCode::Jump] or to the normal path [ByteCode::NewFrame].
    tail_call_preserve_frame: Address,
    /// The list of arg type.
    arg_types: Vec<ArgType>,
}
/// How a function parameter is passed: as a lazily-evaluated closure (`arg`)
/// or as an eagerly-evaluated value (`$arg`).
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub(crate) enum ArgType {
    /// arg
    Closure,
    /// $arg
    Value,
}
/// Anything that can be resolved and invoked at a call site.
#[derive(Clone)]
enum FunctionLike {
    /// A user-declared function (via `def`).
    Function(DeclaredFunction),
    /// A zero-arity closure argument stored in a frame slot.
    Closure(ScopedSlot),
    /// A built-in backed by a single bytecode op plus its argument types.
    Intrinsic(ByteCode, Vec<ArgType>),
    /// A built-in whose call site is expanded inline by the given callback;
    /// the `&'static str` is its name, kept for `Debug` output.
    ManuallyImplemented(
        &'static str,
        fn(&mut Compiler, &[Query], Address) -> Result<Address>,
    ),
}
100impl Debug for FunctionLike {
101    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
102        match self {
103            FunctionLike::Function(func) => f.debug_tuple("DeclaredFunction").field(func).finish(),
104            FunctionLike::Closure(slot) => f.debug_tuple("Closure").field(slot).finish(),
105            FunctionLike::Intrinsic(code, args) => {
106                f.debug_tuple("Intrinsic").field(code).field(args).finish()
107            }
108            FunctionLike::ManuallyImplemented(name, _) => {
109                f.debug_tuple("ManuallyImplemented").field(name).finish()
110            }
111        }
112    }
113}
114
/// Address of a previously emitted [ByteCode::PlaceHolder], to be patched
/// later via [CodeEmitter::replace_placeholder].
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
struct PlaceHolder(Address);
117
/// Accumulates the program's bytecode. Note that code is emitted backwards
/// (see the note at the top of this file): `next` arguments always refer to
/// already-emitted code that should run *after* the op being emitted.
struct CodeEmitter {
    // Flat bytecode buffer; indices into it are `Address`es.
    code: Vec<ByteCode>,
}
121
impl CodeEmitter {
    fn new() -> Self {
        Self {
            // Pre-seed the well-known terminal ops so they have fixed
            // addresses: Output at 0, Backtrack at 1 (see `output()` /
            // `backtrack()` below), plus an Unreachable sentinel.
            code: vec![ByteCode::Output, ByteCode::Backtrack, ByteCode::Unreachable],
        }
    }

    /// Address of the most recently emitted op.
    fn address(&self) -> Address {
        Address(self.code.len() - 1)
    }

    /// Fixed address of the pre-seeded [ByteCode::Output] op.
    fn output(&self) -> Address {
        Address(0)
    }

    /// Fixed address of the pre-seeded [ByteCode::Backtrack] op.
    fn backtrack(&self) -> Address {
        Address(1)
    }

    /// Resolves a chain of [ByteCode::Jump]s to the first non-jump op.
    fn follow_jump(&self, mut address: Address) -> Address {
        while let ByteCode::Jump(next) = self.code[address.0] {
            address = next;
        }
        address
    }

    /// Makes execution continue at `address` after the op that is about to be
    /// pushed. If `address` is already the top of the buffer the new op will
    /// simply fall through to it. Otherwise, small terminal ops (Backtrack,
    /// Ret, Output, ...) are duplicated instead of emitting a Jump to them,
    /// and a Jump is emitted only as the general fallback.
    fn jump_or_follow(&mut self, address: Address) {
        if address.0 + 1 == self.code.len() {
            return;
        }
        let address = self.follow_jump(address);
        let code = match self.code.get(address.0) {
            Some(ByteCode::Unreachable) => ByteCode::Unreachable,
            Some(ByteCode::Call(func_address)) => {
                let func_address = *func_address;
                // A Call has a continuation of its own; recursively lay down
                // the code following the call as well.
                self.jump_or_follow(address.get_next());
                ByteCode::Call(func_address)
            }
            Some(ByteCode::TailCall(address)) => ByteCode::TailCall(*address),
            Some(ByteCode::Backtrack) => ByteCode::Backtrack,
            Some(ByteCode::Ret) => ByteCode::Ret,
            Some(ByteCode::Output) => ByteCode::Output,
            _ => ByteCode::Jump(address),
        };
        self.code.push(code);
    }

    /// The op that will actually execute at `next`, with jumps resolved.
    fn get_next_op(&self, next: Address) -> &ByteCode {
        &self.code[self.follow_jump(next).0]
    }

    /// Emits an op that falls through to `next`, returning its address.
    fn emit_normal_op(&mut self, code: ByteCode, next: Address) -> Address {
        self.jump_or_follow(next);
        self.code.push(code);
        self.address()
    }

    /// Emits an op that never falls through (no continuation is laid down).
    fn emit_terminal_op(&mut self, code: ByteCode) -> Address {
        self.code.push(code);
        self.address()
    }

    /// Emits a `Const` op for an AST-level constant literal.
    fn emit_constant_primitive<V>(&mut self, value: V, next: Address) -> Address
    where
        V: Into<ConstantPrimitive>,
    {
        match value.into() {
            ConstantPrimitive::Null => self.emit_constant(Value::Null, next),
            ConstantPrimitive::False => self.emit_constant(false, next),
            ConstantPrimitive::True => self.emit_constant(true, next),
            ConstantPrimitive::Number(v) => self.emit_constant(Number::from(v.0), next),
            ConstantPrimitive::String(s) => self.emit_constant(s, next),
        }
    }

    /// Emits a `Const` op (replaces the stack top with `value`).
    fn emit_constant<V>(&mut self, value: V, next: Address) -> Address
    where
        V: Into<Value>,
    {
        self.emit_normal_op(ByteCode::Const(value.into()), next)
    }

    /// Emits a `Push` op (pushes `value` on top of the stack).
    fn emit_push<V>(&mut self, value: V, next: Address) -> Address
    where
        V: Into<Value>,
    {
        self.emit_normal_op(ByteCode::Push(value.into()), next)
    }

    /// Emits a `Fork` op whose backtrack continuation is `fork_pc`.
    fn emit_fork(&mut self, fork_pc: Address, next: Address) -> Address {
        self.emit_normal_op(ByteCode::Fork { fork_pc }, next)
    }

    /// Emits a placeholder op and returns both its address (usable as a jump
    /// target now) and a handle for patching it later.
    fn emit_terminal_placeholder(&mut self) -> (Address, PlaceHolder) {
        let address = self.emit_terminal_op(ByteCode::PlaceHolder);
        (address, PlaceHolder(address))
    }

    /// Patches a previously emitted placeholder with the real op.
    fn replace_placeholder(&mut self, placeholder: PlaceHolder, code: ByteCode) {
        assert!(matches!(
            self.code.get(placeholder.0 .0),
            Some(ByteCode::PlaceHolder)
        ));
        self.code[placeholder.0 .0] = code;
    }
}
228
/// Compile-time view of a lexical scope: which names are bound to which frame
/// slots, plus counters for how many slots of each kind the frame will need.
#[derive(Debug, Clone)]
struct Scope {
    // Frame id; baked into every ScopedSlot allocated by this scope.
    id: ScopeId,
    // Slot counters; their final values become the frame's slot counts.
    next_variable_slot_id: usize,
    next_closure_slot_id: usize,
    next_label_slot_id: usize,
    // Name -> binding maps; persistent maps so nested scopes share structure.
    functions: PHashMap<FunctionIdentifier, FunctionLike>,
    variables: PHashMap<Identifier, ScopedSlot>,
    labels: PHashMap<Identifier, ScopedSlot>,
    // Shared (across the whole scope tree) set of scope ids whose slots were
    // referenced from a *different* scope; used to decide tail-call safety.
    leaked_scope_ids: Rc<RefCell<HashSet<ScopeId>>>,
}
240
241impl Scope {
242    fn new(id: ScopeId) -> Self {
243        Self {
244            id,
245            next_variable_slot_id: 0,
246            next_closure_slot_id: 0,
247            next_label_slot_id: 0,
248            functions: Default::default(),
249            variables: Default::default(),
250            labels: Default::default(),
251            leaked_scope_ids: Rc::new(RefCell::new(Default::default())),
252        }
253    }
254
255    fn nested(id: ScopeId, previous: &Self) -> Self {
256        Self {
257            id,
258            next_variable_slot_id: 0,
259            next_closure_slot_id: 0,
260            next_label_slot_id: 0,
261            functions: previous.functions.clone(),
262            variables: previous.variables.clone(),
263            labels: previous.labels.clone(),
264            leaked_scope_ids: previous.leaked_scope_ids.clone(),
265        }
266    }
267
268    fn require_slot(&self) -> bool {
269        self.next_variable_slot_id > 0
270            || self.next_closure_slot_id > 0
271            || self.next_label_slot_id > 0
272    }
273
274    fn has_slot_leaked_scope(&self) -> bool {
275        self.leaked_scope_ids.borrow().contains(&self.id)
276    }
277
278    fn allocate_variable(&mut self) -> ScopedSlot {
279        let slot = ScopedSlot(self.id, self.next_variable_slot_id);
280        self.next_variable_slot_id += 1;
281        slot
282    }
283
284    fn register_variable(&mut self, name: Identifier) -> ScopedSlot {
285        let slot = self.allocate_variable();
286        self.variables.insert(name, slot);
287        slot
288    }
289
290    fn register_function(&mut self, name: Identifier, function: DeclaredFunction) {
291        self.functions.insert(
292            FunctionIdentifier(name, function.arg_types.len()),
293            FunctionLike::Function(function),
294        );
295    }
296
297    fn register_closure(&mut self, name: Identifier) -> ScopedSlot {
298        let slot = ScopedSlot(self.id, self.next_closure_slot_id);
299        self.next_closure_slot_id += 1;
300        self.functions
301            .insert(FunctionIdentifier(name, 0), FunctionLike::Closure(slot));
302        slot
303    }
304
305    fn allocate_label(&mut self) -> ScopedSlot {
306        let slot = ScopedSlot(self.id, self.next_label_slot_id);
307        self.next_label_slot_id += 1;
308        slot
309    }
310
311    fn register_label(&mut self, name: Identifier) -> ScopedSlot {
312        let slot = ScopedSlot(self.id, self.next_label_slot_id);
313        self.next_label_slot_id += 1;
314        self.labels.insert(name, slot);
315        slot
316    }
317
318    fn lookup_variable(&self, name: &Identifier) -> Option<&ScopedSlot> {
319        let ret = self.variables.get(name);
320        if let Some(ScopedSlot(id, _)) = ret {
321            if id != &self.id {
322                self.leaked_scope_ids.borrow_mut().insert(*id);
323            }
324        }
325        ret
326    }
327
328    fn lookup_function(&self, identifier: &FunctionIdentifier) -> Option<&FunctionLike> {
329        let ret = self.functions.get(identifier);
330        if let Some(FunctionLike::Closure(ScopedSlot(id, _))) = ret {
331            if id != &self.id {
332                self.leaked_scope_ids.borrow_mut().insert(*id);
333            }
334        }
335        ret
336    }
337
338    fn lookup_label(&self, name: &Identifier) -> Option<&ScopedSlot> {
339        let ret = self.labels.get(name);
340        if let Some(ScopedSlot(id, _)) = ret {
341            if id != &self.id {
342                self.leaked_scope_ids.borrow_mut().insert(*id);
343            }
344        }
345        ret
346    }
347}
348
/// Compiles parsed queries into an executable [Program], emitting bytecode
/// backwards (see the note at the top of this file).
pub struct Compiler {
    emitter: CodeEmitter,
    // Next fresh scope id; incremented on every enter_scope().
    next_scope_id: ScopeId,
    // Stack of lexical scopes; the global scope sits at the bottom.
    scope_stack: Vec<Scope>,
}
354
355struct SavedScope(Scope);
356
/// Something that can be compiled into bytecode whose execution continues at
/// `next` (remember that code is emitted backwards, so `next` already exists).
trait Compile {
    fn compile(&self, compiler: &mut Compiler, next: Address) -> Result<Address>;
}
360
361impl Compile for Value {
362    fn compile(&self, compiler: &mut Compiler, next: Address) -> Result<Address> {
363        Ok(compiler.emitter.emit_constant(self.clone(), next))
364    }
365}
366
367impl Compile for Query {
368    fn compile(&self, compiler: &mut Compiler, next: Address) -> Result<Address> {
369        compiler.compile_query(self, next)
370    }
371}
372
373impl Compile for Term {
374    fn compile(&self, compiler: &mut Compiler, next: Address) -> Result<Address> {
375        compiler.compile_term(self, next)
376    }
377}
378
379impl Compile for Box<Query> {
380    fn compile(&self, compiler: &mut Compiler, next: Address) -> Result<Address> {
381        compiler.compile_query(self.as_ref(), next)
382    }
383}
384
/// Blanket impl letting plain `Fn(&mut Compiler, Address) -> Result<Address>`
/// callbacks be passed wherever a [Compile] is expected.
impl<F> Compile for F
where
    F: Fn(&mut Compiler, Address) -> Result<Address>,
{
    fn compile(&self, compiler: &mut Compiler, next: Address) -> Result<Address> {
        self(compiler, next)
    }
}
393
impl Default for Compiler {
    /// Equivalent to [Compiler::new].
    fn default() -> Self {
        Self::new()
    }
}
399
400impl Compiler {
401    pub fn new() -> Self {
402        Self {
403            emitter: CodeEmitter::new(),
404            next_scope_id: ScopeId(1),
405            scope_stack: vec![Scope::new(ScopeId(0))],
406        }
407    }
408
409    fn current_scope(&self) -> &Scope {
410        self.scope_stack
411            .last()
412            .expect("Scope stack shouldn't be empty")
413    }
414
415    fn current_scope_mut(&mut self) -> &mut Scope {
416        self.scope_stack
417            .last_mut()
418            .expect("Scope stack shouldn't be empty")
419    }
420
421    fn save_scope(&mut self) -> SavedScope {
422        SavedScope(self.current_scope_mut().clone())
423    }
424
    /// Restores the bindings captured by [Self::save_scope], while keeping the
    /// slot counters that advanced since the snapshot: slots already handed
    /// out must stay unique within the frame even after their bindings are
    /// rolled back.
    fn restore_scope(&mut self, save: SavedScope) {
        let mut save = save.0;
        let current = self.current_scope_mut();
        assert_eq!(current.id, save.id);
        // Carry the live counters into the restored snapshot.
        save.next_variable_slot_id = current.next_variable_slot_id;
        save.next_closure_slot_id = current.next_closure_slot_id;
        save.next_label_slot_id = current.next_label_slot_id;
        *current = save;
    }
434
435    fn allocate_variable(&mut self) -> ScopedSlot {
436        self.current_scope_mut().allocate_variable()
437    }
438
439    fn register_variable(&mut self, name: Identifier) -> ScopedSlot {
440        self.current_scope_mut().register_variable(name)
441    }
442
443    fn register_closure(&mut self, name: Identifier) -> ScopedSlot {
444        self.current_scope_mut().register_closure(name)
445    }
446
447    fn register_function(&mut self, name: Identifier, function: DeclaredFunction) {
448        self.current_scope_mut().register_function(name, function)
449    }
450
451    fn enter_scope(&mut self) -> ScopeId {
452        let new_scope = Scope::nested(self.next_scope_id, self.current_scope());
453        self.scope_stack.push(new_scope);
454        let ret = self.next_scope_id;
455        self.next_scope_id.0 += 1;
456        ret
457    }
458
459    fn current_scope_require_slot(&self) -> bool {
460        self.current_scope().require_slot()
461    }
462
463    fn exit_scope(&mut self, id: ScopeId) -> (usize, usize, usize) {
464        let scope = self
465            .scope_stack
466            .pop()
467            .expect("Scope stack shouldn't be empty");
468        assert_eq!(id, scope.id);
469        (
470            scope.next_variable_slot_id,
471            scope.next_closure_slot_id,
472            scope.next_label_slot_id,
473        )
474    }
475
476    fn exit_scope_and_emit_new_frame(&mut self, id: ScopeId, next: Address) -> Address {
477        let scope_info = self.exit_scope(id);
478        self.emitter.emit_normal_op(
479            ByteCode::NewFrame {
480                id,
481                variable_cnt: scope_info.0,
482                closure_cnt: scope_info.1,
483                label_cnt: scope_info.2,
484            },
485            next,
486        )
487    }
488
489    fn exit_global_scope_and_emit_new_frame(&mut self, next: Address) -> Address {
490        self.exit_scope_and_emit_new_frame(ScopeId(0), next)
491    }
492
493    fn lookup_variable(&self, name: &Identifier) -> Result<&ScopedSlot> {
494        self.current_scope()
495            .lookup_variable(name)
496            .ok_or_else(|| CompileError::UnknownVariable(name.clone()))
497    }
498
    /// Built-ins that compile to dedicated code sequences (or single ops)
    /// rather than going through the generic intrinsic table. Returns [None]
    /// if `function` is not one of them.
    fn lookup_compilable_intrinsic(function: &FunctionIdentifier) -> Option<FunctionLike> {
        Some(match function {
            FunctionIdentifier(Identifier(name), 0) => match name.as_str() {
                // `empty` simply backtracks; `input` reads the next input value.
                "empty" => FunctionLike::Intrinsic(ByteCode::Backtrack, vec![]),
                "input" => FunctionLike::Intrinsic(ByteCode::Input, vec![]),
                _ => return None,
            },
            FunctionIdentifier(Identifier(name), 1) => match name.as_str() {
                // `path(f)` runs `f` with path tracking enabled so the VM
                // collects the visited path instead of plain values.
                "path" => FunctionLike::ManuallyImplemented("path", |compiler, args, next| {
                    assert_eq!(1, args.len());
                    let next = compiler
                        .emitter
                        .emit_normal_op(ByteCode::ExitPathTracking, next);
                    let next = compiler.compile_query(&args[0], next)?;
                    let next = compiler
                        .emitter
                        .emit_normal_op(ByteCode::EnterPathTracking, next);
                    Ok(next)
                }),
                // `getpath(p)` duplicates the input, evaluates `p`, then
                // accesses the input at that path via `Access`.
                "getpath" => {
                    FunctionLike::ManuallyImplemented("getpath", |compiler, args, next| {
                        assert_eq!(1, args.len());
                        let next = compiler.emitter.emit_normal_op(ByteCode::Access, next);
                        let next = compiler.compile_query(&args[0], next)?;
                        let next = compiler.emitter.emit_normal_op(ByteCode::Dup, next);
                        Ok(next)
                    })
                }
                _ => return None,
            },
            _ => return None,
        })
    }
532
    /// Resolves a call target in priority order: scope-declared functions and
    /// closures first, then specially-compiled built-ins, then generic
    /// intrinsic opcodes. Errors if none match the name/arity.
    fn lookup_function(&self, function: &FunctionIdentifier) -> Result<FunctionLike> {
        self.current_scope()
            .lookup_function(function)
            .cloned()
            .or_else(|| Self::lookup_compilable_intrinsic(function))
            .or_else(|| {
                intrinsic::lookup_intrinsic_fn(function)
                    .map(|(code, args)| FunctionLike::Intrinsic(code, args))
            })
            .ok_or_else(|| CompileError::UnknownFunction(function.0.clone(), function.1))
    }
544
    /// Consumes nothing, produces nothing. Just places the code.
    ///
    /// Registers the argument bindings into the current scope, compiles the
    /// body terminated by `Ret`, then emits the argument stores.
    fn compile_function_inner(&mut self, args: &[FuncArg], body: &Query) -> Result<Address> {
        #[allow(clippy::needless_collect)] // This collect is needed to unborrow `self`
        let slots: Vec<_> = args
            .iter()
            .map(|arg| match arg {
                FuncArg::Variable(name) => self.register_variable(name.clone()),
                FuncArg::Closure(name) => self.register_closure(name.clone()),
            })
            .collect();

        let next = self.emitter.emit_terminal_op(ByteCode::Ret);
        let mut next = self.compile_query(body, next)?;
        // Code is emitted backwards: a store emitted later here runs earlier
        // at runtime, so arguments are popped in reverse declaration order —
        // matching the calling convention described at the top of this file.
        for (arg, slot) in args.iter().zip(slots.into_iter()) {
            next = match arg {
                FuncArg::Variable(_) => self.emitter.emit_normal_op(ByteCode::Store(slot), next),
                FuncArg::Closure(_) => self
                    .emitter
                    .emit_normal_op(ByteCode::StoreClosure(slot), next),
            };
        }
        Ok(next)
    }
568
569    /// Consumes nothing, produces nothing. Just places the code.
570    fn compile_closure(&mut self, closure: &Query) -> Result<Address> {
571        let scope_id = self.enter_scope();
572        let next = self.compile_function_inner(&[], closure)?;
573        let next = self.exit_scope_and_emit_new_frame(scope_id, next);
574        Ok(next)
575    }
576
    /// Consumes nothing, produces nothing. Registers function to the current scope.
    ///
    /// Three placeholder entry points are emitted before the body is compiled
    /// so recursive calls can reference the function immediately; they are
    /// patched once the real body address is known.
    fn compile_funcdef(&mut self, func: &FuncDef) -> Result<()> {
        let (func_address, placeholder) = self.emitter.emit_terminal_placeholder();
        let (tail_call_discard_frame, tail_call_discard_frame_placeholder) =
            self.emitter.emit_terminal_placeholder();
        let (tail_call_preserve_frame, tail_call_preserve_frame_placeholder) =
            self.emitter.emit_terminal_placeholder();
        let arg_types = func
            .args
            .iter()
            .map(|arg| match arg {
                FuncArg::Variable(_) => ArgType::Value,
                FuncArg::Closure(_) => ArgType::Closure,
            })
            .collect();
        // Register before compiling the body so the function can call itself.
        self.register_function(
            func.name.clone(),
            DeclaredFunction {
                address: func_address,
                tail_call_discard_frame,
                tail_call_preserve_frame,
                arg_types,
            },
        );

        let scope_id = self.enter_scope();
        let function_body = self.compile_function_inner(&func.args, &func.body)?;
        let require_slot = self.current_scope_require_slot();
        // `real_address` points at the NewFrame preceding the body.
        let real_address = self.exit_scope_and_emit_new_frame(scope_id, function_body);
        self.emitter
            .replace_placeholder(placeholder, ByteCode::Jump(real_address));
        if require_slot {
            // The function needs a frame: tail calls must still go through
            // NewFrame, either discarding or chaining the caller's frame.
            self.emitter.replace_placeholder(
                tail_call_discard_frame_placeholder,
                ByteCode::TailCall(real_address),
            );
            self.emitter.replace_placeholder(
                tail_call_preserve_frame_placeholder,
                ByteCode::CallChainRet(real_address),
            );
        } else {
            // No slots were allocated: skip the NewFrame entirely and jump
            // straight into the body for both tail-call variants.
            self.emitter.replace_placeholder(
                tail_call_discard_frame_placeholder,
                ByteCode::Jump(function_body),
            );
            self.emitter.replace_placeholder(
                tail_call_preserve_frame_placeholder,
                ByteCode::Jump(function_body),
            );
        }
        Ok(())
    }
629
    /// Compiles evaluation/pushing of call arguments in front of `next`
    /// (which is the call itself, since code is emitted backwards). Closure
    /// args become `PushClosure` ops; value args are evaluated eagerly, each
    /// against the same input context.
    fn compile_func_call_args(
        &mut self,
        args: &[Query],
        types: &[ArgType],
        mut next: Address,
    ) -> Result<Address> {
        assert_eq!(args.len(), types.len());
        // Compile closures first to reduce number of jumps
        let mut closure_addresses = vec![];
        for (arg, ty) in args.iter().zip(types.iter()) {
            if ty == &ArgType::Closure {
                closure_addresses.push(Some(self.compile_closure(arg)?));
            } else {
                closure_addresses.push(None);
            }
        }
        // We need to evaluate all value-typed arguments on the same current context (stack top).
        // To do so, we dup, calc and swap to shift a copy of the context up
        let mut require_context = false;
        for ((arg, ty), closure) in args.iter().zip(types.iter()).zip(closure_addresses).rev() {
            match ty {
                ArgType::Closure => {
                    let closure =
                        ClosureAddress(closure.expect("closures should be compiled already"));
                    /*
                    TODO: Uncomment on #14
                    if require_context {
                        next = self.emitter.emit_normal_op(ByteCode::Swap, next);
                    }
                     */
                    // TODO: Optimize closures that doesn't require frames saved.
                    next = self
                        .emitter
                        .emit_normal_op(ByteCode::PushClosure(closure), next);
                }
                ArgType::Value => {
                    // Value args are evaluated with path tracking suspended.
                    next = self
                        .emitter
                        .emit_normal_op(ByteCode::ExitNonPathTracking, next);
                    if require_context {
                        next = self.emitter.emit_normal_op(ByteCode::Swap, next);
                    }
                    next = self.compile_query(arg, next)?;
                    if require_context {
                        next = self.emitter.emit_normal_op(ByteCode::Dup, next);
                    } else {
                        require_context = true;
                    }
                    next = self
                        .emitter
                        .emit_normal_op(ByteCode::EnterNonPathTracking, next);
                }
            }
        }
        if require_context {
            // Keep a copy of the input context for the first value-arg to use.
            next = self.emitter.emit_normal_op(ByteCode::Dup, next);
        }
        Ok(next)
    }
689
    /// Consumes a value from the stack, and produces a single value onto the stack.
    ///
    /// Emits the call itself (choosing a tail-call entry point when the call
    /// is immediately followed by a `Ret`), then the argument setup in front
    /// of it.
    fn compile_func_call(
        &mut self,
        function: FunctionLike,
        args: &[Query],
        next: Address,
    ) -> Result<Address> {
        Ok(match function {
            FunctionLike::Function(DeclaredFunction {
                address,
                tail_call_discard_frame,
                tail_call_preserve_frame,
                arg_types,
            }) => {
                // A call whose continuation is `Ret` is a tail call.
                let call = if matches!(self.emitter.get_next_op(next), ByteCode::Ret) {
                    // The destination `tail_call_*_frame` will be (or is already) replaced with either
                    // a `ByteCode::TailCall(new_frame_address)` or a `ByteCode::Jump(after_new_frame_address)`
                    // depending on whether the function requires a slot.
                    // To use `ByteCode::TailCall`, we can't have any slot that are referenced from the function we're calling,
                    // since we'll discard the current frame.
                    // As an estimation, we care if there's any slot on the current scope already referenced from another scope.
                    // The function we call, and functions that can be invoked from there
                    // are either a function that we've already compiled, or a function currently in the scope stack.
                    // Either way, this heuristic should catch the usage of a slot by them.
                    let can_discard_frame = !self.current_scope().has_slot_leaked_scope();
                    if can_discard_frame {
                        self.emitter
                            .emit_terminal_op(ByteCode::Jump(tail_call_discard_frame))
                    } else {
                        self.emitter
                            .emit_terminal_op(ByteCode::Jump(tail_call_preserve_frame))
                    }
                } else {
                    self.emitter.emit_normal_op(ByteCode::Call(address), next)
                };
                // Since value-args doesn't have to be preserved and closure-args save their frames,
                // we don't need these args to be tracked for the above condition.
                self.compile_func_call_args(args, &arg_types, call)?
            }
            FunctionLike::Closure(slot) => {
                // Closures take no arguments by construction.
                assert!(args.is_empty());
                if matches!(self.emitter.get_next_op(next), ByteCode::Ret)
                    && !self.current_scope().has_slot_leaked_scope()
                {
                    // log::info!("Tail call closure for slot {:?}", slot);
                    self.emitter
                        .emit_terminal_op(ByteCode::TailCallClosure(slot))
                } else {
                    self.emitter
                        .emit_normal_op(ByteCode::CallClosure(slot), next)
                }
            }
            FunctionLike::Intrinsic(bytecode, types) => {
                // Intrinsics are a single op; args are set up in front of it.
                let next = self.emitter.emit_normal_op(bytecode, next);
                self.compile_func_call_args(args, &types, next)?
            }
            FunctionLike::ManuallyImplemented(_, implementation) => {
                implementation(self, args, next)?
            }
        })
    }
751
752    fn lookup_and_compile_func_call(
753        &mut self,
754        name: Identifier,
755        args: &[Query],
756        next: Address,
757    ) -> Result<Address> {
758        let resolved = self.lookup_function(&FunctionIdentifier(name, args.len()))?;
759        self.compile_func_call(resolved, args, next)
760    }
761
    /// Consumes a value from the stack, and produces a single value onto the stack.
    ///
    /// Compiles `body ? // catch`: the body is bracketed by
    /// `ForkTryBegin`/`ForkTryEnd`; `catch_pc`, when present, is where the VM
    /// resumes on error, and it continues directly to `next`.
    fn compile_try<T: Compile, U: Compile>(
        &mut self,
        body: &T,
        catch: Option<&U>,
        next: Address,
    ) -> Result<Address> {
        let try_end = self.emitter.emit_normal_op(ByteCode::ForkTryEnd, next);
        let catch_pc = catch.map(|c| c.compile(self, next)).transpose()?;
        let body = body.compile(self, try_end)?;
        Ok(self
            .emitter
            .emit_normal_op(ByteCode::ForkTryBegin { catch_pc }, body))
    }
776
    /// Consumes a value from the stack, and produces a single value onto the stack.
    ///
    /// Runtime order: `Dup` the input, evaluate `cond` on the copy (without
    /// path tracking), then `JumpUnless` to the negative branch — or fall
    /// through into the positive branch.
    fn compile_if<T: Compile, U: Compile, V: Compile>(
        &mut self,
        cond: &T,
        positive: &U,
        negative: Option<&V>,
        next: Address,
    ) -> Result<Address> {
        // With no `else`, the input value itself flows on to `next`.
        let negative_address = if let Some(negative) = negative {
            negative.compile(self, next)?
        } else {
            next
        };
        let positive_address = positive.compile(self, next)?;
        let next = self
            .emitter
            .emit_normal_op(ByteCode::JumpUnless(negative_address), positive_address);
        let next = self.compile_without_path_tracking(cond, next)?;
        let next = self.emitter.emit_normal_op(ByteCode::Dup, next);
        Ok(next)
    }
798
    /// Consumes a value from the stack, and produces a single value onto the stack.
    /// Execution order: `index`, `body`
    ///
    /// Runtime sequence: `Dup` input, evaluate `index` (without path
    /// tracking), `Swap`, evaluate `body`, then `Index`.
    fn compile_index<T: Compile, U: Compile>(
        &mut self,
        body: &T,
        index: &U,
        next: Address,
    ) -> Result<Address> {
        let indexing = self.emitter.emit_normal_op(ByteCode::Index, next);
        let body = body.compile(self, indexing)?;
        let swap = self.emitter.emit_normal_op(ByteCode::Swap, body);
        let index = self.compile_without_path_tracking(index, swap)?;
        Ok(self.emitter.emit_normal_op(ByteCode::Dup, index))
    }
813
    /// Consumes a value from the stack, and produces a single value onto the stack.
    /// Execution order: `start`, `end`, `body`
    fn compile_slice<T: Compile, U: Compile, V: Compile>(
        &mut self,
        body: &T,
        start: Option<&U>,
        end: Option<&V>,
        next: Address,
    ) -> Result<Address> {
        // A slice with neither bound is not produced by the parser.
        assert!(start.is_some() || end.is_some());
        // The Slice op records which of the two bounds are on the stack.
        let next = self.emitter.emit_normal_op(
            ByteCode::Slice {
                start: start.is_some(),
                end: end.is_some(),
            },
            next,
        );
        let next = body.compile(self, next)?;

        // Bounds are evaluated with path tracking suspended; each bound gets
        // its own Dup/Swap shuffle so both see the same input context.
        let next = self
            .emitter
            .emit_normal_op(ByteCode::ExitNonPathTracking, next);
        let next = if let Some(end) = end {
            let next = self.emitter.emit_normal_op(ByteCode::Swap, next);
            let next = end.compile(self, next)?;
            self.emitter.emit_normal_op(ByteCode::Dup, next)
        } else {
            next
        };
        let next = if let Some(start) = start {
            let next = self.emitter.emit_normal_op(ByteCode::Swap, next);
            let next = start.compile(self, next)?;
            self.emitter.emit_normal_op(ByteCode::Dup, next)
        } else {
            next
        };

        let next = self
            .emitter
            .emit_normal_op(ByteCode::EnterNonPathTracking, next);
        Ok(next)
    }
856
857    /// Consumes a value from the stack, and produces a single value onto the stack.
858    fn compile_iterate<T: Compile>(&mut self, body: &T, next: Address) -> Result<Address> {
859        let next = self.emitter.emit_normal_op(ByteCode::Each, next);
860        body.compile(self, next)
861    }
862
863    /// Consumes a value from the stack, and produces a single value onto the stack.
864    fn compile_term_suffix(
865        &mut self,
866        term: &Term,
867        suffix: &Suffix,
868        next: Address,
869    ) -> Result<Address> {
870        match suffix {
871            Suffix::Optional => match term {
872                Term::Suffix(term, suffix) if suffix != &Suffix::Optional => {
873                    let next = self.compile_try::<_, Query>(
874                        &Term::Suffix(Term::Identity.into(), suffix.clone()),
875                        None,
876                        next,
877                    )?;
878                    self.compile_term(term, next)
879                }
880                _ => self.compile_try::<_, Query>(term, None, next),
881            },
882            Suffix::Iterate => self.compile_iterate(term, next),
883            Suffix::Index(ident) => self.compile_index(
884                term,
885                &Term::Constant(ConstantPrimitive::String(ident.0.clone())),
886                next,
887            ),
888            Suffix::Query(q) => self.compile_index(term, q, next),
889            Suffix::Slice(start, end) => {
890                self.compile_slice(term, start.as_ref(), end.as_ref(), next)
891            }
892        }
893    }
894
895    /// Consumes a value (object) from the stack, and produces a single value (updated object) onto the stack.
896    fn compile_object_entry(
897        &mut self,
898        context: ScopedSlot,
899        key: &Query,
900        value: &Option<Query>,
901        next: Address,
902    ) -> Result<Address> {
903        let next = self.emitter.emit_normal_op(ByteCode::AppendObject, next);
904        let next = match value {
905            Some(value) => {
906                let next = self.compile_query(value, next)?;
907                self.emitter.emit_normal_op(ByteCode::Load(context), next)
908            }
909            None => {
910                if let Query::Term(term) = key {
911                    if let Term::Variable(ident) = term.as_ref() {
912                        let slot = *self.lookup_variable(ident)?;
913                        let next = self.emitter.emit_normal_op(ByteCode::Load(slot), next);
914                        let next = self
915                            .emitter
916                            .emit_normal_op(ByteCode::Push(Value::string(ident.0.clone())), next);
917                        return Ok(next);
918                    }
919                }
920                let next = self.emitter.emit_normal_op(ByteCode::Index, next);
921                let next = self.emitter.emit_normal_op(ByteCode::Load(context), next);
922                self.emitter.emit_normal_op(ByteCode::Dup, next)
923            }
924        };
925        let next = self.compile_query(key, next)?;
926        let next = self.emitter.emit_normal_op(ByteCode::Load(context), next);
927        Ok(next)
928    }
929
930    /// Consumes a value from the stack, and produces a single value onto the stack.
931    fn compile_object(
932        &mut self,
933        kvs: &[(Query, Option<Query>)],
934        mut next: Address,
935    ) -> Result<Address> {
936        let slot = self.allocate_variable();
937        for (key, value) in kvs.iter().rev() {
938            next = self.compile_object_entry(slot, key, value, next)?
939        }
940        next = self
941            .emitter
942            .emit_normal_op(ByteCode::Push(Value::Object(Default::default())), next);
943        Ok(self.emitter.emit_normal_op(ByteCode::Store(slot), next))
944    }
945
    /// Compiles a variable binding: evaluates `source`, destructures each of its
    /// values with `patterns` (a failed match falls through to the next pattern
    /// via `ForkAlt`), and runs `body` with the pattern variables in scope.
    fn compile_bind<T: Compile>(
        &mut self,
        source: &Term,
        patterns: &[BindPattern],
        body: &T,
        next: Address,
    ) -> Result<Address> {
        assert!(!patterns.is_empty());
        // Counts how many times each variable occurs in `pattern`, so that a
        // variable appearing twice within a single pattern can be rejected below.
        fn collect_variable_occurrences<'a>(
            pattern: &'a BindPattern,
            occurrences: &mut HashMap<&'a Identifier, usize>,
        ) {
            match pattern {
                BindPattern::Variable(ident) => {
                    *occurrences.entry(ident).or_insert(0) += 1;
                }
                BindPattern::Array(arr) => {
                    for p in arr {
                        collect_variable_occurrences(p, occurrences);
                    }
                }
                BindPattern::Object(obj) => {
                    for entry in obj {
                        match entry {
                            ObjectBindPatternEntry::KeyOnly(ident) => {
                                *occurrences.entry(ident).or_insert(0) += 1;
                            }
                            ObjectBindPatternEntry::ValueOnly(_, p) => {
                                collect_variable_occurrences(p, occurrences);
                            }
                            ObjectBindPatternEntry::KeyAndValue(key, p) => {
                                *occurrences.entry(key).or_insert(0) += 1;
                                collect_variable_occurrences(p, occurrences);
                            }
                        }
                    }
                }
            }
        }

        /// Consumes a value from the stack
        impl Compile for BindPattern {
            fn compile(&self, compiler: &mut Compiler, next: Address) -> Result<Address> {
                let next = match self {
                    // `$x`: store the value straight into the variable's slot.
                    BindPattern::Variable(ident) => {
                        let slot = *compiler.lookup_variable(ident)?;
                        compiler.emitter.emit_normal_op(ByteCode::Store(slot), next)
                    }
                    // `[p0, p1, ...]`: index the value by 0, 1, ... and recurse
                    // into each element pattern. The value is Dup'd for every
                    // element except the last, which consumes it.
                    BindPattern::Array(v) => {
                        assert!(!v.is_empty());
                        let mut tmp = next;
                        for (i, pattern) in v.iter().enumerate().rev() {
                            tmp = pattern.compile(compiler, tmp)?;
                            tmp = compiler.emitter.emit_normal_op(ByteCode::Index, tmp);
                            tmp = compiler.emitter.emit_normal_op(ByteCode::Swap, tmp);
                            tmp = compiler
                                .emitter
                                .emit_normal_op(ByteCode::Push(Value::number(i)), tmp);
                            if i + 1 != v.len() {
                                tmp = compiler.emitter.emit_normal_op(ByteCode::Dup, tmp);
                            }
                        }
                        tmp
                    }
                    // `{...}`: index the value by each entry's key; a key may be
                    // a literal identifier or a full query.
                    BindPattern::Object(entries) => {
                        assert!(!entries.is_empty());
                        let mut tmp = next;
                        for (i, entry) in entries.iter().rev().enumerate() {
                            match entry {
                                // `{$key}`: index by the literal key name and
                                // bind the result to `$key`.
                                ObjectBindPatternEntry::KeyOnly(key) => {
                                    let slot = *compiler.lookup_variable(key)?;
                                    tmp =
                                        compiler.emitter.emit_normal_op(ByteCode::Store(slot), tmp);
                                    tmp = compiler.emitter.emit_normal_op(ByteCode::Index, tmp);
                                    tmp = compiler.emitter.emit_normal_op(ByteCode::Swap, tmp);
                                    tmp = compiler.emitter.emit_normal_op(
                                        ByteCode::Push(Value::string(key.0.clone())),
                                        tmp,
                                    );
                                }
                                // Query key with a sub-pattern: evaluate the key
                                // query, index, and destructure recursively.
                                ObjectBindPatternEntry::ValueOnly(key, value) => {
                                    tmp = value.compile(compiler, tmp)?;
                                    tmp = compiler.emitter.emit_normal_op(ByteCode::Index, tmp);
                                    tmp = compiler.emitter.emit_normal_op(ByteCode::Swap, tmp);
                                    tmp = compiler.compile_query(key, tmp)?;
                                    tmp = compiler.emitter.emit_normal_op(ByteCode::Dup, tmp);
                                }
                                // `{$key: pattern}`: bind the indexed value to
                                // `$key` and also destructure it with `pattern`.
                                ObjectBindPatternEntry::KeyAndValue(key, value) => {
                                    tmp = value.compile(compiler, tmp)?;
                                    let slot = *compiler.lookup_variable(key)?;
                                    tmp =
                                        compiler.emitter.emit_normal_op(ByteCode::Store(slot), tmp);
                                    tmp = compiler.emitter.emit_normal_op(ByteCode::Dup, tmp);
                                    tmp = compiler.emitter.emit_normal_op(ByteCode::Index, tmp);
                                    tmp = compiler.emitter.emit_normal_op(ByteCode::Swap, tmp);
                                    tmp = compiler.emitter.emit_normal_op(
                                        ByteCode::Push(Value::string(key.0.clone())),
                                        tmp,
                                    );
                                }
                            }
                            // Keep a copy of the source value for all entries
                            // except the first-emitted (runtime-last) one.
                            if i != 0 {
                                tmp = compiler.emitter.emit_normal_op(ByteCode::Dup, tmp);
                            }
                        }
                        tmp
                    }
                };
                Ok(next)
            }
        }

        // Collect the variables bound by each pattern, rejecting any variable
        // that occurs more than once within the same pattern.
        let mut variables: Vec<HashSet<&Identifier>> = vec![];
        for pattern in patterns {
            let mut map = HashMap::new();
            collect_variable_occurrences(pattern, &mut map);
            if let Some((&key, _)) = map.iter().find(|(_, &v)| v > 1) {
                return Err(CompileError::SameVariableInPattern(key.clone()));
            }
            variables.push(map.keys().cloned().collect())
        }

        // Variables from every alternative pattern are visible to `body`.
        let saved = self.save_scope();
        for &v in variables.iter().flatten().unique() {
            self.register_variable(v.clone());
        }
        let body = body.compile(self, next)?;
        let body = self
            .emitter
            .emit_normal_op(ByteCode::ExitNonPathTracking, body);
        let mut next_alt: Option<Address> = None;

        // Emit the patterns in reverse; each pattern registers the following
        // pattern's code as its `ForkAlt` fallback for when matching fails.
        for (i, pattern) in patterns.iter().enumerate().rev() {
            let mut tmp = if let Some(next_alt) = next_alt {
                let tmp = pattern.compile(self, body)?;
                self.emitter
                    .emit_normal_op(ByteCode::ForkAlt { fork_pc: next_alt }, tmp)
            } else {
                // The last pattern has no fallback; its errors propagate.
                pattern.compile(self, body)?
            };
            if i > 0 {
                // Before a fallback pattern runs, null out the variables the
                // previous pattern may have (partially) bound.
                for prev_ident in variables[i - 1].iter() {
                    let slot = *self.lookup_variable(prev_ident)?;
                    tmp = self.emitter.emit_normal_op(ByteCode::Store(slot), tmp);
                    tmp = self
                        .emitter
                        .emit_normal_op(ByteCode::Push(Value::Null), tmp);
                }
            }
            if i + 1 != patterns.len() {
                tmp = self.emitter.emit_normal_op(ByteCode::Dup, tmp);
            }
            next_alt = Some(tmp)
        }
        let mut next = next_alt.unwrap();

        // Variables that occur only in the non-first patterns are stored as null
        // up front, so `body` can read them even when the first pattern matched.
        for v in variables[1..]
            .iter()
            .flatten()
            .unique()
            .filter(|&&v| !variables[0].contains(v))
        {
            let slot = *self.lookup_variable(v)?;
            next = self.emitter.emit_normal_op(ByteCode::Store(slot), next);
            next = self
                .emitter
                .emit_normal_op(ByteCode::Push(Value::Null), next);
        }

        self.restore_scope(saved);
        let next = self.compile_term(source, next)?;
        let next = self.emitter.emit_normal_op(ByteCode::Dup, next);
        let next = self
            .emitter
            .emit_normal_op(ByteCode::EnterNonPathTracking, next);
        Ok(next)
    }
1123
1124    fn compile_string<T: Compile>(
1125        &mut self,
1126        fragments: &[StringFragment],
1127        stringifier: T,
1128        mut next: Address,
1129    ) -> Result<Address> {
1130        if fragments.is_empty() {
1131            return Ok(self
1132                .emitter
1133                .emit_constant(Value::string("".to_string()), next));
1134        } else if fragments.len() == 1 {
1135            if let StringFragment::String(s) = &fragments[0] {
1136                return Ok(self.emitter.emit_constant(Value::string(s.clone()), next));
1137            }
1138        }
1139
1140        let add = intrinsic::binary(&BinaryArithmeticOp::Add);
1141        let slot = self.allocate_variable();
1142
1143        for (i, fragment) in fragments.iter().enumerate() {
1144            if i + 1 != fragments.len() {
1145                next = self
1146                    .emitter
1147                    .emit_normal_op(ByteCode::Intrinsic1(add.clone()), next);
1148            }
1149            match fragment {
1150                StringFragment::String(s) => {
1151                    next = self
1152                        .emitter
1153                        .emit_normal_op(ByteCode::Push(Value::string(s.clone())), next);
1154                }
1155                StringFragment::Query(q) => {
1156                    next = stringifier.compile(self, next)?;
1157                    next = self.compile_query(q, next)?;
1158                    next = self.emitter.emit_normal_op(ByteCode::Load(slot), next);
1159                }
1160            }
1161        }
1162        next = self.emitter.emit_normal_op(ByteCode::Store(slot), next);
1163        Ok(next)
1164    }
1165
    /// Consumes a value from the stack, and produces a single value onto the stack.
    fn compile_term(&mut self, term: &ast::Term, next: Address) -> Result<Address> {
        let ret = match term {
            // Literal constants are pushed directly.
            Term::Constant(value) => self.emitter.emit_constant_primitive(value.clone(), next),
            // Interpolated string; query fragments are stringified with the
            // `text` intrinsic before concatenation.
            Term::String(s) => self.compile_string(
                s,
                |compiler: &mut Compiler, next| {
                    Ok(compiler.emitter.emit_normal_op(
                        ByteCode::Intrinsic0(NamedFn0 {
                            name: "text",
                            func: intrinsic::text,
                        }),
                        next,
                    ))
                },
                next,
            )?,
            // `.` emits nothing: the input value is already on the stack.
            Term::Identity => next,
            // `..` compiles as a call to the builtin `recurse` with no args.
            Term::Recurse => {
                self.lookup_and_compile_func_call(Identifier("recurse".to_string()), &[], next)?
            }
            Term::Suffix(term, suffix) => self.compile_term_suffix(term, suffix, next)?,
            // `$var`: discard the input value, then load the variable's slot.
            Term::Variable(name) => {
                let slot = *self.lookup_variable(name)?;
                let load = self.emitter.emit_normal_op(ByteCode::Load(slot), next);
                self.emitter.emit_normal_op(ByteCode::Pop, load)
            }
            Term::FunctionCall { name, args } => {
                self.lookup_and_compile_func_call(name.clone(), args, next)?
            }
            // `@format` or `@format "..."`.
            Term::Format(format, str) => {
                let stringifier = intrinsic::stringifier(&format.0)
                    .ok_or_else(|| CompileError::UnknownStringFormatter(format.clone()))?;
                match str {
                    // With a string: interpolated fragments go through the
                    // chosen formatter instead of plain `text`.
                    Some(s) => self.compile_string(
                        s,
                        |compiler: &mut Compiler, next| {
                            Ok(compiler
                                .emitter
                                .emit_normal_op(ByteCode::Intrinsic0(stringifier.clone()), next))
                        },
                        next,
                    )?,
                    // Without a string: the formatter is applied to the input.
                    None => self
                        .emitter
                        .emit_normal_op(ByteCode::Intrinsic0(stringifier), next),
                }
            }
            // Parenthesized query.
            Term::Query(query) => self.compile_query(query, next)?,
            Term::Unary(operator, term) => {
                let operator = intrinsic::unary(operator);
                let next = self
                    .emitter
                    .emit_normal_op(ByteCode::Intrinsic0(operator), next);
                self.compile_term(term, next)?
            }
            Term::Object(kvs) => self.compile_object(kvs, next)?,
            Term::Array(query) => match query {
                // `[]` is just the empty-array constant.
                None => self.emitter.emit_constant(Array::new(), next),
                // `[query]`: store an empty array in `slot`, fork; the main
                // branch runs `query`, appending each produced value to `slot`
                // and backtracking. When `query` is exhausted, the fork target
                // pops the input and loads the finished array.
                Some(query) => {
                    let slot = self.allocate_variable();
                    let load = self.emitter.emit_normal_op(ByteCode::Load(slot), next);
                    let load = self.emitter.emit_normal_op(ByteCode::Pop, load);
                    let backtrack = self.emitter.backtrack();
                    let append = self
                        .emitter
                        .emit_normal_op(ByteCode::Append(slot), backtrack);
                    let query = self.compile_query(query, append)?;
                    let next = self.emitter.emit_fork(load, query);
                    let next = self.emitter.emit_normal_op(ByteCode::Store(slot), next);
                    self.emitter.emit_push(Array::new(), next)
                }
            },
            // `break $label`: terminal jump out to the enclosing label scope.
            Term::Break(name) => {
                let label_slot = *self
                    .current_scope()
                    .lookup_label(name)
                    .ok_or_else(|| CompileError::UnknownLabel(name.clone()))?;
                self.emitter.emit_terminal_op(ByteCode::Break(label_slot))
            }
        };
        Ok(ret)
    }
1249
1250    /// Consumes a value from the stack, and produces a single value onto the stack.
1251    fn compile_query(&mut self, query: &ast::Query, next: Address) -> Result<Address> {
1252        let ret = match query {
1253            Query::Term(term) => self.compile_term(term, next)?,
1254            Query::WithFunc { function, query } => {
1255                let saved = self.save_scope();
1256                self.compile_funcdef(function)?;
1257                let next = self.compile_query(query, next)?;
1258                self.restore_scope(saved);
1259                next
1260            }
1261            Query::Pipe { lhs, rhs } => {
1262                let rhs_address = self.compile_query(rhs, next)?;
1263                self.compile_query(lhs, rhs_address)?
1264            }
1265            Query::Concat { lhs, rhs } => {
1266                let rhs_address = self.compile_query(rhs, next)?;
1267                let lhs_address = self.compile_query(lhs, next)?;
1268                self.emitter.emit_fork(rhs_address, lhs_address)
1269            }
1270            Query::Bind {
1271                source,
1272                patterns,
1273                body,
1274            } => self.compile_bind(source, patterns.as_slice(), body, next)?,
1275            Query::Reduce {
1276                source,
1277                pattern,
1278                initial,
1279                accumulator,
1280            } => {
1281                let slot = self.allocate_variable();
1282                let after = self.emitter.emit_normal_op(ByteCode::Load(slot), next);
1283                let after = self.emitter.emit_normal_op(ByteCode::Pop, after);
1284
1285                let next = self.compile_bind(
1286                    source,
1287                    from_ref(pattern),
1288                    &|compiler: &mut Compiler, next: Address| -> Result<Address> {
1289                        let body = compiler.emitter.emit_normal_op(ByteCode::Store(slot), next);
1290                        let body = compiler.compile_query(accumulator, body)?;
1291                        Ok(compiler.emitter.emit_normal_op(ByteCode::Load(slot), body))
1292                    },
1293                    self.emitter.backtrack(),
1294                )?;
1295                let next = self.emitter.emit_normal_op(ByteCode::Store(slot), next);
1296                // TODO: What to do if `initial` produced more than single value?
1297                let next = self.compile_query(initial, next)?;
1298                let next = self.emitter.emit_normal_op(ByteCode::Dup, next);
1299                self.emitter.emit_fork(after, next)
1300            }
1301            Query::ForEach {
1302                source,
1303                pattern,
1304                initial,
1305                update,
1306                extract,
1307            } => {
1308                let slot = self.allocate_variable();
1309
1310                let next = self.compile_bind(
1311                    source,
1312                    from_ref(pattern),
1313                    &|compiler: &mut Compiler, next: Address| -> Result<Address> {
1314                        let next = if let Some(extract) = extract {
1315                            compiler.compile_query(extract, next)?
1316                        } else {
1317                            next
1318                        };
1319                        let next = compiler.emitter.emit_normal_op(ByteCode::Pop, next);
1320                        let next = compiler.emitter.emit_normal_op(ByteCode::Swap, next);
1321                        let body = compiler.emitter.emit_normal_op(ByteCode::Store(slot), next);
1322                        let body = compiler.emitter.emit_normal_op(ByteCode::Dup, body);
1323                        let body = compiler.compile_query(update, body)?;
1324                        Ok(compiler.emitter.emit_normal_op(ByteCode::Load(slot), body))
1325                    },
1326                    next,
1327                )?;
1328                let next = self.emitter.emit_normal_op(ByteCode::Store(slot), next);
1329                // TODO: What to do if `initial` produced more than single value?
1330                let next = self.compile_query(initial, next)?;
1331                self.emitter.emit_normal_op(ByteCode::Dup, next)
1332            }
1333            Query::If {
1334                cond,
1335                positive,
1336                negative,
1337            } => self.compile_if(cond, positive, negative.as_ref(), next)?,
1338            Query::Try { body, catch } => self.compile_try(body, catch.as_ref(), next)?,
1339            Query::Label { label, body } => {
1340                let saved = self.save_scope();
1341                let label_slot = self.current_scope_mut().register_label(label.clone());
1342                let body = self.compile_query(body, next)?;
1343                self.restore_scope(saved);
1344                self.emitter
1345                    .emit_normal_op(ByteCode::ForkLabel(label_slot), body)
1346            }
1347            Query::Operate { lhs, operator, rhs } => match operator {
1348                BinaryOp::Arithmetic(operator) => {
1349                    let operator = intrinsic::binary(operator);
1350                    let next = self
1351                        .emitter
1352                        .emit_normal_op(ByteCode::Intrinsic1(operator), next);
1353                    let next = self.compile_query(lhs, next)?;
1354                    let next = self.emitter.emit_normal_op(ByteCode::Swap, next);
1355                    let next = self.compile_query(rhs, next)?;
1356                    self.emitter.emit_normal_op(ByteCode::Dup, next)
1357                }
1358                BinaryOp::Alt => {
1359                    let found = self.allocate_variable();
1360                    let backtrack = self.emitter.backtrack();
1361                    let rhs = self.compile_query(rhs, next)?;
1362                    let rhs = self
1363                        .emitter
1364                        .emit_normal_op(ByteCode::JumpUnless(rhs), backtrack);
1365                    let rhs = self.emitter.emit_normal_op(ByteCode::Load(found), rhs);
1366
1367                    let next = self.emitter.emit_normal_op(ByteCode::Store(found), next);
1368                    let next = self.emitter.emit_push(true, next);
1369                    let next = self
1370                        .emitter
1371                        .emit_normal_op(ByteCode::JumpUnless(backtrack), next);
1372                    let next = self.emitter.emit_normal_op(ByteCode::Dup, next);
1373                    let lhs = self.compile_query(lhs, next)?;
1374                    let fork = self.emitter.emit_fork(rhs, lhs);
1375                    let next = self.emitter.emit_normal_op(ByteCode::Store(found), fork);
1376                    self.emitter.emit_push(false, next)
1377                }
1378                BinaryOp::And => self.compile_if(
1379                    lhs,
1380                    &|compiler: &mut Compiler, next| {
1381                        compiler.compile_if(
1382                            rhs,
1383                            &Value::from(true),
1384                            Some(&Value::from(false)),
1385                            next,
1386                        )
1387                    },
1388                    Some(&Value::from(false)),
1389                    next,
1390                )?,
1391                BinaryOp::Or => self.compile_if(
1392                    lhs,
1393                    &Value::from(true),
1394                    Some(&|compiler: &mut Compiler, next| {
1395                        compiler.compile_if(
1396                            rhs,
1397                            &Value::from(true),
1398                            Some(&Value::from(false)),
1399                            next,
1400                        )
1401                    }),
1402                    next,
1403                )?,
1404            },
1405            Query::Update { lhs, operator, rhs } => {
1406                // TODO: Research on the evaluation order.... maybe using `input`?
1407                fn compile_update<T: Compile>(
1408                    compiler: &mut Compiler,
1409                    path_expression: &Query,
1410                    modification: &T,
1411                    next: Address,
1412                ) -> Result<Address> {
1413                    let slot = compiler.allocate_variable();
1414                    let path_slot = compiler.allocate_variable();
1415                    let del_slot = compiler.allocate_variable();
1416                    let label_slot = compiler.current_scope_mut().allocate_label();
1417
1418                    let after = compiler.emitter.emit_normal_op(
1419                        ByteCode::Intrinsic1(NamedFn1 {
1420                            name: "delpaths",
1421                            func: intrinsic::del_paths,
1422                        }),
1423                        next,
1424                    );
1425                    let after = compiler
1426                        .emitter
1427                        .emit_normal_op(ByteCode::Load(del_slot), after);
1428                    let after = compiler.emitter.emit_normal_op(ByteCode::Load(slot), after);
1429                    let after = compiler.emitter.emit_normal_op(ByteCode::Pop, after);
1430
1431                    let on_empty = compiler
1432                        .emitter
1433                        .emit_terminal_op(ByteCode::Break(label_slot));
1434                    let on_empty = compiler
1435                        .emitter
1436                        .emit_normal_op(ByteCode::Store(slot), on_empty);
1437                    let on_empty = compiler
1438                        .emitter
1439                        .emit_normal_op(ByteCode::Append(del_slot), on_empty);
1440                    let on_empty = compiler.emitter.emit_normal_op(ByteCode::Pop, on_empty);
1441
1442                    // for each path(lhs), call set_path(., path, . | getpath(path) | rhs) for the first value produced
1443                    let next = compiler
1444                        .emitter
1445                        .emit_terminal_op(ByteCode::Break(label_slot));
1446                    let next = compiler.emitter.emit_normal_op(ByteCode::Store(slot), next);
1447                    let next = compiler.emitter.emit_normal_op(
1448                        ByteCode::Intrinsic2(NamedFn2 {
1449                            name: "setpath",
1450                            func: intrinsic::set_path,
1451                        }),
1452                        next,
1453                    );
1454                    let next = modification.compile(compiler, next)?;
1455                    let next = compiler.emitter.emit_fork(on_empty, next);
1456                    let next = compiler.emitter.emit_normal_op(
1457                        ByteCode::Intrinsic1(NamedFn1 {
1458                            name: "getpath",
1459                            func: intrinsic::get_path,
1460                        }),
1461                        next,
1462                    );
1463                    let next = compiler
1464                        .emitter
1465                        .emit_normal_op(ByteCode::Load(path_slot), next);
1466                    let next = compiler.emitter.emit_normal_op(ByteCode::Load(slot), next);
1467                    let next = compiler
1468                        .emitter
1469                        .emit_normal_op(ByteCode::Load(path_slot), next);
1470                    let next = compiler.emitter.emit_fork(on_empty, next);
1471                    let next = compiler.emitter.emit_normal_op(ByteCode::Load(slot), next);
1472                    let next = compiler
1473                        .emitter
1474                        .emit_normal_op(ByteCode::ForkLabel(label_slot), next);
1475                    let next = compiler
1476                        .emitter
1477                        .emit_normal_op(ByteCode::Store(path_slot), next);
1478                    let next = compiler
1479                        .emitter
1480                        .emit_normal_op(ByteCode::ExitPathTracking, next);
1481                    let next = compiler.compile_query(path_expression, next)?;
1482                    let next = compiler
1483                        .emitter
1484                        .emit_normal_op(ByteCode::EnterPathTracking, next);
1485                    let next = compiler.emitter.emit_normal_op(ByteCode::Store(slot), next);
1486                    let next = compiler.emitter.emit_normal_op(ByteCode::Dup, next);
1487                    let next = compiler.emitter.emit_fork(after, next);
1488                    let next = compiler
1489                        .emitter
1490                        .emit_normal_op(ByteCode::Store(del_slot), next);
1491                    let next = compiler
1492                        .emitter
1493                        .emit_normal_op(ByteCode::Push(Array::new().into()), next);
1494                    Ok(next)
1495                }
1496
                // Compiles a path-assignment (`lhs = rhs`, `lhs //= rhs`, `lhs op= rhs`).
                //
                // NOTE: code is emitted *backwards* (see the note at the top of this
                // file), so the runtime order is roughly the reverse of the emission
                // order below: evaluate `rhs` and stash it, fork per produced value,
                // stash the current input as the running "updated value", evaluate
                // `path_expression` under path tracking, then for each produced path
                // `setpath` the (possibly combined) value into the running result,
                // and finally emit the accumulated updated value.
                fn compile_assign<T: Compile, U: Compile>(
                    compiler: &mut Compiler,
                    path_expression: &Query,
                    rhs: &T,
                    // Called in state [..., value, path, rhs, lhs]. Should combine the top two into one.
                    combiner: Option<&U>,
                    next: Address,
                ) -> Result<Address> {
                    // Scratch slots: the value being progressively updated, the
                    // pre-computed rhs, and (only when a combiner is used) the
                    // current value at the path (lhs).
                    let updated_value_slot = compiler.allocate_variable();
                    let rhs_slot = compiler.allocate_variable();
                    let lhs_slot = combiner.map(|_| compiler.allocate_variable());

                    // Epilogue: once all paths are exhausted, drop the leftover
                    // stack entry and emit the final updated value.
                    let after = compiler
                        .emitter
                        .emit_normal_op(ByteCode::Load(updated_value_slot), next);
                    let after = compiler.emitter.emit_normal_op(ByteCode::Pop, after);

                    // After storing one `setpath` result, backtrack to process the
                    // next path produced by `path_expression`.
                    let next = compiler.emitter.backtrack();
                    let next = compiler
                        .emitter
                        .emit_normal_op(ByteCode::Store(updated_value_slot), next);
                    // [..., value, path, new_value] -> setpath -> updated value.
                    let next = compiler.emitter.emit_normal_op(
                        ByteCode::Intrinsic2(NamedFn2 {
                            name: "setpath",
                            func: intrinsic::set_path,
                        }),
                        next,
                    );
                    // With a combiner: push rhs then lhs and let the combiner fold
                    // the top two into the value to store. Without: just push rhs.
                    let next = if let Some(combiner) = combiner {
                        let next = combiner.compile(compiler, next)?;
                        let next = compiler
                            .emitter
                            .emit_normal_op(ByteCode::Load(lhs_slot.unwrap()), next);
                        compiler
                            .emitter
                            .emit_normal_op(ByteCode::Load(rhs_slot), next)
                    } else {
                        compiler
                            .emitter
                            .emit_normal_op(ByteCode::Load(rhs_slot), next)
                    };
                    // Arrange [..., value, path] under the value(s) pushed above.
                    let next = compiler.emitter.emit_normal_op(ByteCode::Swap, next);
                    let next = compiler
                        .emitter
                        .emit_normal_op(ByteCode::Load(updated_value_slot), next);
                    // Path evaluation is done; leave path-tracking mode before
                    // materializing the new value.
                    let next = compiler
                        .emitter
                        .emit_normal_op(ByteCode::ExitPathTracking, next);
                    // When a combiner is used, also capture the value the path
                    // expression produced (the current lhs value) into `lhs_slot`.
                    let next = if let Some(slot) = lhs_slot {
                        let next = compiler.emitter.emit_normal_op(ByteCode::Store(slot), next);
                        compiler.emitter.emit_normal_op(ByteCode::Dup, next)
                    } else {
                        next
                    };
                    // Evaluate the lhs path expression under path tracking; each
                    // produced path is handled by the code emitted above.
                    let next = path_expression.compile(compiler, next)?;
                    let next = compiler
                        .emitter
                        .emit_normal_op(ByteCode::EnterPathTracking, next);

                    // Seed `updated_value_slot` with the current input value.
                    let next = compiler
                        .emitter
                        .emit_normal_op(ByteCode::Store(updated_value_slot), next);
                    let next = compiler.emitter.emit_normal_op(ByteCode::Dup, next);
                    // Fork point: once this rhs value is fully processed, resume at
                    // the epilogue (`after`) to emit the result.
                    let next = compiler.emitter.emit_fork(after, next);

                    // Entry: evaluate rhs (on a copy of the input) and stash each
                    // produced value; the whole update runs once per rhs value.
                    let next = compiler
                        .emitter
                        .emit_normal_op(ByteCode::Store(rhs_slot), next);
                    let next = rhs.compile(compiler, next)?;
                    let next = compiler.emitter.emit_normal_op(ByteCode::Dup, next);
                    Ok(next)
                }
1569
1570                match operator {
1571                    UpdateOp::Modify => compile_update(self, lhs, rhs, next)?,
1572                    UpdateOp::Assign => compile_assign::<_, Query>(self, lhs, rhs, None, next)?,
1573                    UpdateOp::Alt => compile_assign(
1574                        self,
1575                        lhs,
1576                        rhs,
1577                        Some(&|compiler: &mut Compiler, next| -> Result<Address> {
1578                            let on_both = compiler.emitter.emit_normal_op(ByteCode::Pop, next);
1579                            let on_falsy = on_both;
1580                            let on_truthy =
1581                                compiler.emitter.emit_normal_op(ByteCode::Swap, on_both);
1582
1583                            let next = compiler
1584                                .emitter
1585                                .emit_normal_op(ByteCode::JumpUnless(on_falsy), on_truthy);
1586                            let next = compiler.emitter.emit_normal_op(ByteCode::Dup, next);
1587                            Ok(next)
1588                        }),
1589                        next,
1590                    )?,
1591                    UpdateOp::Arithmetic(op) => compile_assign(
1592                        self,
1593                        lhs,
1594                        rhs,
1595                        Some(&|compiler: &mut Compiler, next| -> Result<Address> {
1596                            let func = intrinsic::binary(op);
1597                            let next = compiler
1598                                .emitter
1599                                .emit_normal_op(ByteCode::Intrinsic1(func), next);
1600                            Ok(next)
1601                        }),
1602                        next,
1603                    )?,
1604                }
1605            }
1606            Query::Compare {
1607                lhs,
1608                comparator: operator,
1609                rhs,
1610            } => {
1611                let operator = intrinsic::comparator(operator);
1612                let next = self
1613                    .emitter
1614                    .emit_normal_op(ByteCode::Intrinsic1(operator), next);
1615                let next = self.compile_query(lhs, next)?;
1616                let next = self.emitter.emit_normal_op(ByteCode::Swap, next);
1617                let next = self.compile_query(rhs, next)?;
1618                self.emitter.emit_normal_op(ByteCode::Dup, next)
1619            }
1620        };
1621        Ok(ret)
1622    }
1623
1624    fn compile_without_path_tracking<T: Compile>(
1625        &mut self,
1626        body: &T,
1627        next: Address,
1628    ) -> Result<Address> {
1629        let next = self
1630            .emitter
1631            .emit_normal_op(ByteCode::ExitNonPathTracking, next);
1632        let next = body.compile(self, next)?;
1633        let next = self
1634            .emitter
1635            .emit_normal_op(ByteCode::EnterNonPathTracking, next);
1636        Ok(next)
1637    }
1638
1639    fn compile_prelude(&mut self, ast: &ast::Program) -> Result<()> {
1640        assert!(ast.module_header.is_none());
1641        assert!(ast.imports.is_empty());
1642        assert_eq!(ast.query, Term::Identity.into());
1643        for func in &ast.functions {
1644            self.compile_funcdef(func)?
1645        }
1646        Ok(())
1647    }
1648
1649    pub fn compile<M: ModuleLoader>(
1650        &mut self,
1651        ast: &ast::Program,
1652        module_loader: &M,
1653    ) -> Result<Program> {
1654        let preludes = module_loader.prelude()?;
1655        for prelude in preludes {
1656            self.compile_prelude(&prelude)?;
1657        }
1658        if !ast.imports.is_empty() {
1659            todo!()
1660        }
1661        if ast.module_header.is_some() {
1662            todo!()
1663        }
1664        for func in &ast.functions {
1665            self.compile_funcdef(func)?
1666        }
1667        let output = self.emitter.output();
1668        let backtrack = self.emitter.backtrack();
1669        let query_start = self.compile_query(&ast.query, output)?;
1670        let new_frame = self.exit_global_scope_and_emit_new_frame(query_start);
1671        let entry_point = self
1672            .emitter
1673            .emit_normal_op(ByteCode::Call(new_frame), backtrack);
1674        Ok(Program {
1675            code: self.emitter.code.clone(),
1676            entry_point,
1677        })
1678    }
1679}