// oxilean_runtime/tco/types.rs
//! Auto-generated module
//!
//! 🤖 Generated with [SplitRS](https://github.com/cool-japan/splitrs)

use super::functions::trampoline;
use std::collections::{HashMap, HashSet};
7
/// A labeled trampoline step: tag identifies which "function" to run next.
///
/// The tag lets a single driver loop dispatch between two mutually
/// recursive branches (here named "even" and "odd") without growing the
/// call stack; `Done` carries the final output.
#[allow(dead_code)]
pub enum MultiStep<State, Output> {
    /// Run the "even" branch with this state.
    Even(State),
    /// Run the "odd" branch with this state.
    Odd(State),
    /// Done with output.
    Done(Output),
}
/// Detects potential infinite loops by tracking state hashes.
///
/// Callers reduce each state to a `u64` fingerprint; a fingerprint that
/// repeats is treated as evidence of a cycle.
#[allow(dead_code)]
pub struct LoopDetector {
    /// Set of observed state fingerprints.
    seen: HashSet<u64>,
    /// Number of states checked.
    pub checked: u64,
}
impl LoopDetector {
    /// Create a new, empty detector.
    #[allow(dead_code)]
    pub fn new() -> Self {
        LoopDetector {
            seen: HashSet::new(),
            checked: 0,
        }
    }
    /// Record a state with the given fingerprint.
    /// Returns `true` if this state was seen before (loop detected).
    #[allow(dead_code)]
    pub fn check(&mut self, fingerprint: u64) -> bool {
        self.checked += 1;
        let newly_inserted = self.seen.insert(fingerprint);
        !newly_inserted
    }
    /// Clear all recorded history.
    #[allow(dead_code)]
    pub fn reset(&mut self) {
        self.checked = 0;
        self.seen.clear();
    }
    /// Number of unique states seen so far.
    #[allow(dead_code)]
    pub fn unique_states(&self) -> usize {
        self.seen.len()
    }
}
54/// Aggregated TCO metrics across many functions.
55#[allow(dead_code)]
56#[derive(Default)]
57pub struct TrampolineMetricsRegistry {
58    /// Map from function name to metrics.
59    pub functions: std::collections::HashMap<String, FunctionTcoMetrics>,
60}
61impl TrampolineMetricsRegistry {
62    /// Create an empty registry.
63    #[allow(dead_code)]
64    pub fn new() -> Self {
65        Self::default()
66    }
67    /// Record a TCO call for `function` at `depth`.
68    #[allow(dead_code)]
69    pub fn record(&mut self, function: &str, depth: u64) {
70        self.functions
71            .entry(function.to_string())
72            .or_insert_with(|| FunctionTcoMetrics::new(function))
73            .record(depth);
74    }
75    /// Return the top `n` functions by total steps.
76    #[allow(dead_code)]
77    pub fn top_by_steps(&self, n: usize) -> Vec<&FunctionTcoMetrics> {
78        let mut sorted: Vec<&FunctionTcoMetrics> = self.functions.values().collect();
79        sorted.sort_by(|a, b| b.total_steps.cmp(&a.total_steps));
80        sorted.truncate(n);
81        sorted
82    }
83    /// Total calls across all functions.
84    #[allow(dead_code)]
85    pub fn total_calls(&self) -> u64 {
86        self.functions.values().map(|m| m.call_count).sum()
87    }
88}
89/// A trampoline driver with a configurable step limit.
90#[allow(dead_code)]
91pub struct BoundedTrampoline {
92    /// The maximum number of steps allowed.
93    pub step_limit: u64,
94}
95impl BoundedTrampoline {
96    /// Create a bounded trampoline with the given step limit.
97    #[allow(dead_code)]
98    pub fn new(step_limit: u64) -> Self {
99        Self { step_limit }
100    }
101    /// Run `step` to completion, returning `Ok(value)` or an error if the limit
102    /// was exceeded.
103    #[allow(dead_code)]
104    pub fn run<T>(&self, mut step: TailCall<T>) -> Result<T, String> {
105        let mut count = 0u64;
106        loop {
107            match step {
108                TailCall::Done(v) => return Ok(v),
109                TailCall::Call(f) => {
110                    count += 1;
111                    if count > self.step_limit {
112                        return Err(format!(
113                            "trampoline step limit {} exceeded at step {}",
114                            self.step_limit, count
115                        ));
116                    }
117                    step = f();
118                }
119            }
120        }
121    }
122}
/// A scheduler that drives a `TailCall<T>` in bounded batches.
///
/// Call [`tick`](Self::tick) repeatedly to advance the computation at most
/// `config.max_steps_per_batch` steps at a time, or
/// [`run_to_completion`](Self::run_to_completion) to drive it to the end.
#[allow(dead_code)]
pub struct TailCallScheduler<T> {
    /// Current pending step (None if already finished or not started).
    pending: Option<TailCall<T>>,
    /// Configuration for this scheduler.
    pub config: TailCallSchedulerConfig,
    /// Total steps taken so far.
    pub total_steps: u64,
}
impl<T> TailCallScheduler<T> {
    /// Create a new scheduler with the given step and the default config.
    #[allow(dead_code)]
    pub fn new(step: TailCall<T>) -> Self {
        Self {
            pending: Some(step),
            config: TailCallSchedulerConfig::default(),
            total_steps: 0,
        }
    }
    /// Create a scheduler with a custom config.
    #[allow(dead_code)]
    pub fn with_config(step: TailCall<T>, config: TailCallSchedulerConfig) -> Self {
        Self {
            pending: Some(step),
            config,
            total_steps: 0,
        }
    }
    /// Run up to `max_steps_per_batch` steps.
    ///
    /// Returns `Finished` if the computation completes during this batch,
    /// `StepLimitExceeded` once the cumulative `total_steps` passes
    /// `config.step_limit`, and `Pending` otherwise.
    ///
    /// NOTE(review): after a terminal result (`Finished` or
    /// `StepLimitExceeded`), `pending` is left `None`, so any further `tick`
    /// calls report `Pending` forever. Callers must stop ticking once a
    /// terminal result is observed.
    #[allow(dead_code)]
    pub fn tick(&mut self) -> SchedulerTickResult<T> {
        let batch = self.config.max_steps_per_batch;
        for _ in 0..batch {
            // `take()` moves the step out; it is restored only if another
            // step is produced below.
            match self.pending.take() {
                // Nothing pending: never started or already consumed.
                None => return SchedulerTickResult::Pending,
                Some(TailCall::Done(v)) => return SchedulerTickResult::Finished(v),
                Some(TailCall::Call(f)) => {
                    self.total_steps += 1;
                    if self.total_steps > self.config.step_limit {
                        // The taken closure is dropped uncalled; the
                        // computation cannot be resumed after this point.
                        return SchedulerTickResult::StepLimitExceeded;
                    }
                    self.pending = Some(f());
                }
            }
        }
        // Batch budget exhausted without reaching a terminal state.
        SchedulerTickResult::Pending
    }
    /// Run to completion (ignoring the batch limit).
    ///
    /// Consumes the scheduler, looping over `tick` until the computation
    /// finishes or the step limit is exceeded.
    #[allow(dead_code)]
    pub fn run_to_completion(mut self) -> Result<T, String> {
        loop {
            match self.tick() {
                SchedulerTickResult::Finished(v) => return Ok(v),
                SchedulerTickResult::StepLimitExceeded => {
                    return Err(format!("step limit {} exceeded", self.config.step_limit));
                }
                // Not terminal yet: run another batch.
                SchedulerTickResult::Pending => {}
            }
        }
    }
}
/// Configuration for loop unrolling.
#[allow(dead_code)]
#[derive(Clone, Debug)]
pub struct UnrollConfig {
    /// Factor by which to unroll the loop (e.g., 4 = 4 copies of the body).
    pub factor: usize,
    /// Maximum loop count to consider for full unrolling.
    pub full_unroll_limit: usize,
}
/// Result of loop unrolling analysis.
#[allow(dead_code)]
#[derive(Clone, Debug)]
pub struct UnrollResult {
    /// Whether the loop was fully unrolled.
    pub fully_unrolled: bool,
    /// Factor used.
    pub factor: usize,
    /// Estimated savings (iterations avoided).
    pub iterations_saved: usize,
}
impl UnrollResult {
    /// Compute the unroll result for a loop with `n` iterations.
    ///
    /// Loops with at most `cfg.full_unroll_limit` iterations are fully
    /// unrolled. Otherwise partial unrolling by `cfg.factor` is applied; a
    /// configured factor of 0 is clamped to 1 (effectively "no unrolling")
    /// so the computation never divides by zero.
    #[allow(dead_code)]
    pub fn compute(n: usize, cfg: &UnrollConfig) -> Self {
        if n <= cfg.full_unroll_limit {
            UnrollResult {
                fully_unrolled: true,
                factor: n,
                iterations_saved: n,
            }
        } else {
            // Clamp to [1, n]: the previous code used `cfg.factor.min(n)`,
            // which panics on `n % factor` when the configured factor is 0.
            // (`n >= 1` here because `n > full_unroll_limit >= 0`.)
            let factor = cfg.factor.clamp(1, n);
            let remainder = n % factor;
            let main_iters = n / factor;
            // Heuristic: loop-control overhead remains for each unrolled
            // iteration of the main loop plus each leftover iteration.
            let saved = n - main_iters - remainder;
            UnrollResult {
                fully_unrolled: false,
                factor,
                iterations_saved: saved,
            }
        }
    }
}
/// A simple evaluation context that maps names to values.
#[allow(dead_code)]
#[derive(Clone, Debug, Default)]
pub struct EvaluationContext {
    /// Variable bindings.
    pub bindings: HashMap<String, u64>,
    /// Stack depth.
    pub depth: u32,
}
impl EvaluationContext {
    /// Create an empty context at depth 0.
    #[allow(dead_code)]
    pub fn new() -> Self {
        Self::default()
    }
    /// Bind `name` to `value`, overwriting any previous binding.
    #[allow(dead_code)]
    pub fn bind(&mut self, name: &str, value: u64) {
        self.bindings.insert(name.to_owned(), value);
    }
    /// Look up a variable by name.
    #[allow(dead_code)]
    pub fn lookup(&self, name: &str) -> Option<u64> {
        self.bindings.get(name).copied()
    }
    /// Return a child context: same bindings, depth incremented by one.
    #[allow(dead_code)]
    pub fn child(&self) -> Self {
        let mut next = self.clone();
        next.depth += 1;
        next
    }
    /// Number of bindings.
    #[allow(dead_code)]
    pub fn size(&self) -> usize {
        self.bindings.len()
    }
}
267/// A simple peephole optimizer that applies rules to an opcode list.
268#[allow(dead_code)]
269pub struct PeepholeOptimizer {
270    rules: Vec<PeepholeRule>,
271    /// Number of rewrites applied in the last `optimize` call.
272    pub rewrites: usize,
273}
274impl PeepholeOptimizer {
275    /// Create an optimizer with no rules.
276    #[allow(dead_code)]
277    pub fn new() -> Self {
278        Self {
279            rules: Vec::new(),
280            rewrites: 0,
281        }
282    }
283    /// Add a rule.
284    #[allow(dead_code)]
285    pub fn add_rule(&mut self, rule: PeepholeRule) {
286        self.rules.push(rule);
287    }
288    /// Apply all rules to `opcodes`, returning the optimized sequence.
289    #[allow(dead_code)]
290    pub fn optimize<'a>(&mut self, opcodes: &[&'a str]) -> Vec<&'a str> {
291        let mut result: Vec<&'a str> = opcodes.to_vec();
292        let mut changed = true;
293        self.rewrites = 0;
294        while changed {
295            changed = false;
296            'outer: for rule in &self.rules {
297                let plen = rule.pattern.len();
298                if plen == 0 {
299                    continue;
300                }
301                let mut i = 0;
302                while i + plen <= result.len() {
303                    if result[i..i + plen] == rule.pattern[..] {
304                        let mut new_result = result[..i].to_vec();
305                        new_result.extend_from_slice(&rule.replacement);
306                        new_result.extend_from_slice(&result[i + plen..]);
307                        result = new_result;
308                        self.rewrites += 1;
309                        changed = true;
310                        continue 'outer;
311                    }
312                    i += 1;
313                }
314            }
315        }
316        result
317    }
318}
/// A simple heuristic for identifying tail-position `Call` instructions
/// in a flat bytecode sequence.
///
/// The rule is: a `Call` instruction is in tail position if it is
/// immediately followed by a `Return` instruction (with no intervening
/// instructions that produce stack values consumed after the return).
pub struct TailCallDetector {
    /// Positions (0-based) of instructions that are in tail position.
    pub tail_positions: Vec<usize>,
}
impl TailCallDetector {
    /// Create a new detector (no positions identified yet).
    pub fn new() -> Self {
        Self {
            tail_positions: Vec::new(),
        }
    }
    /// Analyse a sequence of opcode names (strings) and populate
    /// [`tail_positions`](Self::tail_positions).
    ///
    /// The opcode names are strings like `"Call"`, `"Return"`, `"Halt"`,
    /// matching what [`crate::bytecode_interp::Opcode`] would produce via
    /// `Debug`. This keeps the detector decoupled from the specific
    /// `Opcode` enum variant structure.
    pub fn analyse(&mut self, opcode_names: &[&str]) {
        // A `Call` counts as a tail call when followed by `Return` or `Halt`;
        // a trailing `Call` with no successor does not count.
        self.tail_positions = opcode_names
            .iter()
            .enumerate()
            .filter(|&(i, name)| {
                *name == "Call"
                    && matches!(opcode_names.get(i + 1), Some(&"Return") | Some(&"Halt"))
            })
            .map(|(i, _)| i)
            .collect();
    }
    /// Returns `true` if position `idx` was identified as a tail call.
    pub fn is_tail(&self, idx: usize) -> bool {
        self.tail_positions.contains(&idx)
    }
    /// Number of tail-call positions found.
    pub fn count(&self) -> usize {
        self.tail_positions.len()
    }
}
363/// Aggregate statistics for a batch of TCO-enabled computations.
364#[allow(dead_code)]
365#[derive(Clone, Debug, Default)]
366pub struct TcoStatistics {
367    /// Total number of computations run.
368    pub total_runs: u64,
369    /// Total trampoline steps across all runs.
370    pub total_steps: u64,
371    /// Maximum depth seen in any single run.
372    pub global_max_depth: u64,
373    /// Number of runs that hit the step limit.
374    pub step_limit_hits: u64,
375}
376impl TcoStatistics {
377    /// Create zeroed statistics.
378    #[allow(dead_code)]
379    pub fn new() -> Self {
380        Self::default()
381    }
382    /// Record the result of a single run.
383    #[allow(dead_code)]
384    pub fn record_run(&mut self, counter: &TailCallCounter, hit_limit: bool) {
385        self.total_runs += 1;
386        self.total_steps += counter.optimized;
387        if counter.max_depth > self.global_max_depth {
388            self.global_max_depth = counter.max_depth;
389        }
390        if hit_limit {
391            self.step_limit_hits += 1;
392        }
393    }
394    /// Average steps per run.
395    #[allow(dead_code)]
396    pub fn avg_steps(&self) -> f64 {
397        if self.total_runs == 0 {
398            0.0
399        } else {
400            self.total_steps as f64 / self.total_runs as f64
401        }
402    }
403    /// Format as a summary string.
404    #[allow(dead_code)]
405    pub fn summary(&self) -> String {
406        format!(
407            "TcoStats: runs={}, total_steps={}, max_depth={}, limit_hits={}, avg_steps={:.2}",
408            self.total_runs,
409            self.total_steps,
410            self.global_max_depth,
411            self.step_limit_hits,
412            self.avg_steps()
413        )
414    }
415}
/// A simulated stack frame for explicit-stack recursion.
#[allow(dead_code)]
#[derive(Clone, Debug)]
pub struct StackFrame {
    /// The function name at this frame.
    pub function: String,
    /// Local variable bindings as `(name, value)` pairs.
    pub locals: Vec<(String, u64)>,
    /// The return address (index into a bytecode chunk).
    pub return_address: usize,
}
impl StackFrame {
    /// Create a frame for `function` returning to `return_address`.
    #[allow(dead_code)]
    pub fn new(function: &str, return_address: usize) -> Self {
        StackFrame {
            function: function.to_owned(),
            locals: Vec::new(),
            return_address,
        }
    }
    /// Append a local binding; later bindings shadow earlier ones.
    #[allow(dead_code)]
    pub fn bind(&mut self, name: &str, value: u64) {
        self.locals.push((name.to_owned(), value));
    }
    /// Look up a local variable by name; the most recent binding wins.
    #[allow(dead_code)]
    pub fn lookup(&self, name: &str) -> Option<u64> {
        self.locals
            .iter()
            .rfind(|(n, _)| n == name)
            .map(|&(_, v)| v)
    }
}
/// Threshold configuration for inlining decisions.
///
/// Pure data: the checks themselves are applied by whatever consumes this
/// struct (not visible in this module).
#[allow(dead_code)]
#[derive(Clone, Debug)]
pub struct InliningThreshold {
    /// Maximum size (in opcodes) of a function that may be inlined.
    pub max_size: usize,
    /// Maximum call depth after which inlining is suppressed.
    pub max_depth: usize,
    /// Minimum call count before inlining is triggered.
    pub min_call_count: u64,
}
463/// A builder that converts a two-argument accumulator-style recursion
464/// into a trampolined computation.
465///
466/// ```
467/// # use oxilean_runtime::tco::{RecursiveStep, trampoline};
468/// // factorial via recursive step
469/// let result = RecursiveStep::run(10u64, 1u64, |n, acc| {
470///     if n == 0 { None } else { Some((n - 1, n * acc)) }
471/// });
472/// assert_eq!(result, 3628800);
473/// ```
474pub struct RecursiveStep;
475impl RecursiveStep {
476    /// Run the accumulator loop.
477    ///
478    /// - `initial`: the initial input value.
479    /// - `acc`: the initial accumulator.
480    /// - `step`: given `(input, acc)`, returns `Some((next_input, next_acc))`
481    ///   to continue, or `None` to stop and return `acc`.
482    pub fn run<I, A, F>(initial: I, acc: A, step: F) -> A
483    where
484        I: Clone + 'static,
485        A: Clone + 'static,
486        F: Fn(I, A) -> Option<(I, A)> + Clone + 'static,
487    {
488        fn go<
489            I: Clone + 'static,
490            A: Clone + 'static,
491            F: Fn(I, A) -> Option<(I, A)> + Clone + 'static,
492        >(
493            i: I,
494            a: A,
495            f: F,
496        ) -> TailCall<A> {
497            match f(i, a.clone()) {
498                None => TailCall::Done(a),
499                Some((next_i, next_a)) => {
500                    let f2 = f.clone();
501                    TailCall::Call(Box::new(move || go(next_i, next_a, f2)))
502                }
503            }
504        }
505        trampoline(go(initial, acc, step))
506    }
507}
/// Kind of binary operation.
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BinopKind {
    Add,
    Sub,
    Mul,
    Div,
}
impl BinopKind {
    /// Evaluate this binary operation on `lhs` and `rhs`.
    ///
    /// Add/Sub/Mul use wrapping arithmetic; Div returns `None` when the
    /// divisor is zero.
    #[allow(dead_code)]
    pub fn eval(self, lhs: u64, rhs: u64) -> Option<u64> {
        let result = match self {
            BinopKind::Add => lhs.wrapping_add(rhs),
            BinopKind::Sub => lhs.wrapping_sub(rhs),
            BinopKind::Mul => lhs.wrapping_mul(rhs),
            BinopKind::Div => {
                if rhs == 0 {
                    return None;
                }
                lhs / rhs
            }
        };
        Some(result)
    }
}
/// The result of a step in a trampoline loop.
///
/// A function that supports TCO returns `TailCall<T>` instead of `T`:
/// - [`TailCall::Done`] means the final value is ready.
/// - [`TailCall::Call`] means there is another step to execute.
///
/// A driver (e.g. `trampoline`) keeps invoking `Call` closures in a loop,
/// so recursion depth is traded for heap-allocated thunks.
pub enum TailCall<T> {
    /// Computation is finished; `T` is the final value.
    Done(T),
    /// Another step is needed; the boxed closure produces the next
    /// `TailCall<T>` without growing the call stack.
    Call(Box<dyn FnOnce() -> TailCall<T>>),
}
/// Result of a single interpreter step (for a TCO-aware interpreter loop).
///
/// The driving loop keeps feeding `Continue` states back in until it sees
/// `Finished` or `Error`.
pub enum StepResult<State, Output> {
    /// Continue with an updated state.
    Continue(State),
    /// Computation finished with this output.
    Finished(Output),
    /// An error occurred; the message describes the failure.
    Error(String),
}
556/// High-level optimizer that applies TCO analysis to bytecode chunks.
557#[allow(dead_code)]
558pub struct TailCallOptimizer {
559    detector: TailCallDetector,
560    /// Statistics accumulator.
561    pub stats: TailCallCounter,
562}
563impl TailCallOptimizer {
564    /// Create a new optimizer.
565    #[allow(dead_code)]
566    pub fn new() -> Self {
567        Self {
568            detector: TailCallDetector::new(),
569            stats: TailCallCounter::new(),
570        }
571    }
572    /// Analyse a chunk of opcodes and return the report.
573    #[allow(dead_code)]
574    pub fn analyse_chunk(&mut self, opcodes: &[&str]) -> TailCallAnalysisReport {
575        self.detector.analyse(opcodes);
576        let report = TailCallAnalysisReport::build(&self.detector, opcodes);
577        self.stats.optimized += report.tail_positions.len() as u64;
578        report
579    }
580    /// Returns `true` if the given index is a tail call.
581    #[allow(dead_code)]
582    pub fn is_tail_call(&self, idx: usize) -> bool {
583        self.detector.is_tail(idx)
584    }
585}
586/// Analysis result for a bytecode chunk, pairing detection results with stats.
587#[allow(dead_code)]
588#[derive(Clone, Debug)]
589pub struct TailCallAnalysisReport {
590    /// Indices of tail-position call instructions.
591    pub tail_positions: Vec<usize>,
592    /// Total number of call instructions found.
593    pub total_calls: usize,
594    /// Fraction of calls that are in tail position.
595    pub tail_ratio: f64,
596    /// Human-readable summary.
597    pub summary: String,
598}
599impl TailCallAnalysisReport {
600    /// Build a report from a detector result and the original opcodes.
601    #[allow(dead_code)]
602    pub fn build(detector: &TailCallDetector, opcodes: &[&str]) -> Self {
603        let total_calls = opcodes.iter().filter(|&&op| op == "Call").count();
604        let tail_count = detector.count();
605        let tail_ratio = if total_calls == 0 {
606            0.0
607        } else {
608            tail_count as f64 / total_calls as f64
609        };
610        let summary = format!(
611            "{}/{} calls ({:.0}%) are in tail position",
612            tail_count,
613            total_calls,
614            tail_ratio * 100.0
615        );
616        Self {
617            tail_positions: detector.tail_positions.clone(),
618            total_calls,
619            tail_ratio,
620            summary,
621        }
622    }
623}
/// Extended detector that classifies tail positions more finely.
#[allow(dead_code)]
pub struct ExtendedTailCallDetector {
    /// Current function being analyzed.
    pub current_function: String,
    /// Positions and their classifications.
    pub classified: Vec<(usize, TailPositionKind)>,
}
impl ExtendedTailCallDetector {
    /// Create a new detector for `function_name`.
    #[allow(dead_code)]
    pub fn new(function_name: &str) -> Self {
        ExtendedTailCallDetector {
            current_function: function_name.to_owned(),
            classified: Vec::new(),
        }
    }
    /// Analyse a list of `(opcode_name, callee_name)` pairs and classify
    /// every `Call` instruction found.
    #[allow(dead_code)]
    pub fn analyse_with_callees(&mut self, ops: &[(&str, Option<&str>)]) {
        self.classified.clear();
        for (pos, &(op, callee)) in ops.iter().enumerate() {
            if op != "Call" {
                continue;
            }
            // A call is only in tail position when followed by Return/Halt.
            let follower = ops.get(pos + 1).map(|&(o, _)| o).unwrap_or("");
            let kind = if follower != "Return" && follower != "Halt" {
                TailPositionKind::NonTail
            } else {
                match callee {
                    Some(name) if name == self.current_function => {
                        TailPositionKind::SelfTailCall
                    }
                    Some(_) => TailPositionKind::MutualTailCall,
                    None => TailPositionKind::ExternalTailCall,
                }
            };
            self.classified.push((pos, kind));
        }
    }
    /// Count how many classified positions are of the given kind.
    #[allow(dead_code)]
    pub fn count_kind(&self, kind: TailPositionKind) -> usize {
        self.classified.iter().filter(|&&(_, k)| k == kind).count()
    }
}
/// More fine-grained classification of a tail position.
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TailPositionKind {
    /// Direct self-tail-call (trivially TCO-able).
    SelfTailCall,
    /// Tail call to a sibling function (mutually recursive TCO).
    MutualTailCall,
    /// Tail call to an unknown/external function.
    ExternalTailCall,
    /// Not a tail call.
    NonTail,
}
/// A thunk: either an unevaluated computation or a memoized value.
#[allow(dead_code)]
pub enum Thunk<T> {
    /// Not yet evaluated.
    Deferred(Box<dyn FnOnce() -> T>),
    /// Already evaluated; value is cached.
    Evaluated(T),
}
impl<T: Clone> Thunk<T> {
    /// Create a deferred thunk from a closure.
    #[allow(dead_code)]
    pub fn defer(f: impl FnOnce() -> T + 'static) -> Self {
        Thunk::Deferred(Box::new(f))
    }
    /// Force evaluation and memoize the result.
    ///
    /// The deferred closure runs at most once; subsequent calls return the
    /// cached value.
    ///
    /// Fix: the previous implementation used `mem::zeroed()` as a temporary
    /// placeholder, which is undefined behavior for any `T` with invalid
    /// zero bit patterns (e.g. `String`, references). The placeholder is now
    /// a harmless `Deferred` closure that is overwritten before it can run.
    #[allow(dead_code)]
    pub fn force(&mut self) -> &T {
        if let Thunk::Deferred(_) = self {
            // Swap in a safe placeholder so we can take ownership of the
            // boxed closure; the placeholder is replaced immediately below
            // and is never invoked.
            let placeholder: Thunk<T> = Thunk::Deferred(Box::new(|| unreachable!()));
            let f = match std::mem::replace(self, placeholder) {
                Thunk::Deferred(f) => f,
                Thunk::Evaluated(_) => unreachable!(),
            };
            *self = Thunk::Evaluated(f());
        }
        match self {
            Thunk::Evaluated(ref v) => v,
            Thunk::Deferred(_) => unreachable!(),
        }
    }
}
/// A simple symbolic rewrite rule.
#[allow(dead_code)]
#[derive(Clone, Debug)]
pub struct RewriteRule {
    /// Pattern string (LHS).
    pub lhs: String,
    /// Replacement string (RHS).
    pub rhs: String,
    /// Whether this rule fires unconditionally.
    pub unconditional: bool,
}
impl RewriteRule {
    /// Create a new unconditional rewrite rule.
    #[allow(dead_code)]
    pub fn new(lhs: &str, rhs: &str) -> Self {
        Self::make(lhs, rhs, true)
    }
    /// Create a conditional rewrite rule.
    #[allow(dead_code)]
    pub fn conditional(lhs: &str, rhs: &str) -> Self {
        Self::make(lhs, rhs, false)
    }
    /// Shared constructor for both rule flavours.
    #[allow(dead_code)]
    fn make(lhs: &str, rhs: &str, unconditional: bool) -> Self {
        RewriteRule {
            lhs: lhs.to_owned(),
            rhs: rhs.to_owned(),
            unconditional,
        }
    }
}
/// Per-function trampoline metrics.
#[allow(dead_code)]
#[derive(Clone, Debug, Default)]
pub struct FunctionTcoMetrics {
    /// Name of the function.
    pub name: String,
    /// Number of times this function was called via trampoline.
    pub call_count: u64,
    /// Maximum recursion depth eliminated for this function.
    pub max_depth_eliminated: u64,
    /// Total steps this function contributed.
    pub total_steps: u64,
}
impl FunctionTcoMetrics {
    /// Create zeroed metrics for a named function.
    #[allow(dead_code)]
    pub fn new(name: &str) -> Self {
        FunctionTcoMetrics {
            name: name.to_owned(),
            ..Self::default()
        }
    }
    /// Record a call at the given depth.
    #[allow(dead_code)]
    pub fn record(&mut self, depth: u64) {
        self.call_count += 1;
        self.total_steps += depth;
        self.max_depth_eliminated = self.max_depth_eliminated.max(depth);
    }
    /// Average depth per call (0.0 when no calls were recorded).
    #[allow(dead_code)]
    pub fn avg_depth(&self) -> f64 {
        match self.call_count {
            0 => 0.0,
            calls => self.total_steps as f64 / calls as f64,
        }
    }
}
/// Represents a chain of tail calls that can potentially be fused.
#[allow(dead_code)]
#[derive(Clone, Debug)]
pub struct TailCallChain {
    /// Ordered list of function names in the chain.
    pub functions: Vec<String>,
    /// Whether the chain can be safely fused into a single jump.
    pub can_fuse: bool,
}
impl TailCallChain {
    /// Create a new, empty chain (fusable by default).
    #[allow(dead_code)]
    pub fn new() -> Self {
        TailCallChain {
            functions: Vec::new(),
            can_fuse: true,
        }
    }
    /// Append a function to the end of the chain.
    #[allow(dead_code)]
    pub fn push(&mut self, name: &str) {
        self.functions.push(name.to_owned());
    }
    /// Mark the chain as non-fusable.
    #[allow(dead_code)]
    pub fn mark_non_fusable(&mut self) {
        self.can_fuse = false;
    }
    /// Length of the chain.
    #[allow(dead_code)]
    pub fn len(&self) -> usize {
        self.functions.len()
    }
    /// Whether the chain is empty.
    #[allow(dead_code)]
    pub fn is_empty(&self) -> bool {
        self.functions.is_empty()
    }
}
827/// A full TCO optimization pass over a module's functions.
828#[allow(dead_code)]
829pub struct TailCallOptimizationPass {
830    /// Optimizer instance.
831    pub optimizer: TailCallOptimizer,
832    /// Functions processed.
833    pub processed: Vec<String>,
834    /// Functions skipped (already TCO-safe or inlined).
835    pub skipped: Vec<String>,
836}
837impl TailCallOptimizationPass {
838    /// Create a new pass.
839    #[allow(dead_code)]
840    pub fn new() -> Self {
841        Self {
842            optimizer: TailCallOptimizer::new(),
843            processed: Vec::new(),
844            skipped: Vec::new(),
845        }
846    }
847    /// Process a function with the given bytecode opcode names.
848    /// Returns the analysis report.
849    #[allow(dead_code)]
850    pub fn process_function(
851        &mut self,
852        name: &str,
853        opcodes: &[&str],
854        skip: bool,
855    ) -> Option<TailCallAnalysisReport> {
856        if skip {
857            self.skipped.push(name.to_string());
858            return None;
859        }
860        let report = self.optimizer.analyse_chunk(opcodes);
861        self.processed.push(name.to_string());
862        Some(report)
863    }
864    /// Summary of processing.
865    #[allow(dead_code)]
866    pub fn summary(&self) -> String {
867        format!(
868            "TCO pass: {} processed, {} skipped, {} tail calls found",
869            self.processed.len(),
870            self.skipped.len(),
871            self.optimizer.stats.optimized
872        )
873    }
874}
/// Tracks how many tail calls were optimized during a run.
#[derive(Clone, Debug, Default)]
pub struct TailCallCounter {
    /// Total tail calls optimized (turned into loop iterations).
    pub optimized: u64,
    /// Maximum trampoline depth seen in a single evaluation.
    pub max_depth: u64,
}
impl TailCallCounter {
    /// Create a zeroed counter.
    pub fn new() -> Self {
        Self::default()
    }
    /// Record one optimized tail call at the given depth.
    pub fn record(&mut self, depth: u64) {
        self.optimized += 1;
        self.max_depth = self.max_depth.max(depth);
    }
}
/// Decision on whether to inline a call.
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum InliningDecision {
    /// Inline the callee.
    Inline,
    /// Do not inline (call it normally).
    DoNotInline,
    /// Force inline regardless of heuristics (e.g., `#[inline(always)]`).
    ForceInline,
}
/// Controls the behaviour of a batched tail-call scheduler.
///
/// NOTE(review): `TailCallScheduler::new` calls
/// `TailCallSchedulerConfig::default()`, but no `Default` derive or impl is
/// visible in this chunk — presumably provided elsewhere in the file;
/// confirm before adding a derive here.
#[allow(dead_code)]
#[derive(Clone, Debug)]
pub struct TailCallSchedulerConfig {
    /// Maximum number of trampoline steps per batch before yielding.
    pub max_steps_per_batch: usize,
    /// Maximum total steps allowed before returning an error.
    pub step_limit: u64,
}
/// Result of a tail-call micro-benchmark.
#[allow(dead_code)]
#[derive(Clone, Debug)]
pub struct TailCallBenchmarkResult {
    /// Name of the benchmark.
    pub name: String,
    /// Number of iterations run.
    pub iterations: u64,
    /// Total wall-clock nanoseconds (stub: always 0 in tests).
    pub total_ns: u64,
    /// Final computed value.
    pub value: u64,
}
impl TailCallBenchmarkResult {
    /// Compute throughput in iterations per second (0 if total_ns == 0).
    #[allow(dead_code)]
    pub fn throughput(&self) -> f64 {
        match self.total_ns {
            0 => 0.0,
            ns => {
                let seconds = ns as f64 / 1_000_000_000.0;
                self.iterations as f64 / seconds
            }
        }
    }
    /// Format as a human-readable report line.
    #[allow(dead_code)]
    pub fn report(&self) -> String {
        format!(
            "Benchmark[{}]: {} iters, {} ns, value={}",
            self.name, self.iterations, self.total_ns, self.value
        )
    }
}
/// A peephole optimization rule: if the opcode pattern matches, replace it.
#[allow(dead_code)]
pub struct PeepholeRule {
    /// The opcode sequence to match (by name).
    pub pattern: Vec<&'static str>,
    /// The replacement opcode sequence.
    pub replacement: Vec<&'static str>,
    /// Human-readable description of the optimization.
    pub description: &'static str,
}
impl PeepholeRule {
    /// Build a rule mapping `pattern` to `replacement`, tagged with a
    /// short `description` for diagnostics.
    #[allow(dead_code)]
    pub fn new(
        pattern: Vec<&'static str>,
        replacement: Vec<&'static str>,
        description: &'static str,
    ) -> Self {
        PeepholeRule {
            pattern,
            replacement,
            description,
        }
    }
}
/// A proof certificate that a function is TCO-safe.
#[allow(dead_code)]
#[derive(Clone, Debug)]
pub struct TailCallProof {
    /// Name of the function proven TCO-safe.
    pub function_name: String,
    /// Which argument decreases on each call.
    pub decreasing_argument: String,
    /// Well-founded ordering used.
    pub ordering: String,
    /// Informal justification.
    pub justification: String,
}
impl TailCallProof {
    /// Build a certificate from borrowed string parts, taking owned copies.
    #[allow(dead_code)]
    pub fn new(
        function_name: &str,
        decreasing_argument: &str,
        ordering: &str,
        justification: &str,
    ) -> Self {
        Self {
            function_name: function_name.to_owned(),
            decreasing_argument: decreasing_argument.to_owned(),
            ordering: ordering.to_owned(),
            justification: justification.to_owned(),
        }
    }
    /// Render the proof as a multi-line human-readable string.
    #[allow(dead_code)]
    pub fn format(&self) -> String {
        format!(
            "TCO Proof for `{}`:\n  Decreasing: {}\n  Ordering: {}\n  Justification: {}",
            self.function_name, self.decreasing_argument, self.ordering, self.justification
        )
    }
}
/// Analysis result for a single bytecode instruction position.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TailPosition {
    /// In tail position: the instruction's result leaves the function.
    Tail,
    /// Not in tail position: the result feeds further computation.
    NonTail,
}
1019/// An explicit call stack used when TCO cannot be applied.
1020#[allow(dead_code)]
1021pub struct ExplicitCallStack {
1022    frames: Vec<StackFrame>,
1023    /// Maximum depth ever reached.
1024    pub max_depth: usize,
1025}
1026impl ExplicitCallStack {
1027    /// Create an empty call stack.
1028    #[allow(dead_code)]
1029    pub fn new() -> Self {
1030        Self {
1031            frames: Vec::new(),
1032            max_depth: 0,
1033        }
1034    }
1035    /// Push a new frame.
1036    #[allow(dead_code)]
1037    pub fn push(&mut self, frame: StackFrame) {
1038        self.frames.push(frame);
1039        if self.frames.len() > self.max_depth {
1040            self.max_depth = self.frames.len();
1041        }
1042    }
1043    /// Pop the top frame.
1044    #[allow(dead_code)]
1045    pub fn pop(&mut self) -> Option<StackFrame> {
1046        self.frames.pop()
1047    }
1048    /// Return the current depth.
1049    #[allow(dead_code)]
1050    pub fn depth(&self) -> usize {
1051        self.frames.len()
1052    }
1053    /// Peek at the top frame without removing it.
1054    #[allow(dead_code)]
1055    pub fn top(&self) -> Option<&StackFrame> {
1056        self.frames.last()
1057    }
1058    /// Peek at the top frame mutably.
1059    #[allow(dead_code)]
1060    pub fn top_mut(&mut self) -> Option<&mut StackFrame> {
1061        self.frames.last_mut()
1062    }
1063    /// Return a backtrace as a list of function names.
1064    #[allow(dead_code)]
1065    pub fn backtrace(&self) -> Vec<&str> {
1066        self.frames.iter().map(|f| f.function.as_str()).collect()
1067    }
1068    /// Return a formatted backtrace string.
1069    #[allow(dead_code)]
1070    pub fn format_backtrace(&self) -> String {
1071        let mut out = String::from("Backtrace (most recent last):\n");
1072        for (i, frame) in self.frames.iter().enumerate() {
1073            out.push_str(&format!(
1074                "  {:3}: {} @ {}\n",
1075                i, frame.function, frame.return_address
1076            ));
1077        }
1078        out
1079    }
1080}
/// Result of a scheduler tick.
#[allow(dead_code)]
pub enum SchedulerTickResult<T> {
    /// More work remains; tick again later.
    Pending,
    /// The computation finished with value `T`.
    Finished(T),
    /// The configured step limit was exhausted before completion.
    StepLimitExceeded,
}
/// Represents a step of a mutually-recursive computation.
///
/// Two functions `even` and `odd` can be represented using a single
/// `MutualTailCall` enum, with `A` and `B` tagging which "side" we are on.
#[allow(dead_code)]
pub enum MutualTailCall<A, B, R> {
    /// Jump to the A-side with the given state and continuation.
    GoA(A, Box<dyn FnOnce(A) -> MutualTailCall<A, B, R>>),
    /// Jump to the B-side with the given state and continuation.
    GoB(B, Box<dyn FnOnce(B) -> MutualTailCall<A, B, R>>),
    /// The computation is complete.
    Done(R),
}
1104/// An explicit continuation frame.
1105#[allow(dead_code)]
1106#[derive(Clone, Debug)]
1107pub enum ContinuationFrame {
1108    /// Apply a binary operation to the top of the value stack and this operand.
1109    ApplyBinop { op: BinopKind, operand: u64 },
1110    /// Store the result in the named variable.
1111    StoreResult { var: String },
1112    /// Print the result.
1113    PrintResult,
1114}
/// The result of partially evaluating an expression.
#[allow(dead_code)]
#[derive(Clone, Debug, PartialEq)]
pub enum PartialValue {
    /// Known concrete value.
    Known(u64),
    /// Unknown symbolic value.
    Unknown(String),
    /// Bottom (error / undefined).
    Bottom,
}
impl PartialValue {
    /// Return true if the value is concrete.
    #[allow(dead_code)]
    pub fn is_known(&self) -> bool {
        matches!(self, PartialValue::Known(_))
    }
    /// Add two partial values.
    ///
    /// `Bottom` absorbs (an error stays an error); two known values wrap
    /// on overflow; any other mix stays symbolic.
    #[allow(dead_code)]
    pub fn add(&self, other: &PartialValue) -> PartialValue {
        match (self, other) {
            (PartialValue::Known(a), PartialValue::Known(b)) => {
                PartialValue::Known(a.wrapping_add(*b))
            }
            (PartialValue::Bottom, _) | (_, PartialValue::Bottom) => PartialValue::Bottom,
            _ => PartialValue::Unknown(format!("{:?} + {:?}", self, other)),
        }
    }
    /// Multiply two partial values.
    ///
    /// `Bottom` absorbs, mirroring `add`: it is checked BEFORE the zero
    /// short-circuit so an error is never masked as `Known(0)`. After
    /// that, a known zero annihilates any non-error operand, two known
    /// values wrap on overflow, and any other mix stays symbolic.
    #[allow(dead_code)]
    pub fn mul(&self, other: &PartialValue) -> PartialValue {
        match (self, other) {
            // Bug fix: previously the `Known(0)` arm matched first, so
            // `Bottom * Known(0)` yielded `Known(0)` and silently dropped
            // the error; `Bottom` must take precedence as in `add`.
            (PartialValue::Bottom, _) | (_, PartialValue::Bottom) => PartialValue::Bottom,
            (PartialValue::Known(a), PartialValue::Known(b)) => {
                PartialValue::Known(a.wrapping_mul(*b))
            }
            (PartialValue::Known(0), _) | (_, PartialValue::Known(0)) => PartialValue::Known(0),
            _ => PartialValue::Unknown(format!("{:?} * {:?}", self, other)),
        }
    }
}
/// A state in an explicit finite state machine.
#[allow(dead_code)]
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct StateMachineState {
    /// Numeric state identifier.
    pub id: u32,
    /// Accumulated output collected during transitions.
    pub output: Vec<String>,
}
impl StateMachineState {
    /// Create a state with the given id and no accumulated output.
    #[allow(dead_code)]
    pub fn new(id: u32) -> Self {
        StateMachineState {
            id,
            output: Vec::new(),
        }
    }
    /// Consume the state and return it with `s` appended to its output,
    /// enabling builder-style chaining of transitions.
    #[allow(dead_code)]
    pub fn emit(mut self, s: &str) -> Self {
        self.output.push(s.to_owned());
        self
    }
}
1181/// A continuation-based evaluator with an explicit continuation stack.
1182#[allow(dead_code)]
1183pub struct ContinuationEvaluator {
1184    /// Value stack.
1185    pub values: Vec<u64>,
1186    /// Continuation stack.
1187    pub continuations: Vec<ContinuationFrame>,
1188    /// Named variable bindings.
1189    pub env: std::collections::HashMap<String, u64>,
1190}
1191impl ContinuationEvaluator {
1192    /// Create a new evaluator.
1193    #[allow(dead_code)]
1194    pub fn new() -> Self {
1195        Self {
1196            values: Vec::new(),
1197            continuations: Vec::new(),
1198            env: std::collections::HashMap::new(),
1199        }
1200    }
1201    /// Push a value onto the value stack.
1202    #[allow(dead_code)]
1203    pub fn push_value(&mut self, v: u64) {
1204        self.values.push(v);
1205    }
1206    /// Pop a value from the value stack.
1207    #[allow(dead_code)]
1208    pub fn pop_value(&mut self) -> Option<u64> {
1209        self.values.pop()
1210    }
1211    /// Push a continuation frame.
1212    #[allow(dead_code)]
1213    pub fn push_cont(&mut self, frame: ContinuationFrame) {
1214        self.continuations.push(frame);
1215    }
1216    /// Step: pop one continuation frame and apply it to the top value.
1217    #[allow(dead_code)]
1218    pub fn step(&mut self) -> bool {
1219        let frame = match self.continuations.pop() {
1220            Some(f) => f,
1221            None => return false,
1222        };
1223        match frame {
1224            ContinuationFrame::ApplyBinop { op, operand } => {
1225                if let Some(lhs) = self.values.pop() {
1226                    if let Some(result) = op.eval(lhs, operand) {
1227                        self.values.push(result);
1228                    }
1229                }
1230            }
1231            ContinuationFrame::StoreResult { var } => {
1232                if let Some(v) = self.values.last().copied() {
1233                    self.env.insert(var, v);
1234                }
1235            }
1236            ContinuationFrame::PrintResult => {}
1237        }
1238        true
1239    }
1240    /// Run to completion.
1241    #[allow(dead_code)]
1242    pub fn run(&mut self) {
1243        while self.step() {}
1244    }
1245}