// oxilean_runtime/lazy_eval/types.rs

1//! Auto-generated module
2//!
3//! 🤖 Generated with [SplitRS](https://github.com/cool-japan/splitrs)
4
5use std::cell::RefCell;
6use std::collections::{HashMap, HashSet};
7use std::fmt;
8use std::sync::{Arc, Mutex, OnceLock};
9
10use super::functions::TailFn;
11
/// A simple black-hole detector for named computations.
///
/// Records which thunks are currently being forced. If a thunk is
/// re-entered while already being forced, a black hole is detected.
#[derive(Default, Debug)]
pub struct BlackHoleDetector {
    in_progress: std::collections::HashSet<String>,
}
impl BlackHoleDetector {
    /// Create a fresh detector with no thunks in flight.
    pub fn new() -> Self {
        Self::default()
    }
    /// Mark the thunk `name` as being forced.
    ///
    /// Returns `Err` with a diagnostic message when `name` is already in
    /// flight, i.e. a self-referential force (black hole).
    pub fn enter(&mut self, name: impl Into<String>) -> Result<(), String> {
        let key = name.into();
        // `HashSet::insert` returns `false` when the key was already
        // present — exactly the re-entrancy we must reject.
        if self.in_progress.insert(key.clone()) {
            Ok(())
        } else {
            Err(format!("black hole detected in `{}`", key))
        }
    }
    /// Mark the thunk `name` as no longer being forced.
    pub fn exit(&mut self, name: &str) {
        self.in_progress.remove(name);
    }
    /// Whether a given thunk is currently being forced.
    pub fn is_in_progress(&self, name: &str) -> bool {
        self.in_progress.contains(name)
    }
    /// How many thunks are currently being forced.
    pub fn depth(&self) -> usize {
        self.in_progress.len()
    }
}
/// A lazy map where each value is computed on first access and memoized.
pub struct LazyMap<K: Eq + std::hash::Hash + Clone, V> {
    computed: HashMap<K, V>,
    pending: HashMap<K, Box<dyn FnOnce() -> V>>,
}
impl<K: Eq + std::hash::Hash + Clone, V: Clone + fmt::Debug> LazyMap<K, V> {
    /// Create an empty lazy map with no pending or computed entries.
    pub fn new() -> Self {
        Self {
            computed: HashMap::new(),
            pending: HashMap::new(),
        }
    }
    /// Register a suspended computation under `key`; it runs on first `get`.
    pub fn insert_lazy<F>(&mut self, key: K, thunk: F)
    where
        F: FnOnce() -> V + 'static,
    {
        self.pending.insert(key, Box::new(thunk));
    }
    /// Store an eagerly computed value under `key`.
    pub fn insert_ready(&mut self, key: K, value: V) {
        self.computed.insert(key, value);
    }
    /// Look up `key`, forcing its pending computation first if necessary.
    pub fn get(&mut self, key: &K) -> Option<&V> {
        // Promote a pending thunk into the computed table exactly once.
        if let Some(thunk) = self.pending.remove(key) {
            self.computed.insert(key.clone(), thunk());
        }
        self.computed.get(key)
    }
    /// Whether `key` exists in either the pending or computed table.
    pub fn contains(&self, key: &K) -> bool {
        self.pending.contains_key(key) || self.computed.contains_key(key)
    }
    /// Total number of entries, pending and computed combined.
    pub fn len(&self) -> usize {
        self.pending.len() + self.computed.len()
    }
    /// Whether the map holds no entries at all.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// How many entries have already been forced.
    pub fn computed_count(&self) -> usize {
        self.computed.len()
    }
    /// How many entries are still suspended.
    pub fn pending_count(&self) -> usize {
        self.pending.len()
    }
}
/// Generates prime numbers lazily using incremental trial division.
pub struct LazySieve {
    /// Primes found so far, in ascending order.
    primes: Vec<u64>,
    /// Next number to test for primality.
    candidate: u64,
}
impl LazySieve {
    /// Create a new lazy prime sieve starting from 2.
    pub fn new() -> Self {
        LazySieve {
            primes: Vec::new(),
            candidate: 2,
        }
    }
    /// Get the next prime number.
    pub fn next_prime(&mut self) -> u64 {
        loop {
            let c = self.candidate;
            self.candidate += 1;
            // Any composite `c` has a prime factor no larger than √c, so
            // only primes `p` with `p * p <= c` need to be tested (the
            // old code divided by *every* known prime). `primes` is
            // ascending, so `take_while` stops at the first too-large one.
            let is_prime = self
                .primes
                .iter()
                .take_while(|&&p| p * p <= c)
                .all(|&p| c % p != 0);
            if is_prime {
                self.primes.push(c);
                return c;
            }
        }
    }
    /// Get the first `n` prime numbers.
    pub fn take_primes(&mut self, n: usize) -> Vec<u64> {
        (0..n).map(|_| self.next_prime()).collect()
    }
}
/// A simple memoization cache keyed by u64 hash.
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct MemoCache<V> {
    entries: std::collections::HashMap<u64, V>,
    pub(crate) hits: usize,
    pub(crate) misses: usize,
}
#[allow(dead_code)]
impl<V: Clone> MemoCache<V> {
    /// Create an empty cache with zeroed hit/miss counters.
    pub fn new() -> Self {
        Self {
            entries: std::collections::HashMap::new(),
            hits: 0,
            misses: 0,
        }
    }
    /// Look up `key`, recording a hit or a miss.
    ///
    /// Membership is tested before borrowing the entry so the counters
    /// (which also live on `self`) can be bumped without a borrow clash.
    pub fn get(&mut self, key: u64) -> Option<&V> {
        let hit = self.entries.contains_key(&key);
        if hit {
            self.hits += 1;
        } else {
            self.misses += 1;
        }
        if hit {
            self.entries.get(&key)
        } else {
            None
        }
    }
    /// Store `value` under `key`, replacing any previous value.
    pub fn insert(&mut self, key: u64, value: V) {
        self.entries.insert(key, value);
    }
    /// Fraction of lookups that were hits; 0.0 before any lookup.
    pub fn hit_rate(&self) -> f64 {
        match self.hits + self.misses {
            0 => 0.0,
            total => self.hits as f64 / total as f64,
        }
    }
    /// Number of cached entries.
    pub fn len(&self) -> usize {
        self.entries.len()
    }
    /// Whether the cache holds no entries.
    pub fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }
    /// Drop all entries and reset the hit/miss counters.
    pub fn clear(&mut self) {
        self.entries.clear();
        self.hits = 0;
        self.misses = 0;
    }
}
/// Internal state of a single-threaded thunk.
///
/// NOTE(review): presumably `Thunk::force` swaps in `BlackHole` while the
/// closure runs, then stores `Evaluated` — confirm against the `Thunk` impl.
pub(super) enum ThunkState<T> {
    /// Not yet evaluated; holds the suspended computation.
    Unevaluated(Box<dyn FnOnce() -> T>),
    /// Already evaluated; holds the memoized result.
    Evaluated(T),
    /// Evaluation is in progress (detects infinite loops / black holes).
    BlackHole,
}
/// An accumulator that collects values lazily (only when iterated).
pub struct LazyAccumulator<T> {
    /// Suspended computations, in insertion order.
    pending: Vec<Box<dyn FnOnce() -> T>>,
    /// Values already forced.
    collected: Vec<T>,
}
impl<T: Clone + fmt::Debug> LazyAccumulator<T> {
    /// Create a new empty accumulator.
    pub fn new() -> Self {
        LazyAccumulator {
            pending: Vec::new(),
            collected: Vec::new(),
        }
    }
    /// Add a lazy value; it is forced only by `flush`/`collect`.
    pub fn add<F: FnOnce() -> T + 'static>(&mut self, f: F) {
        self.pending.push(Box::new(f));
    }
    /// Force all pending values, appending them in insertion order.
    ///
    /// Fix: the old loop `pop()`ed from the back of `pending`, so values
    /// were collected in *reverse* insertion order; `drain(..)` keeps the
    /// FIFO order callers expect from an accumulator.
    pub fn flush(&mut self) {
        for thunk in self.pending.drain(..) {
            self.collected.push(thunk());
        }
    }
    /// Force everything and return the collected values.
    pub fn collect(&mut self) -> &[T] {
        self.flush();
        &self.collected
    }
    /// Number of values still suspended.
    pub fn pending_count(&self) -> usize {
        self.pending.len()
    }
    /// Number of values already forced.
    pub fn collected_count(&self) -> usize {
        self.collected.len()
    }
}
/// An infinite stream backed by a generating function.
///
/// The generator maps the current state to the next element together with
/// the successor state.
pub struct StreamThunk<S: Clone, T> {
    state: S,
    gen: Box<dyn Fn(&S) -> (T, S)>,
}
impl<S: Clone + fmt::Debug, T: Clone + fmt::Debug> StreamThunk<S, T> {
    /// Create a stream from an initial state and a generator.
    ///
    /// `f(state)` returns `(next_value, next_state)`.
    pub fn new<F>(init: S, f: F) -> Self
    where
        F: Fn(&S) -> (T, S) + 'static,
    {
        Self {
            state: init,
            gen: Box::new(f),
        }
    }
    /// Produce the next value and advance the stream state.
    pub fn next(&mut self) -> T {
        let (value, successor) = (self.gen)(&self.state);
        self.state = successor;
        value
    }
    /// Advance `n` times, collecting the produced values.
    pub fn take_n(&mut self, n: usize) -> Vec<T> {
        let mut out = Vec::with_capacity(n);
        for _ in 0..n {
            out.push(self.next());
        }
        out
    }
    /// Peek at the current state without advancing.
    pub fn current_state(&self) -> &S {
        &self.state
    }
}
/// A value that may be either:
/// - Already computed (`Ready`),
/// - Deferred to be computed later (`Deferred` with a name tag),
/// - Failed to compute (`Failed` with an error message).
#[derive(Clone, Debug)]
pub enum DeferredValue<T> {
    /// The value is ready.
    Ready(T),
    /// The value is not yet computed; `name` identifies the computation.
    Deferred { name: String },
    /// Computation failed with this error message.
    Failed { error: String },
}
impl<T: Clone + fmt::Debug> DeferredValue<T> {
    /// Unwrap a ready value.
    ///
    /// # Panics
    /// Panics when the value is still deferred or has failed.
    pub fn unwrap_ready(self) -> T {
        match self {
            Self::Ready(v) => v,
            Self::Deferred { name } => panic!(
                "DeferredValue::unwrap_ready: value '{}' not yet computed",
                name
            ),
            Self::Failed { error } => {
                panic!("DeferredValue::unwrap_ready: computation failed: {}", error)
            }
        }
    }
    /// Whether the value is ready.
    pub fn is_ready(&self) -> bool {
        self.get().is_some()
    }
    /// Whether the value is still deferred.
    pub fn is_deferred(&self) -> bool {
        matches!(self, Self::Deferred { .. })
    }
    /// Whether the computation failed.
    pub fn is_failed(&self) -> bool {
        matches!(self, Self::Failed { .. })
    }
    /// Map over a ready value; deferred/failed states pass through untouched.
    pub fn map<U, F>(self, f: F) -> DeferredValue<U>
    where
        F: FnOnce(T) -> U,
    {
        match self {
            Self::Ready(v) => DeferredValue::Ready(f(v)),
            Self::Deferred { name } => DeferredValue::Deferred { name },
            Self::Failed { error } => DeferredValue::Failed { error },
        }
    }
    /// Borrow the ready value, if any.
    pub fn get(&self) -> Option<&T> {
        if let Self::Ready(v) = self {
            Some(v)
        } else {
            None
        }
    }
}
321/// A lazy iterator over an arithmetic range [start, end) with a step.
322pub struct LazyRange {
323    pub(super) current: i64,
324    pub(super) end: i64,
325    pub(super) step: i64,
326}
327impl LazyRange {
328    /// Create a range [start..end) with the given step.
329    pub fn new(start: i64, end: i64, step: i64) -> Self {
330        assert!(step != 0, "step must be non-zero");
331        LazyRange {
332            current: start,
333            end,
334            step,
335        }
336    }
337    /// Create a simple range [0..n).
338    pub fn up_to(n: i64) -> Self {
339        Self::new(0, n, 1)
340    }
341    /// Collect all values.
342    pub fn collect_all(self) -> Vec<i64> {
343        self.collect()
344    }
345}
346/// A named cache of lazy values.
347///
348/// Useful for memoizing top-level definitions that should only be evaluated
349/// once during a program run.
350pub struct ThunkCache {
351    pub(super) entries: HashMap<String, Arc<dyn std::any::Any + Send + Sync>>,
352}
353impl ThunkCache {
354    /// Create an empty cache.
355    pub fn new() -> Self {
356        ThunkCache {
357            entries: HashMap::new(),
358        }
359    }
360    /// Insert a lazily-computed entry under `name`.
361    ///
362    /// If `name` is already in the cache the old entry is replaced.
363    pub fn insert<T, F>(&mut self, name: impl Into<String>, f: F)
364    where
365        T: Clone + fmt::Debug + Send + Sync + 'static,
366        F: FnOnce() -> T + Send + 'static,
367    {
368        let thunk = Arc::new(SharedThunk::new(f));
369        self.entries.insert(name.into(), thunk);
370    }
371    /// Insert a pre-evaluated value under `name`.
372    pub fn insert_pure<T>(&mut self, name: impl Into<String>, value: T)
373    where
374        T: Clone + fmt::Debug + Send + Sync + 'static,
375    {
376        let thunk = Arc::new(SharedThunk::pure(value));
377        self.entries.insert(name.into(), thunk);
378    }
379    /// Force and return the value under `name`, if it exists.
380    pub fn force<T>(&self, name: &str) -> Option<T>
381    where
382        T: Clone + fmt::Debug + Send + Sync + 'static,
383    {
384        let entry = self.entries.get(name)?;
385        let thunk = entry.downcast_ref::<SharedThunk<T>>()?;
386        Some(thunk.force())
387    }
388    /// Returns `true` if an entry with `name` exists in the cache.
389    pub fn contains(&self, name: &str) -> bool {
390        self.entries.contains_key(name)
391    }
392    /// Number of entries in the cache.
393    pub fn len(&self) -> usize {
394        self.entries.len()
395    }
396    /// Whether the cache is empty.
397    pub fn is_empty(&self) -> bool {
398        self.entries.is_empty()
399    }
400}
/// Iterator produced by [`LazyList::take`].
pub struct TakeIter<T: Clone + 'static> {
    /// Current head value paired with its (optional) suspended tail;
    /// `None` once the underlying list is exhausted.
    pub(super) current: Option<(T, Option<TailFn<T>>)>,
    /// How many more elements may still be yielded.
    pub(super) remaining: usize,
}
/// A chain of lazy computations that can be built up and then forced.
///
/// Each step maps the previous value to the next. This is a simplified
/// synchronous "future" / continuation chain.
pub struct FutureChain<T> {
    /// The initial computation.
    initial: Box<dyn FnOnce() -> T>,
    /// The chain of transformations.
    steps: Vec<Box<dyn FnOnce(T) -> T>>,
}
impl<T: 'static> FutureChain<T> {
    /// Create a chain whose seed value is produced by `init`.
    pub fn new<F: FnOnce() -> T + 'static>(init: F) -> Self {
        FutureChain {
            initial: Box::new(init),
            steps: Vec::new(),
        }
    }
    /// Append a transformation step (builder-style).
    pub fn then<F: FnOnce(T) -> T + 'static>(mut self, f: F) -> Self {
        self.steps.push(Box::new(f));
        self
    }
    /// Force the chain: run the seed, then fold every step over it in order.
    pub fn force(self) -> T {
        let FutureChain { initial, steps } = self;
        steps.into_iter().fold(initial(), |value, step| step(value))
    }
    /// Number of steps in the chain.
    pub fn step_count(&self) -> usize {
        self.steps.len()
    }
}
/// A batch evaluator that runs a collection of named thunks in sequence.
pub struct BatchEval<T> {
    /// Tasks not yet run, in insertion order.
    tasks: Vec<(String, Box<dyn FnOnce() -> T>)>,
    /// Completed `(name, value)` pairs, in execution order.
    results: Vec<(String, T)>,
}
impl<T: Clone + fmt::Debug> BatchEval<T> {
    /// Create a new empty batch.
    pub fn new() -> Self {
        BatchEval {
            tasks: Vec::new(),
            results: Vec::new(),
        }
    }
    /// Add a named task to the batch.
    pub fn add<F: FnOnce() -> T + 'static>(&mut self, name: impl Into<String>, f: F) {
        self.tasks.push((name.into(), Box::new(f)));
    }
    /// Run all tasks and collect results.
    ///
    /// Fix: the old loop `pop()`ed from the back of `tasks`, so tasks ran
    /// (and `all_results()` listed them) newest-first; `drain(..)` runs
    /// them in the order they were added, matching "in sequence".
    pub fn run_all(&mut self) {
        for (name, task) in self.tasks.drain(..) {
            let val = task();
            self.results.push((name, val));
        }
    }
    /// Get the result for a given name, if it was run.
    pub fn result(&self, name: &str) -> Option<&T> {
        self.results.iter().find(|(n, _)| n == name).map(|(_, v)| v)
    }
    /// Number of tasks pending.
    pub fn pending(&self) -> usize {
        self.tasks.len()
    }
    /// Number of completed tasks.
    pub fn completed(&self) -> usize {
        self.results.len()
    }
    /// All results, in execution order.
    pub fn all_results(&self) -> &[(String, T)] {
        &self.results
    }
}
/// Lazy once-cell that tracks how many times it was forced.
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct TrackedCell<T> {
    value: Option<T>,
    force_count: usize,
}
#[allow(dead_code)]
impl<T: Clone> TrackedCell<T> {
    /// Create an empty, never-forced cell.
    pub fn new() -> Self {
        Self {
            value: None,
            force_count: 0,
        }
    }
    /// Store `val`, but only if the cell is still empty (write-once).
    pub fn set(&mut self, val: T) {
        // `get_or_insert` keeps an existing value and drops `val`,
        // matching write-once semantics.
        self.value.get_or_insert(val);
    }
    /// Read the value; each successful read bumps the force counter.
    pub fn get(&mut self) -> Option<&T> {
        // `true as usize == 1`, so this counts only non-empty reads.
        self.force_count += self.value.is_some() as usize;
        self.value.as_ref()
    }
    /// How many times a value was actually read out of this cell.
    pub fn force_count(&self) -> usize {
        self.force_count
    }
    /// Whether the cell holds a value.
    pub fn is_initialized(&self) -> bool {
        self.value.is_some()
    }
}
/// Memoize a two-argument function over a HashMap.
pub struct MemoFn2<I1, I2, O> {
    cache: HashMap<(I1, I2), O>,
    func: Box<dyn Fn(I1, I2) -> O>,
}
impl<I1, I2, O> MemoFn2<I1, I2, O>
where
    I1: Eq + std::hash::Hash + Clone,
    I2: Eq + std::hash::Hash + Clone,
    O: Clone,
{
    /// Wrap `f` in a memoizing 2-argument function.
    pub fn new<F: Fn(I1, I2) -> O + 'static>(f: F) -> Self {
        MemoFn2 {
            cache: HashMap::new(),
            func: Box::new(f),
        }
    }
    /// Call the memoized function; computes and caches on the first
    /// call for a given `(a, b)` pair.
    pub fn call(&mut self, a: I1, b: I2) -> O {
        let key = (a.clone(), b.clone());
        // Borrow `func` separately so the entry's mutable borrow of
        // `cache` and the closure's borrow of `func` stay disjoint.
        let func = &self.func;
        self.cache.entry(key).or_insert_with(|| func(a, b)).clone()
    }
    /// Number of cached results.
    pub fn cache_size(&self) -> usize {
        self.cache.len()
    }
    /// Clear the cache.
    pub fn clear_cache(&mut self) {
        self.cache.clear();
    }
}
553/// A vector where elements are computed lazily on first access.
554pub struct ThunkVec<T> {
555    thunks: Vec<std::cell::RefCell<ThunkVecState<T>>>,
556}
557impl<T: Clone + fmt::Debug> ThunkVec<T> {
558    /// Create a new empty ThunkVec.
559    pub fn new() -> Self {
560        ThunkVec { thunks: Vec::new() }
561    }
562    /// Push a lazy element.
563    pub fn push_lazy<F: FnOnce() -> T + 'static>(&mut self, f: F) {
564        self.thunks
565            .push(std::cell::RefCell::new(ThunkVecState::Pending(Box::new(f))));
566    }
567    /// Push an already-computed element.
568    pub fn push_ready(&mut self, value: T) {
569        self.thunks
570            .push(std::cell::RefCell::new(ThunkVecState::Computed(value)));
571    }
572    /// Get the element at `index`, forcing it if necessary.
573    pub fn get(&self, index: usize) -> Option<T> {
574        let cell = self.thunks.get(index)?;
575        let state = cell.borrow();
576        if let ThunkVecState::Computed(ref v) = *state {
577            return Some(v.clone());
578        }
579        drop(state);
580        let old = cell.replace(ThunkVecState::Computed(
581            match cell.replace(ThunkVecState::Computed(unsafe { std::mem::zeroed() })) {
582                ThunkVecState::Pending(f) => f(),
583                ThunkVecState::Computed(v) => v,
584            },
585        ));
586        drop(old);
587        let state2 = cell.borrow();
588        if let ThunkVecState::Computed(ref v) = *state2 {
589            Some(v.clone())
590        } else {
591            None
592        }
593    }
594    /// Length of the vec.
595    pub fn len(&self) -> usize {
596        self.thunks.len()
597    }
598    /// Whether the vec is empty.
599    pub fn is_empty(&self) -> bool {
600        self.thunks.is_empty()
601    }
602    /// Force all thunks and return a Vec.
603    pub fn force_all(&self) -> Vec<T> {
604        (0..self.len()).filter_map(|i| self.get(i)).collect()
605    }
606}
/// A memoizing evaluator that safely handles mutual recursion by returning
/// a fallback value when a cycle is detected (instead of panicking).
pub struct CycleSafeMemo<T: Clone + Default> {
    cache: HashMap<String, T>,
    in_progress: std::collections::HashSet<String>,
}
impl<T: Clone + Default + fmt::Debug> CycleSafeMemo<T> {
    /// Create a new cycle-safe memo.
    pub fn new() -> Self {
        CycleSafeMemo {
            cache: HashMap::new(),
            in_progress: std::collections::HashSet::new(),
        }
    }
    /// Compute or retrieve the value for `key`. If a cycle is detected,
    /// returns `T::default()` as a fallback.
    pub fn get_or_compute<F>(&mut self, key: impl Into<String>, f: F) -> T
    where
        F: FnOnce(&mut Self) -> T,
    {
        let key = key.into();
        if let Some(cached) = self.cache.get(&key) {
            return cached.clone();
        }
        // `insert` returning false means this key is already being
        // computed further up the stack — a cycle; bail out with the
        // default rather than recursing forever.
        if !self.in_progress.insert(key.clone()) {
            return T::default();
        }
        let value = f(self);
        self.in_progress.remove(&key);
        self.cache.insert(key, value.clone());
        value
    }
    /// Number of cached entries.
    pub fn cache_size(&self) -> usize {
        self.cache.len()
    }
}
/// Thread-safe lazy value backed by `Arc<Mutex<...>>`.
///
/// Use this when a thunk needs to be sent across threads (the closure and
/// the memoized value must both be `Send + Sync`).
pub struct SharedThunk<T> {
    pub(super) inner: Arc<Mutex<SharedThunkInner<T>>>,
}
impl<T: Clone + fmt::Debug + Send + Sync + 'static> SharedThunk<T> {
    /// Create a new thread-safe thunk wrapping the suspended computation `f`.
    pub fn new<F: FnOnce() -> T + Send + 'static>(f: F) -> Self {
        SharedThunk {
            inner: Arc::new(Mutex::new(SharedThunkInner {
                value: OnceLock::new(),
                thunk: Some(Box::new(f)),
            })),
        }
    }
    /// Create an already-evaluated shared thunk.
    pub fn pure(value: T) -> Self {
        // Pre-fill the OnceLock so `force` takes the memoized fast path;
        // `set` on a fresh lock cannot fail, hence the ignored result.
        let lock = OnceLock::new();
        let _ = lock.set(value);
        SharedThunk {
            inner: Arc::new(Mutex::new(SharedThunkInner {
                value: lock,
                thunk: None,
            })),
        }
    }
    /// Force the thunk (thread-safe, memoized).
    ///
    /// A poisoned mutex is recovered via `into_inner`: poisoning only means
    /// another thread panicked while holding the lock, and the state here
    /// remains usable (the `OnceLock` is either set or not).
    ///
    /// # Panics
    /// Panics if the closure was already consumed without a value being
    /// recorded (e.g. a previous `force` panicked mid-evaluation).
    pub fn force(&self) -> T {
        let mut guard = self.inner.lock().unwrap_or_else(|e| e.into_inner());
        if let Some(v) = guard.value.get() {
            return v.clone();
        }
        // First force: run the closure while holding the lock so no other
        // thread can evaluate it concurrently.
        let f = guard.thunk.take().expect("thunk already consumed");
        let result = f();
        let _ = guard.value.set(result.clone());
        result
    }
    /// Returns `true` if already evaluated.
    pub fn is_evaluated(&self) -> bool {
        let guard = self.inner.lock().unwrap_or_else(|e| e.into_inner());
        guard.value.get().is_some()
    }
    /// Clone the `Arc` handle, giving a second handle to the *same* thunk.
    pub fn share(&self) -> Self {
        SharedThunk {
            inner: Arc::clone(&self.inner),
        }
    }
}
/// Internal state for a single thunk in a ThunkVec.
enum ThunkVecState<T> {
    /// Suspended computation; moved out of the cell and run on first access.
    Pending(Box<dyn FnOnce() -> T>),
    /// Memoized result of the computation.
    Computed(T),
}
/// A rose tree node with lazily-computed children.
pub struct LazyTree<T: Clone + fmt::Debug + 'static> {
    /// Value at this node.
    pub value: T,
    /// Lazily-computed children.
    children_thunk: Option<Arc<dyn Fn() -> Vec<LazyTree<T>> + Send + Sync>>,
}
impl<T: Clone + fmt::Debug + 'static> LazyTree<T> {
    /// Create a leaf node (no children thunk).
    pub fn leaf(value: T) -> Self {
        Self {
            value,
            children_thunk: None,
        }
    }
    /// Create an internal node whose children are produced on demand.
    pub fn node<F>(value: T, children: F) -> Self
    where
        F: Fn() -> Vec<LazyTree<T>> + Send + Sync + 'static,
    {
        Self {
            value,
            children_thunk: Some(Arc::new(children)),
        }
    }
    /// Force and return this node's children (empty for a leaf).
    pub fn children(&self) -> Vec<LazyTree<T>> {
        self.children_thunk
            .as_ref()
            .map(|thunk| thunk())
            .unwrap_or_default()
    }
    /// Whether this node has no children thunk at all.
    pub fn is_leaf(&self) -> bool {
        self.children_thunk.is_none()
    }
    /// Pre-order depth-first traversal, collecting values.
    pub fn dfs(&self) -> Vec<T> {
        let mut out = vec![self.value.clone()];
        out.extend(self.children().into_iter().flat_map(|child| child.dfs()));
        out
    }
}
746/// A memoization table that stores computed results keyed by a string.
747pub struct MemoTable {
748    pub(super) entries: HashMap<String, Box<dyn std::any::Any>>,
749}
750impl MemoTable {
751    /// Create a new empty table.
752    pub fn new() -> Self {
753        MemoTable {
754            entries: HashMap::new(),
755        }
756    }
757    /// Insert a precomputed value.
758    pub fn insert<V: 'static>(&mut self, key: impl Into<String>, value: V) {
759        self.entries.insert(key.into(), Box::new(value));
760    }
761    /// Get a value by key and type.
762    pub fn get<V: 'static>(&self, key: &str) -> Option<&V> {
763        self.entries.get(key)?.downcast_ref::<V>()
764    }
765    /// Check if a key exists.
766    pub fn contains(&self, key: &str) -> bool {
767        self.entries.contains_key(key)
768    }
769    /// Remove a key.
770    pub fn remove(&mut self, key: &str) -> bool {
771        self.entries.remove(key).is_some()
772    }
773    /// Number of entries.
774    pub fn len(&self) -> usize {
775        self.entries.len()
776    }
777    /// Whether the table is empty.
778    pub fn is_empty(&self) -> bool {
779        self.entries.is_empty()
780    }
781    /// Clear all entries.
782    pub fn clear(&mut self) {
783        self.entries.clear();
784    }
785}
786/// A potentially-infinite list whose tail is lazy.
787///
788/// ```
789/// # use oxilean_runtime::lazy_eval::LazyList;
790/// let nats = LazyList::from_fn(0u64, |n| n + 1);
791/// let first5: Vec<u64> = nats.take(5).collect();
792/// assert_eq!(first5, vec![0, 1, 2, 3, 4]);
793/// ```
794pub struct LazyList<T: Clone + 'static> {
795    pub(super) head: Option<T>,
796    pub(super) tail: Option<Arc<dyn Fn() -> LazyList<T> + Send + Sync>>,
797}
798impl<T: Clone + fmt::Debug + 'static> LazyList<T> {
799    /// Create an empty lazy list.
800    pub fn empty() -> Self {
801        LazyList {
802            head: None,
803            tail: None,
804        }
805    }
806    /// Create a cons cell.
807    pub fn cons<F>(head: T, tail: F) -> Self
808    where
809        F: Fn() -> LazyList<T> + Send + Sync + 'static,
810    {
811        LazyList {
812            head: Some(head),
813            tail: Some(Arc::new(tail)),
814        }
815    }
816    /// Build an infinite list using a seed value and a successor function.
817    pub fn from_fn(seed: T, next: impl Fn(T) -> T + Send + Sync + Clone + 'static) -> Self
818    where
819        T: Send + Sync,
820    {
821        let next2 = next.clone();
822        let seed2 = next(seed.clone());
823        LazyList::cons(seed, move || {
824            LazyList::from_fn(seed2.clone(), next2.clone())
825        })
826    }
827    /// Whether this list is empty.
828    pub fn is_empty(&self) -> bool {
829        self.head.is_none()
830    }
831    /// Force the head element.
832    pub fn head(&self) -> Option<&T> {
833        self.head.as_ref()
834    }
835    /// Force the tail, returning the next lazy list.
836    pub fn tail(&self) -> LazyList<T> {
837        match &self.tail {
838            Some(f) => f(),
839            None => LazyList::empty(),
840        }
841    }
842    /// Take (at most) the first `n` elements, returning an iterator.
843    pub fn take(&self, n: usize) -> TakeIter<T> {
844        TakeIter {
845            current: self.head.clone().map(|h| {
846                let tail = self.tail.clone();
847                (h, tail)
848            }),
849            remaining: n,
850        }
851    }
852}
853/// Lazy filter adapter.
854pub struct LazyFilter<'a, A> {
855    pub(super) data: &'a [A],
856    pub(super) pred: Box<dyn Fn(&A) -> bool>,
857    pub(super) index: usize,
858}
859impl<'a, A: Clone + fmt::Debug> LazyFilter<'a, A> {
860    /// Create a new lazy filter.
861    pub fn new<F: Fn(&A) -> bool + 'static>(data: &'a [A], pred: F) -> Self {
862        LazyFilter {
863            data,
864            pred: Box::new(pred),
865            index: 0,
866        }
867    }
868    /// Collect all passing elements.
869    pub fn collect_all(&self) -> Vec<A> {
870        self.data
871            .iter()
872            .filter(|x| (self.pred)(x))
873            .cloned()
874            .collect()
875    }
876}
877/// A value that is initialized at most once, returning a reference
878/// on first access.
879///
880/// Unlike [`Thunk`], this variant uses an `Option` and never panics on
881/// black-hole — it returns `None` if not yet initialized.
882pub struct LazyCell<T> {
883    pub(super) inner: std::cell::OnceCell<T>,
884}
885impl<T: Clone + fmt::Debug> LazyCell<T> {
886    /// Create a new uninitialized lazy cell.
887    pub fn new() -> Self {
888        LazyCell {
889            inner: std::cell::OnceCell::new(),
890        }
891    }
892    /// Initialize the cell with `value`. Returns `Ok(())` on success
893    /// or `Err(value)` if already initialized.
894    pub fn init(&self, value: T) -> Result<(), T> {
895        self.inner.set(value)
896    }
897    /// Get the value if initialized.
898    pub fn get(&self) -> Option<&T> {
899        self.inner.get()
900    }
901    /// Get or initialize with `f`.
902    pub fn get_or_init<F: FnOnce() -> T>(&self, f: F) -> &T {
903        self.inner.get_or_init(f)
904    }
905    /// Whether this cell has been initialized.
906    pub fn is_initialized(&self) -> bool {
907        self.inner.get().is_some()
908    }
909}
/// A lazy string that is only concatenated when needed.
pub struct LazyString {
    parts: Vec<String>,
}
impl LazyString {
    /// Create a new empty lazy string.
    pub fn new() -> Self {
        Self { parts: Vec::new() }
    }
    /// Append a part (builder-style; consumes and returns `self`).
    pub fn push(mut self, s: impl Into<String>) -> Self {
        self.parts.push(s.into());
        self
    }
    /// Force the string, concatenating all parts in order.
    pub fn build(self) -> String {
        self.parts.into_iter().collect()
    }
    /// Number of parts accumulated so far.
    pub fn part_count(&self) -> usize {
        self.parts.len()
    }
}
933/// A thread-safe value computed at most once.
934pub struct Once<T: Clone + Send + Sync + 'static> {
935    pub(super) inner: OnceLock<T>,
936}
937impl<T: Clone + fmt::Debug + Send + Sync + 'static> Once<T> {
938    /// Create a new uninitialized Once.
939    pub fn new() -> Self {
940        Once {
941            inner: OnceLock::new(),
942        }
943    }
944    /// Get or initialize the value.
945    pub fn get_or_init<F: FnOnce() -> T>(&self, f: F) -> &T {
946        self.inner.get_or_init(f)
947    }
948    /// Get the value if initialized.
949    pub fn get(&self) -> Option<&T> {
950        self.inner.get()
951    }
952    /// Whether initialized.
953    pub fn is_initialized(&self) -> bool {
954        self.inner.get().is_some()
955    }
956}
/// A memoized function: every distinct argument is computed once and
/// the result is cached for later calls.
///
/// ```
/// # use oxilean_runtime::lazy_eval::MemoFn;
/// let mut fib = MemoFn::new(|n: u64| {
///     if n <= 1 { n } else { n } // simplified; see tests for recursive version
/// });
/// assert_eq!(fib.call(10), 10);
/// assert_eq!(fib.call(10), 10); // from cache
/// ```
pub struct MemoFn<I, O> {
    cache: HashMap<I, O>,
    func: Box<dyn Fn(I) -> O>,
}
impl<I: Eq + std::hash::Hash + Clone, O: Clone> MemoFn<I, O> {
    /// Wrap `f` in a memoized function.
    pub fn new<F: Fn(I) -> O + 'static>(f: F) -> Self {
        Self {
            func: Box::new(f),
            cache: HashMap::new(),
        }
    }
    /// Apply the function, consulting the cache first.
    ///
    /// On a miss the underlying function runs and its result is stored
    /// before being returned; a hit returns a clone of the cached value.
    pub fn call(&mut self, arg: I) -> O {
        match self.cache.get(&arg) {
            Some(hit) => hit.clone(),
            None => {
                let computed = (self.func)(arg.clone());
                self.cache.insert(arg, computed.clone());
                computed
            }
        }
    }
    /// Drop every cached result.
    pub fn clear_cache(&mut self) {
        self.cache.clear();
    }
    /// How many distinct arguments are currently cached.
    pub fn cache_size(&self) -> usize {
        self.cache.len()
    }
}
/// A single-threaded lazy value with memoization.
///
/// The suspension runs at most once; the result is cached in a
/// `RefCell`-wrapped state machine (`ThunkState`, defined elsewhere in
/// this module) with black-hole cycle detection.
///
/// ```
/// # use oxilean_runtime::lazy_eval::Thunk;
/// let thunk = Thunk::new(|| 6 * 7);
/// assert_eq!(*thunk.force(), 42);
/// // Subsequent forces return the cached value without recomputing.
/// assert_eq!(*thunk.force(), 42);
/// ```
pub struct Thunk<T> {
    // Unevaluated -> (BlackHole while running) -> Evaluated; see `force`.
    pub(super) state: RefCell<ThunkState<T>>,
}
impl<T: Clone + fmt::Debug> Thunk<T> {
    /// Create a new unevaluated thunk wrapping `f`.
    pub fn new<F: FnOnce() -> T + 'static>(f: F) -> Self {
        Thunk {
            state: RefCell::new(ThunkState::Unevaluated(Box::new(f))),
        }
    }
    /// Create an already-evaluated thunk (no suspension needed).
    pub fn pure(value: T) -> Self {
        Thunk {
            state: RefCell::new(ThunkState::Evaluated(value)),
        }
    }
    /// Force the thunk, memoizing the result.
    ///
    /// Returns a `Ref` borrowing the evaluated value; the borrow must be
    /// dropped before the `Thunk`'s state can be mutated again.
    ///
    /// # Panics
    ///
    /// Panics if a cycle is detected (black-hole detection).
    pub fn force(&self) -> std::cell::Ref<'_, T> {
        // Fast path: already evaluated. The inner scope makes sure the
        // shared borrow is dropped before `replace` below takes the
        // exclusive borrow (otherwise RefCell would panic).
        {
            let s = self.state.borrow();
            if matches!(*s, ThunkState::Evaluated(_)) {
                return std::cell::Ref::map(s, |state| {
                    if let ThunkState::Evaluated(ref v) = state {
                        v
                    } else {
                        unreachable!()
                    }
                });
            }
        }
        // Install the black hole BEFORE running the suspension: if `f`
        // re-enters `force` on this same thunk, the match below hits the
        // BlackHole arm and panics instead of recursing forever.
        let old_state = self.state.replace(ThunkState::BlackHole);
        let result = match old_state {
            ThunkState::Unevaluated(f) => f(),
            ThunkState::BlackHole => {
                panic!("lazy evaluation cycle detected (black hole)")
            }
            // Evaluated was handled by the fast path above.
            ThunkState::Evaluated(_) => unreachable!(),
        };
        // NOTE: if `f` panics, the state stays BlackHole, so a later
        // force reports a cycle rather than retrying the suspension.
        self.state.replace(ThunkState::Evaluated(result));
        // Re-borrow and project the freshly stored value out of the enum.
        std::cell::Ref::map(self.state.borrow(), |state| {
            if let ThunkState::Evaluated(ref v) = state {
                v
            } else {
                unreachable!()
            }
        })
    }
    /// Returns `true` if this thunk has already been evaluated.
    pub fn is_evaluated(&self) -> bool {
        matches!(*self.state.borrow(), ThunkState::Evaluated(_))
    }
}
/// Shared backing state for a thread-safe, memoized thunk.
///
/// The impl lives outside this view; presumably it runs `thunk` once
/// and publishes the result through `value` — TODO confirm against the
/// rest of the module.
pub(super) struct SharedThunkInner<T> {
    /// The memoized result; `OnceLock` guarantees it is set at most once.
    pub(super) value: OnceLock<T>,
    /// The pending suspension; `Send` so it can cross threads.
    /// NOTE(review): private to this module — presumably `take()`n
    /// (needs `&mut` or a lock) on first force; verify in the impl.
    thunk: Option<Box<dyn FnOnce() -> T + Send>>,
}
1066/// Map a function lazily over a slice, producing results on demand.
1067pub struct LazyMap2<'a, A, B> {
1068    pub(super) data: &'a [A],
1069    pub(super) func: Box<dyn Fn(&A) -> B>,
1070    pub(super) index: usize,
1071}
1072impl<'a, A, B: fmt::Debug> LazyMap2<'a, A, B> {
1073    /// Create a new lazy map over `data` using `f`.
1074    pub fn new<F: Fn(&A) -> B + 'static>(data: &'a [A], f: F) -> Self {
1075        LazyMap2 {
1076            data,
1077            func: Box::new(f),
1078            index: 0,
1079        }
1080    }
1081    /// Get the element at `i` (applies `f` each time; not memoized).
1082    pub fn get(&self, i: usize) -> Option<B> {
1083        self.data.get(i).map(|x| (self.func)(x))
1084    }
1085    /// Collect all results into a Vec.
1086    pub fn collect_all(&self) -> Vec<B> {
1087        self.data.iter().map(|x| (self.func)(x)).collect()
1088    }
1089}