// cljrs_value/types.rs
1//! Stub types for Phase 4/7 that are referenced by the Value enum.
2
3#![allow(unused)]
4
5use std::collections::HashMap;
6use std::mem;
7use std::sync::{Arc, Condvar, Mutex};
8
9use cljrs_gc::GcPtr;
10use cljrs_reader::Form;
11
12use crate::Value;
13
14// ── No-GC debug provenance helper ────────────────────────────────────────────
15
/// In `no-gc` debug builds: report whether the top-level `GcPtr` inside
/// `value` (if any) was allocated by the global `StaticArena`.
///
/// Variants that hold no `GcPtr` at all — inline scalars (`Nil`, `Bool`,
/// `Long`, `Double`, `Char`, `Uuid`), the Arc-managed `Resource`, and the
/// primitive-array variants — trivially report `true`.  Every other variant
/// carries a `GcPtr` whose provenance is checked via `is_static_alloc()`.
///
/// The check is intentionally **shallow** (top-level pointer only).  If the
/// value was produced inside a `StaticCtxGuard`, ALL allocations during its
/// evaluation go to the static arena — so a static top-level pointer implies
/// static contents.
#[cfg(all(feature = "no-gc", debug_assertions))]
pub(crate) fn value_gcptr_is_static(value: &Value) -> bool {
    use crate::value::{MapValue, SetValue};
    match value {
        // No heap pointer to check: inline scalars, the Arc-managed
        // `Resource`, and the primitive-array variants.
        Value::Nil
        | Value::Bool(_)
        | Value::Long(_)
        | Value::Double(_)
        | Value::Char(_)
        | Value::Uuid(_)
        | Value::Resource(_)
        | Value::BooleanArray(_)
        | Value::ByteArray(_)
        | Value::ShortArray(_)
        | Value::IntArray(_)
        | Value::LongArray(_)
        | Value::FloatArray(_)
        | Value::DoubleArray(_)
        | Value::CharArray(_) => true,
        // Direct GcPtr payloads — ask the pointer itself.
        Value::BigInt(p) => p.is_static_alloc(),
        Value::BigDecimal(p) => p.is_static_alloc(),
        Value::Ratio(p) => p.is_static_alloc(),
        Value::Str(p) => p.is_static_alloc(),
        Value::Pattern(p) => p.is_static_alloc(),
        Value::Matcher(p) => p.is_static_alloc(),
        Value::Symbol(p) => p.is_static_alloc(),
        Value::Keyword(p) => p.is_static_alloc(),
        Value::List(p) => p.is_static_alloc(),
        Value::Vector(p) => p.is_static_alloc(),
        Value::Queue(p) => p.is_static_alloc(),
        // Map/Set wrap their pointer one level deeper; match it inline.
        Value::Map(MapValue::Array(p)) => p.is_static_alloc(),
        Value::Map(MapValue::Hash(p)) => p.is_static_alloc(),
        Value::Map(MapValue::Sorted(p)) => p.is_static_alloc(),
        Value::Set(SetValue::Hash(p)) => p.is_static_alloc(),
        Value::Set(SetValue::Sorted(p)) => p.is_static_alloc(),
        Value::NativeFunction(p) => p.is_static_alloc(),
        Value::Fn(p) | Value::Macro(p) => p.is_static_alloc(),
        Value::BoundFn(p) => p.is_static_alloc(),
        Value::Var(p) => p.is_static_alloc(),
        Value::Atom(p) => p.is_static_alloc(),
        Value::Namespace(p) => p.is_static_alloc(),
        Value::LazySeq(p) => p.is_static_alloc(),
        Value::Cons(p) => p.is_static_alloc(),
        Value::Protocol(p) => p.is_static_alloc(),
        Value::ProtocolFn(p) => p.is_static_alloc(),
        Value::MultiFn(p) => p.is_static_alloc(),
        Value::Volatile(p) => p.is_static_alloc(),
        Value::Delay(p) => p.is_static_alloc(),
        Value::Promise(p) => p.is_static_alloc(),
        Value::Future(p) => p.is_static_alloc(),
        Value::Agent(p) => p.is_static_alloc(),
        Value::TypeInstance(p) => p.is_static_alloc(),
        Value::ObjectArray(p) => p.is_static_alloc(),
        Value::NativeObject(p) => p.is_static_alloc(),
        Value::Error(p) => p.is_static_alloc(),
        Value::TransientMap(p) => p.is_static_alloc(),
        Value::TransientVector(p) => p.is_static_alloc(),
        Value::TransientSet(p) => p.is_static_alloc(),
        // Wrapper variants: recurse into the wrapped value.
        Value::Reduced(wrapped) | Value::WithMeta(wrapped, _) => value_gcptr_is_static(wrapped),
    }
}
99
// ── Protocol ──────────────────────────────────────────────────────────────────

/// Inner map type for protocol implementations: method_name → impl fn.
pub type MethodMap = HashMap<Arc<str>, Value>;

/// A Clojure protocol — an interface-like construct with named methods.
#[derive(Debug)]
pub struct Protocol {
    /// Simple protocol name (without namespace).
    pub name: Arc<str>,
    /// Namespace the protocol was defined in.
    pub ns: Arc<str>,
    /// The declared method signatures.
    pub methods: Vec<ProtocolMethod>,
    /// type_tag → { method_name → impl fn }
    pub impls: Mutex<HashMap<Arc<str>, MethodMap>>,
}

impl Protocol {
    /// Create a protocol with the given method signatures and an empty
    /// implementation table (filled in later, e.g. by `extend-type`).
    pub fn new(name: Arc<str>, ns: Arc<str>, methods: Vec<ProtocolMethod>) -> Self {
        Self {
            name,
            ns,
            methods,
            impls: Mutex::new(HashMap::new()),
        }
    }
}
125
126impl cljrs_gc::Trace for Protocol {
127    fn trace(&self, visitor: &mut cljrs_gc::MarkVisitor) {
128        {
129            let impls = self.impls.lock().unwrap();
130            for method_map in impls.values() {
131                for v in method_map.values() {
132                    v.trace(visitor);
133                }
134            }
135        }
136    }
137}
138
/// One method signature declared in a `defprotocol`.
#[derive(Debug, Clone)]
pub struct ProtocolMethod {
    /// Method name as declared.
    pub name: Arc<str>,
    /// Minimum number of arguments the method accepts.
    pub min_arity: usize,
    /// True when the method additionally accepts variadic arguments.
    pub variadic: bool,
}

impl cljrs_gc::Trace for ProtocolMethod {
    // Pure metadata: holds no `Value` or `GcPtr`, so there is nothing to mark.
    fn trace(&self, _: &mut cljrs_gc::MarkVisitor) {}
}

// ── ProtocolFn ────────────────────────────────────────────────────────────────

/// Callable that dispatches a single protocol method on the type of `args[0]`.
#[derive(Debug)]
pub struct ProtocolFn {
    /// The protocol declaring the dispatched method.
    pub protocol: GcPtr<Protocol>,
    /// Name of the method to look up in the protocol's impl table.
    pub method_name: Arc<str>,
    /// Minimum number of arguments (mirrors `ProtocolMethod::min_arity`).
    pub min_arity: usize,
    /// True when the method additionally accepts variadic arguments.
    pub variadic: bool,
}

impl cljrs_gc::Trace for ProtocolFn {
    fn trace(&self, visitor: &mut cljrs_gc::MarkVisitor) {
        use cljrs_gc::GcVisitor as _;
        // The backing protocol is the only GC reference held.
        visitor.visit(&self.protocol);
    }
}
168
// ── MultiFn ───────────────────────────────────────────────────────────────────

/// A Clojure multimethod — arbitrary dispatch via a user-supplied function.
#[derive(Debug)]
pub struct MultiFn {
    /// Multimethod name.
    pub name: Arc<str>,
    /// The user-supplied function that computes the dispatch value.
    pub dispatch_fn: Value,
    /// pr_str(dispatch-val) → implementation fn
    pub methods: Mutex<HashMap<String, Value>>,
    /// recorded preferences (for future derive/hierarchy)
    pub prefers: Mutex<HashMap<String, Vec<String>>>,
    /// normally ":default"
    pub default_dispatch: String,
}

impl MultiFn {
    /// Create a multimethod with an empty method table and no recorded
    /// preferences.
    pub fn new(name: Arc<str>, dispatch_fn: Value, default_dispatch: String) -> Self {
        Self {
            name,
            dispatch_fn,
            methods: Mutex::new(HashMap::new()),
            prefers: Mutex::new(HashMap::new()),
            default_dispatch,
        }
    }
}
195
196impl cljrs_gc::Trace for MultiFn {
197    fn trace(&self, visitor: &mut cljrs_gc::MarkVisitor) {
198        self.dispatch_fn.trace(visitor);
199        {
200            let methods = self.methods.lock().unwrap();
201            for v in methods.values() {
202                v.trace(visitor);
203            }
204        }
205    }
206}
207
// ── Var ───────────────────────────────────────────────────────────────────────

/// A Clojure var — a namespace-interned mutable root binding.
#[derive(Debug)]
pub struct Var {
    /// Owning namespace name.
    pub namespace: Arc<str>,
    /// Simple var name within the namespace.
    pub name: Arc<str>,
    /// Root value; `None` while unbound.
    pub value: Mutex<Option<Value>>,
    /// True when this var names a macro.
    pub is_macro: bool,
    /// Metadata map (e.g. `{:dynamic true}`).
    pub meta: Mutex<Option<Value>>,
    /// Watch registrations as (key, watch-fn) pairs.
    pub watches: Mutex<Vec<(Value, Value)>>,
}
221
222impl Var {
223    pub fn new(namespace: impl Into<Arc<str>>, name: impl Into<Arc<str>>) -> Self {
224        Self {
225            namespace: namespace.into(),
226            name: name.into(),
227            value: Mutex::new(None),
228            is_macro: false,
229            meta: Mutex::new(None),
230            watches: Mutex::new(Vec::new()),
231        }
232    }
233
234    pub fn is_bound(&self) -> bool {
235        self.value.lock().unwrap().is_some()
236    }
237
238    pub fn deref(&self) -> Option<Value> {
239        self.value.lock().unwrap().clone()
240    }
241
242    pub fn bind(&self, v: Value) {
243        // In no-gc debug builds: assert the value being stored in this
244        // program-lifetime Var came from the StaticArena, not a scratch region.
245        // A region-local pointer would dangle after the function returns.
246        #[cfg(all(feature = "no-gc", debug_assertions))]
247        debug_assert!(
248            value_gcptr_is_static(&v),
249            "no-gc: Var::bind({}/{}) received a region-local value — store violations \
250             indicate a missing StaticCtxGuard around the value expression",
251            self.namespace,
252            self.name
253        );
254        *self.value.lock().unwrap() = Some(v);
255    }
256
257    pub fn get_meta(&self) -> Option<Value> {
258        self.meta.lock().unwrap().clone()
259    }
260
261    pub fn set_meta(&self, m: Value) {
262        *self.meta.lock().unwrap() = Some(m);
263    }
264
265    pub fn full_name(&self) -> String {
266        format!("{}/{}", self.namespace, self.name)
267    }
268}
269
270impl cljrs_gc::Trace for Var {
271    fn trace(&self, visitor: &mut cljrs_gc::MarkVisitor) {
272        {
273            let value = self.value.lock().unwrap();
274            if let Some(v) = value.as_ref() {
275                v.trace(visitor);
276            }
277        }
278        {
279            let meta = self.meta.lock().unwrap();
280            if let Some(m) = meta.as_ref() {
281                m.trace(visitor);
282            }
283        }
284        {
285            let watches = self.watches.lock().unwrap();
286            for (key, f) in watches.iter() {
287                key.trace(visitor);
288                f.trace(visitor);
289            }
290        }
291    }
292}
293
// ── Atom ──────────────────────────────────────────────────────────────────────

/// A Clojure atom — a thread-safe mutable reference.
#[derive(Debug)]
pub struct Atom {
    /// Current value.
    pub value: Mutex<Value>,
    /// Optional metadata map.
    pub meta: Mutex<Option<Value>>,
    /// Optional validator fn.  NOTE(review): `reset` does not invoke it —
    /// presumably the evaluator enforces validation; confirm at call sites.
    pub validator: Mutex<Option<Value>>,
    /// Watch registrations as (key, watch-fn) pairs.
    pub watches: Mutex<Vec<(Value, Value)>>,
}
304
305impl Atom {
306    pub fn new(v: Value) -> Self {
307        Self {
308            value: Mutex::new(v),
309            meta: Mutex::new(None),
310            validator: Mutex::new(None),
311            watches: Mutex::new(Vec::new()),
312        }
313    }
314
315    pub fn deref(&self) -> Value {
316        self.value.lock().unwrap().clone()
317    }
318
319    pub fn reset(&self, v: Value) -> Value {
320        // In no-gc debug builds: assert the new value came from the StaticArena.
321        #[cfg(all(feature = "no-gc", debug_assertions))]
322        debug_assert!(
323            value_gcptr_is_static(&v),
324            "no-gc: Atom::reset() received a region-local value — the new-value \
325             expression must be computed inside a StaticCtxGuard (i.e. inside \
326             the swap! / reset! call) so it is allocated in the static arena"
327        );
328        let mut guard = self.value.lock().unwrap();
329        *guard = v.clone();
330        v
331    }
332
333    pub fn get_meta(&self) -> Option<Value> {
334        self.meta.lock().unwrap().clone()
335    }
336
337    pub fn set_meta(&self, m: Option<Value>) {
338        *self.meta.lock().unwrap() = m;
339    }
340
341    pub fn get_validator(&self) -> Option<Value> {
342        self.validator.lock().unwrap().clone()
343    }
344
345    pub fn set_validator(&self, vf: Option<Value>) {
346        *self.validator.lock().unwrap() = vf;
347    }
348}
349
350impl cljrs_gc::Trace for Atom {
351    fn trace(&self, visitor: &mut cljrs_gc::MarkVisitor) {
352        {
353            let value = self.value.lock().unwrap();
354            value.trace(visitor);
355        }
356        {
357            let meta = self.meta.lock().unwrap();
358            if let Some(m) = meta.as_ref() {
359                m.trace(visitor);
360            }
361        }
362        {
363            let validator = self.validator.lock().unwrap();
364            if let Some(vf) = validator.as_ref() {
365                vf.trace(visitor);
366            }
367        }
368        {
369            let watches = self.watches.lock().unwrap();
370            for (key, f) in watches.iter() {
371                key.trace(visitor);
372                f.trace(visitor);
373            }
374        }
375    }
376}
377
// ── Namespace ─────────────────────────────────────────────────────────────────

/// A Clojure namespace with intern table, refers, and aliases.
#[derive(Debug)]
pub struct Namespace {
    /// Full namespace name.
    pub name: Arc<str>,
    /// Vars interned directly in this namespace.
    pub interns: Mutex<HashMap<Arc<str>, GcPtr<Var>>>,
    /// Vars referred from other namespaces (e.g. clojure.core).
    pub refers: Mutex<HashMap<Arc<str>, GcPtr<Var>>>,
    /// Namespace aliases: short-name → full namespace name.
    pub aliases: Mutex<HashMap<Arc<str>, Arc<str>>>,
}

impl Namespace {
    /// Create an empty namespace (no interns, refers, or aliases).
    pub fn new(name: impl Into<Arc<str>>) -> Self {
        Self {
            name: name.into(),
            interns: Mutex::new(HashMap::new()),
            refers: Mutex::new(HashMap::new()),
            aliases: Mutex::new(HashMap::new()),
        }
    }
}
402
403impl cljrs_gc::Trace for Namespace {
404    fn trace(&self, visitor: &mut cljrs_gc::MarkVisitor) {
405        use cljrs_gc::GcVisitor as _;
406        {
407            let interns = self.interns.lock().unwrap();
408            for var in interns.values() {
409                visitor.visit(var);
410            }
411        }
412        {
413            let refers = self.refers.lock().unwrap();
414            for var in refers.values() {
415                visitor.visit(var);
416            }
417        }
418    }
419}
420
// ── NativeFn ──────────────────────────────────────────────────────────────────

/// A Rust function callable from Clojure.
/// Legacy type alias kept for source compatibility. Bare `fn` pointers
/// implement `Fn` and can be passed anywhere a `NativeFnFunc` is expected.
pub type NativeFnPtr = fn(&[Value]) -> crate::error::ValueResult<Value>;

/// The callable stored inside a `NativeFn`. Supports both bare function
/// pointers and closures that capture state.
pub type NativeFnFunc = Arc<dyn Fn(&[Value]) -> crate::error::ValueResult<Value> + Send + Sync>;

/// Accepted argument count(s) of a native function.
#[derive(Clone, Debug)]
pub enum Arity {
    /// Exactly this many arguments.
    Fixed(usize),
    /// At least `min` arguments.
    Variadic { min: usize },
}

/// A Rust-implemented function exposed to Clojure code.
pub struct NativeFn {
    /// Display name.
    pub name: Arc<str>,
    /// Accepted argument count(s).
    pub arity: Arity,
    /// The underlying callable (see [`NativeFnFunc`]).
    pub func: NativeFnFunc,
}
443
444impl NativeFn {
445    /// Create from a bare function pointer (backwards-compatible).
446    pub fn new(name: impl Into<Arc<str>>, arity: Arity, func: NativeFnPtr) -> Self {
447        Self {
448            name: name.into(),
449            arity,
450            func: Arc::new(func),
451        }
452    }
453
454    /// Create from a closure or any `Fn(&[Value]) -> ValueResult<Value>`.
455    pub fn with_closure(
456        name: impl Into<Arc<str>>,
457        arity: Arity,
458        func: impl Fn(&[Value]) -> crate::error::ValueResult<Value> + Send + Sync + 'static,
459    ) -> Self {
460        Self {
461            name: name.into(),
462            arity,
463            func: Arc::new(func),
464        }
465    }
466}
467
impl std::fmt::Debug for NativeFn {
    // Manual impl: `func` is a trait object and cannot derive `Debug`,
    // so it is rendered as the placeholder "<fn>".
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("NativeFn")
            .field("name", &self.name)
            .field("arity", &self.arity)
            .field("func", &"<fn>")
            .finish()
    }
}

impl cljrs_gc::Trace for NativeFn {
    // Holds no `Value` or `GcPtr` — nothing for the GC to mark.
    fn trace(&self, _: &mut cljrs_gc::MarkVisitor) {}
}
481
// ── CljxFnArity ───────────────────────────────────────────────────────────────

/// One arity branch of a Clojure function.
#[derive(Debug, Clone)]
pub struct CljxFnArity {
    /// Simple parameter names (no `&`).
    /// For destructured params, these are gensym'd names.
    pub params: Vec<Arc<str>>,
    /// The name after `&`, if any.
    pub rest_param: Option<Arc<str>>,
    /// The body forms for this arity.
    pub body: Vec<Form>,
    /// Destructuring patterns: (param_index, original_form).
    /// After binding the gensym'd param, these patterns are applied
    /// via `bind_pattern` to destructure the value.
    pub destructure_params: Vec<(usize, Form)>,
    /// If the rest param is destructured, the original form.
    pub destructure_rest: Option<Form>,
    /// Unique ID for IR cache lookup (assigned by the evaluator).
    pub ir_arity_id: u64,
}

// ── CljxFn ────────────────────────────────────────────────────────────────────

/// An interpreted Clojure closure with captured environment.
#[derive(Debug, Clone)]
pub struct CljxFn {
    /// Optional self-name (from `(fn name [...] ...)` / `defn`).
    pub name: Option<Arc<str>>,
    /// All arity branches of this function.
    pub arities: Vec<CljxFnArity>,
    /// Names of closed-over bindings (parallel to `closed_over_vals`).
    pub closed_over_names: Vec<Arc<str>>,
    /// Values of closed-over bindings (parallel to `closed_over_names`).
    pub closed_over_vals: Vec<Value>,
    /// True if this function was defined with `defmacro`.
    pub is_macro: bool,
    /// Namespace in which this function was defined (for macro hygiene).
    pub defining_ns: Arc<str>,
}

impl CljxFn {
    /// Assemble a closure from its parts; `closed_over_names` and
    /// `closed_over_vals` must be parallel vectors.
    pub fn new(
        name: Option<Arc<str>>,
        arities: Vec<CljxFnArity>,
        closed_over_names: Vec<Arc<str>>,
        closed_over_vals: Vec<Value>,
        is_macro: bool,
        defining_ns: Arc<str>,
    ) -> Self {
        Self {
            name,
            arities,
            closed_over_names,
            closed_over_vals,
            is_macro,
            defining_ns,
        }
    }
}
540
541impl cljrs_gc::Trace for CljxFn {
542    fn trace(&self, visitor: &mut cljrs_gc::MarkVisitor) {
543        for v in &self.closed_over_vals {
544            v.trace(visitor);
545        }
546    }
547}
548
// ── BoundFn ──────────────────────────────────────────────────────────────────

/// A function wrapped with captured dynamic bindings.
/// When called, the captured bindings are pushed as a frame before delegating
/// to the wrapped function. This means captured bindings override the caller's
/// for the same var, but vars not in the capture fall through normally.
#[derive(Debug)]
pub struct BoundFn {
    /// The wrapped callable.
    pub wrapped: Value,
    /// Captured dynamic bindings (merged flat frame; opaque to cljrs-value).
    /// Keys are opaque `usize` ids — presumably var slot ids assigned by the
    /// evaluator; confirm against the binding-frame implementation.
    pub captured_bindings: HashMap<usize, Value>,
}
562
563impl cljrs_gc::Trace for BoundFn {
564    fn trace(&self, visitor: &mut cljrs_gc::MarkVisitor) {
565        self.wrapped.trace(visitor);
566        for val in self.captured_bindings.values() {
567            val.trace(visitor);
568        }
569    }
570}
571
// ── Thunk / LazySeq ───────────────────────────────────────────────────────────

/// A deferred computation that produces a `Value` when forced.
pub trait Thunk: Send + Sync + std::fmt::Debug + cljrs_gc::Trace {
    /// Run the computation, returning the produced value or an error message.
    fn force(&self) -> Result<Value, String>;
}

/// Internal state of a lazy sequence cell.
pub enum LazySeqState {
    /// Thunk not yet evaluated.
    Pending(Box<dyn Thunk>),
    /// Result cached after first force.
    Forced(Value),
    /// Thunk evaluation failed; error message is cached.
    Error(String),
}

/// A lazy sequence that forces its thunk exactly once and caches the result.
pub struct LazySeq {
    /// Guarded state; locked only briefly — never across `Thunk::force`.
    pub state: Mutex<LazySeqState>,
}

impl LazySeq {
    /// Wrap `thunk` in an unrealized lazy-sequence cell.
    pub fn new(thunk: Box<dyn Thunk>) -> Self {
        Self {
            state: Mutex::new(LazySeqState::Pending(thunk)),
        }
    }

    /// Realize the sequence: force the thunk on first call, return cached value on subsequent calls.
    /// On error, returns `Value::Nil` and caches the error (retrievable via `error()`).
    ///
    /// NOTE(review): while one thread is forcing the thunk, the state holds the
    /// placeholder `Forced(Nil)`, so a concurrent `realize()` on the same cell
    /// would observe and return `Nil` instead of the eventual result — confirm
    /// a given cell is only forced from one thread at a time.
    pub fn realize(&self) -> Value {
        let thunk = {
            let mut guard = self.state.lock().unwrap();
            match &*guard {
                LazySeqState::Forced(v) => return v.clone(),
                LazySeqState::Error(_) => return Value::Nil,
                LazySeqState::Pending(_) => {}
            }
            // Replace the pending state with a temporary Forced(Nil), extract the thunk.
            let prev = mem::replace(&mut *guard, LazySeqState::Forced(Value::Nil));
            let LazySeqState::Pending(thunk) = prev else {
                unreachable!("state was not Pending")
            };
            thunk
            // guard dropped here — lock released before forcing
        };
        // Force the thunk WITHOUT holding the lock. This ensures GC's
        // lock().unwrap() in LazySeq::trace() will not deadlock.
        match thunk.force() {
            Ok(result) => {
                *self.state.lock().unwrap() = LazySeqState::Forced(result.clone());
                result
            }
            Err(msg) => {
                *self.state.lock().unwrap() = LazySeqState::Error(msg);
                Value::Nil
            }
        }
    }

    /// Return the cached error message, if the thunk failed.
    pub fn error(&self) -> Option<String> {
        let guard = self.state.lock().unwrap();
        if let LazySeqState::Error(e) = &*guard {
            Some(e.clone())
        } else {
            None
        }
    }
}
643
644impl std::fmt::Debug for LazySeq {
645    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
646        write!(f, "LazySeq(...)")
647    }
648}
649
650impl cljrs_gc::Trace for LazySeq {
651    fn trace(&self, visitor: &mut cljrs_gc::MarkVisitor) {
652        // Safe to lock unconditionally: realize() drops the lock before entering
653        // eval (thunk.force()), so the lock is never held across a GC safepoint.
654        {
655            let state = self.state.lock().unwrap();
656            match &*state {
657                LazySeqState::Pending(thunk) => thunk.trace(visitor),
658                LazySeqState::Forced(v) => v.trace(visitor),
659                LazySeqState::Error(_) => {}
660            }
661        }
662    }
663}
664
// ── CljxCons ──────────────────────────────────────────────────────────────────

/// A lazy cons cell: head element + tail (may be a `LazySeq`, `List`, or `Nil`).
///
/// Used when `cons` is called with a `LazySeq` or `Cons` tail, enabling lazy
/// sequences without eagerly realizing them.
#[derive(Debug, Clone)]
pub struct CljxCons {
    /// First element of the sequence.
    pub head: Value,
    /// Remainder of the sequence (not eagerly realized).
    pub tail: Value,
}
676
677impl cljrs_gc::Trace for CljxCons {
678    fn trace(&self, visitor: &mut cljrs_gc::MarkVisitor) {
679        self.head.trace(visitor);
680        self.tail.trace(visitor);
681    }
682}
683
// ── Volatile ──────────────────────────────────────────────────────────────────

/// Non-atomic mutable cell (single-thread performance, no CAS).
pub struct Volatile {
    /// Current value.
    pub value: Mutex<Value>,
}
690
691impl Volatile {
692    pub fn new(v: Value) -> Self {
693        Self {
694            value: Mutex::new(v),
695        }
696    }
697
698    pub fn deref(&self) -> Value {
699        self.value.lock().unwrap().clone()
700    }
701
702    pub fn reset(&self, v: Value) -> Value {
703        // In no-gc debug builds: assert the new value came from the StaticArena.
704        #[cfg(all(feature = "no-gc", debug_assertions))]
705        debug_assert!(
706            value_gcptr_is_static(&v),
707            "no-gc: Volatile::reset() received a region-local value — ensure the \
708             new-value expression is inside a StaticCtxGuard (vreset! handles this)"
709        );
710        *self.value.lock().unwrap() = v.clone();
711        v
712    }
713}
714
715impl std::fmt::Debug for Volatile {
716    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
717        write!(f, "Volatile")
718    }
719}
720
721impl cljrs_gc::Trace for Volatile {
722    fn trace(&self, visitor: &mut cljrs_gc::MarkVisitor) {
723        {
724            let value = self.value.lock().unwrap();
725            value.trace(visitor);
726        }
727    }
728}
729
// ── Delay ─────────────────────────────────────────────────────────────────────

/// Internal state of a delay cell.
pub enum DelayState {
    /// Thunk not yet evaluated.
    Pending(Box<dyn Thunk>),
    /// Result cached after the first successful force.
    Forced(Value),
}

/// A lazy one-time computation (forced at most once, result cached).
pub struct Delay {
    /// Guarded state; locked only briefly — never across `Thunk::force`.
    pub state: Mutex<DelayState>,
}
742
743impl Delay {
744    pub fn new(thunk: Box<dyn Thunk>) -> Self {
745        Self {
746            state: Mutex::new(DelayState::Pending(thunk)),
747        }
748    }
749
750    /// Force the delay and cache the result.
751    /// Returns the value on success, or an error message on failure.
752    pub fn force(&self) -> Result<Value, String> {
753        let thunk = {
754            let mut guard = self.state.lock().unwrap();
755            if let DelayState::Forced(v) = &*guard {
756                return Ok(v.clone());
757            }
758            let prev = mem::replace(&mut *guard, DelayState::Forced(Value::Nil));
759            let DelayState::Pending(thunk) = prev else {
760                unreachable!("state was not Pending")
761            };
762            thunk
763            // guard dropped here — lock released before forcing
764        };
765        // Force the thunk WITHOUT holding the lock so GC's lock().unwrap() in
766        // Delay::trace() will not deadlock.
767        let result = thunk.force()?;
768        *self.state.lock().unwrap() = DelayState::Forced(result.clone());
769        Ok(result)
770    }
771
772    /// True if the delay has already been forced.
773    pub fn is_realized(&self) -> bool {
774        matches!(&*self.state.lock().unwrap(), DelayState::Forced(_))
775    }
776}
777
778impl std::fmt::Debug for Delay {
779    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
780        write!(f, "Delay")
781    }
782}
783
784impl cljrs_gc::Trace for Delay {
785    fn trace(&self, visitor: &mut cljrs_gc::MarkVisitor) {
786        // Safe to lock unconditionally: force() drops the lock before entering
787        // eval (thunk.force()), so the lock is never held across a GC safepoint.
788        {
789            let state = self.state.lock().unwrap();
790            match &*state {
791                DelayState::Pending(thunk) => thunk.trace(visitor),
792                DelayState::Forced(v) => v.trace(visitor),
793            }
794        }
795    }
796}
797
// ── CljxPromise ───────────────────────────────────────────────────────────────

/// A one-shot rendezvous (promise).
pub struct CljxPromise {
    /// Delivered value; `None` until `deliver` is called.
    pub value: Mutex<Option<Value>>,
    /// Signalled on delivery to wake blocked `deref_blocking` callers.
    pub cond: Condvar,
}
805
806impl CljxPromise {
807    pub fn new() -> Self {
808        Self {
809            value: Mutex::new(None),
810            cond: Condvar::new(),
811        }
812    }
813
814    /// Deliver a value (no-op if already delivered).
815    pub fn deliver(&self, v: Value) {
816        let mut guard = self.value.lock().unwrap();
817        if guard.is_none() {
818            *guard = Some(v);
819            self.cond.notify_all();
820        }
821    }
822
823    /// Block until a value is available, then return it.
824    pub fn deref_blocking(&self) -> Value {
825        let mut guard = self.value.lock().unwrap();
826        while guard.is_none() {
827            guard = self.cond.wait(guard).unwrap();
828        }
829        guard.as_ref().unwrap().clone()
830    }
831
832    /// True if already delivered.
833    pub fn is_realized(&self) -> bool {
834        self.value.lock().unwrap().is_some()
835    }
836}
837
838impl Default for CljxPromise {
839    fn default() -> Self {
840        Self::new()
841    }
842}
843
844impl std::fmt::Debug for CljxPromise {
845    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
846        write!(f, "Promise")
847    }
848}
849
850impl cljrs_gc::Trace for CljxPromise {
851    fn trace(&self, visitor: &mut cljrs_gc::MarkVisitor) {
852        {
853            let value = self.value.lock().unwrap();
854            if let Some(v) = value.as_ref() {
855                v.trace(visitor);
856            }
857        }
858    }
859}
860
// ── CljxFuture ────────────────────────────────────────────────────────────────

/// Thread-pool future state.
pub enum FutureState {
    /// Computation still in flight.
    Running,
    /// Completed successfully with a value.
    Done(Value),
    /// Computation failed; error message cached.
    Failed(String),
    /// Explicitly cancelled.
    Cancelled,
}

/// A future value computed asynchronously on another thread.
pub struct CljxFuture {
    /// Current lifecycle state.
    pub state: Mutex<FutureState>,
    /// NOTE(review): not signalled or waited on anywhere in this file —
    /// presumably the worker thread notifies it on completion; confirm at
    /// the spawn/deref sites.
    pub cond: Condvar,
}
876
877impl CljxFuture {
878    pub fn new() -> Self {
879        Self {
880            state: Mutex::new(FutureState::Running),
881            cond: Condvar::new(),
882        }
883    }
884
885    /// True if done, failed, or cancelled (not still running).
886    pub fn is_done(&self) -> bool {
887        !matches!(&*self.state.lock().unwrap(), FutureState::Running)
888    }
889
890    /// True if explicitly cancelled.
891    pub fn is_cancelled(&self) -> bool {
892        matches!(&*self.state.lock().unwrap(), FutureState::Cancelled)
893    }
894}
895
896impl Default for CljxFuture {
897    fn default() -> Self {
898        Self::new()
899    }
900}
901
902impl std::fmt::Debug for CljxFuture {
903    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
904        write!(f, "Future")
905    }
906}
907
908impl cljrs_gc::Trace for CljxFuture {
909    fn trace(&self, visitor: &mut cljrs_gc::MarkVisitor) {
910        {
911            let state = self.state.lock().unwrap();
912            if let FutureState::Done(v) = &*state {
913                v.trace(visitor);
914            }
915        }
916    }
917}
918
// ── Agent ─────────────────────────────────────────────────────────────────────

/// A Clojure agent action: takes the current state, returns the new state.
pub type AgentFn = Box<dyn FnOnce(Value) -> Result<Value, Value> + Send>;

/// Messages sent to an agent's worker thread.
pub enum AgentMsg {
    /// Apply this action to the current state.
    Update(AgentFn),
    /// Stop the worker thread.
    Shutdown,
}

/// A Clojure agent — asynchronous state update queue.
pub struct Agent {
    /// Current state, shared between the Value::Agent handle and the worker thread.
    pub state: Arc<Mutex<Value>>,
    /// Last error, shared similarly.
    pub error: Arc<Mutex<Option<Value>>>,
    /// Channel to send actions to the worker thread.
    pub sender: Mutex<std::sync::mpsc::SyncSender<AgentMsg>>,
    /// Watch registrations as (key, watch-fn) pairs.
    pub watches: Mutex<Vec<(Value, Value)>>,
}
940
941impl Agent {
942    pub fn get_state(&self) -> Value {
943        self.state.lock().unwrap().clone()
944    }
945
946    pub fn get_error(&self) -> Option<Value> {
947        self.error.lock().unwrap().clone()
948    }
949
950    pub fn clear_error(&self) {
951        *self.error.lock().unwrap() = None;
952    }
953}
954
955impl std::fmt::Debug for Agent {
956    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
957        write!(f, "Agent")
958    }
959}
960
961impl cljrs_gc::Trace for Agent {
962    fn trace(&self, visitor: &mut cljrs_gc::MarkVisitor) {
963        {
964            let state = self.state.lock().unwrap();
965            state.trace(visitor);
966        }
967        {
968            let error = self.error.lock().unwrap();
969            if let Some(e) = error.as_ref() {
970                e.trace(visitor);
971            }
972        }
973        {
974            let watches = self.watches.lock().unwrap();
975            for (key, f) in watches.iter() {
976                key.trace(visitor);
977                f.trace(visitor);
978            }
979        }
980    }
981}