// tidepool_codegen — debug.rs
//! JIT debugging tools.
//!
//! Provides reusable infrastructure for debugging JIT-compiled code:
//! - **LambdaRegistry**: maps code pointers back to lambda names
//! - **heap_describe**: human-readable description of heap objects
//! - **heap_validate**: structural integrity checks for heap objects
//! - **TracingClosureCaller**: wraps closure calls with logging
//!
//! Tracing is controlled by the `TIDEPOOL_TRACE` env var:
//! - `TIDEPOOL_TRACE=calls` — log each closure call (name, arg, result)
//! - `TIDEPOOL_TRACE=heap` — also validate heap objects before use
13use tidepool_heap::layout;
14use std::collections::HashMap;
15use std::sync::Mutex;
16
// ── Lambda Registry ──────────────────────────────────────────

// Global singleton; `None` until `set_lambda_registry` installs a registry.
static LAMBDA_REGISTRY: Mutex<Option<LambdaRegistry>> = Mutex::new(None);
20
/// Maps JIT code pointers to human-readable lambda names.
///
/// Populated during compilation, queried during execution to identify
/// which closure is being called when debugging crashes.
#[derive(Default)]
pub struct LambdaRegistry {
    /// code_ptr → lambda name
    entries: HashMap<usize, String>,
}

impl LambdaRegistry {
    /// Create an empty registry.
    pub fn new() -> Self {
        Self::default()
    }

    /// Record `name` for the lambda whose compiled code starts at `code_ptr`.
    /// A later registration for the same pointer overwrites the earlier one.
    pub fn register(&mut self, code_ptr: usize, name: String) {
        self.entries.insert(code_ptr, name);
    }

    /// Resolve a code pointer back to its lambda name, if one was registered.
    pub fn lookup(&self, code_ptr: usize) -> Option<&str> {
        self.entries.get(&code_ptr).map(String::as_str)
    }

    /// Number of registered lambdas.
    pub fn len(&self) -> usize {
        self.entries.len()
    }

    /// True when no lambdas have been registered yet.
    pub fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }
}
55
56/// Install a registry as the global singleton. Returns the old one if any.
57pub fn set_lambda_registry(registry: LambdaRegistry) -> Option<LambdaRegistry> {
58    let mut guard = LAMBDA_REGISTRY.lock().unwrap();
59    guard.replace(registry)
60}
61
62/// Clear the global registry.
63pub fn clear_lambda_registry() -> Option<LambdaRegistry> {
64    LAMBDA_REGISTRY.lock().unwrap().take()
65}
66
67/// Look up a code pointer in the global registry.
68pub fn lookup_lambda(code_ptr: usize) -> Option<String> {
69    let guard = LAMBDA_REGISTRY.lock().unwrap();
70    guard
71        .as_ref()
72        .and_then(|r| r.lookup(code_ptr))
73        .map(|s| s.to_string())
74}
75
76// ── Heap Object Inspection ───────────────────────────────────
77
78/// Describes a heap object in human-readable form.
79///
80/// Returns a string like:
81/// - `Lit(Int#, 42)`
82/// - `Con(tag=12345, 2 fields)`
83/// - `Closure(code=0x..., 3 captures) [repl_lambda_5]`
84/// - `INVALID(tag=255, ptr=0x...)`
85///
86/// # Safety
87///
88/// `ptr` must point to a valid heap object, or at least readable memory.
89pub unsafe fn heap_describe(ptr: *const u8) -> String {
90    if ptr.is_null() {
91        return "NULL".to_string();
92    }
93
94    let tag_byte = *ptr.add(layout::OFFSET_TAG);
95    let size = std::ptr::read_unaligned(ptr.add(layout::OFFSET_SIZE) as *const u16);
96
97    match layout::HeapTag::from_byte(tag_byte) {
98        Some(layout::HeapTag::Lit) => {
99            let lit_tag = *ptr.add(layout::LIT_TAG_OFFSET);
100            let value = *(ptr.add(layout::LIT_VALUE_OFFSET) as *const i64);
101            let tag_name = layout::LitTag::from_byte(lit_tag)
102                .map(|t| t.to_string())
103                .unwrap_or_else(|| format!("?{}", lit_tag));
104            format!("Lit({}, {})", tag_name, value)
105        }
106        Some(layout::HeapTag::Con) => {
107            let con_tag = *(ptr.add(layout::CON_TAG_OFFSET) as *const u64);
108            let num_fields = *(ptr.add(layout::CON_NUM_FIELDS_OFFSET) as *const u16);
109            format!("Con(tag={}, {} fields, size={})", con_tag, num_fields, size)
110        }
111        Some(layout::HeapTag::Closure) => {
112            let code_ptr = *(ptr.add(layout::CLOSURE_CODE_PTR_OFFSET) as *const usize);
113            let num_captured = *(ptr.add(layout::CLOSURE_NUM_CAPTURED_OFFSET) as *const u16);
114            let name = lookup_lambda(code_ptr);
115            let name_str = name
116                .as_deref()
117                .map(|n| format!(" [{}]", n))
118                .unwrap_or_default();
119            format!(
120                "Closure(code=0x{:x}, {} captures, size={}){}",
121                code_ptr, num_captured, size, name_str
122            )
123        }
124        Some(layout::HeapTag::Thunk) => {
125            let state = *ptr.add(layout::THUNK_STATE_OFFSET);
126            format!("Thunk(state={}, size={})", state, size)
127        }
128        None => {
129            format!("INVALID(tag={}, size={}, ptr={:?})", tag_byte, size, ptr)
130        }
131    }
132}
133
// ── Heap Object Validation ───────────────────────────────────

/// Validation errors for heap objects.
///
/// `Clone`/`PartialEq`/`Eq` are derived so callers can compare expected vs.
/// actual validation outcomes in tests and retain errors across reports.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum HeapError {
    NullPointer,
    InvalidTag(u8),
    ZeroSize,
    /// Closure has null code pointer
    NullCodePtr,
    /// Size field doesn't match expected size for the object type
    SizeMismatch { expected_min: u16, actual: u16 },
    /// A field pointer is null
    NullField { index: usize },
    /// A field pointer has an invalid heap tag
    InvalidFieldTag { index: usize, tag: u8 },
}

impl std::fmt::Display for HeapError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            HeapError::NullPointer => write!(f, "null pointer"),
            HeapError::InvalidTag(t) => write!(f, "invalid heap tag: {}", t),
            HeapError::ZeroSize => write!(f, "zero size"),
            HeapError::NullCodePtr => write!(f, "null code pointer in closure"),
            HeapError::SizeMismatch { expected_min, actual } => {
                write!(f, "size mismatch: expected >= {}, got {}", expected_min, actual)
            }
            HeapError::NullField { index } => write!(f, "null pointer in field {}", index),
            HeapError::InvalidFieldTag { index, tag } => {
                write!(f, "field {} has invalid tag: {}", index, tag)
            }
        }
    }
}

// Implementing `std::error::Error` lets HeapError flow through `?`,
// `Box<dyn Error>`, and error-reporting crates like any other error type.
impl std::error::Error for HeapError {}
169
170/// Validate a heap object's structural integrity.
171///
172/// Checks:
173/// - Non-null pointer
174/// - Valid tag byte
175/// - Non-zero size
176/// - Size consistent with field count
177/// - Closure code_ptr is non-null
178///
179/// Does NOT follow field pointers (use `heap_validate_deep` for that).
180///
181/// # Safety
182///
183/// `ptr` must point to readable memory of at least `size` bytes.
184pub unsafe fn heap_validate(ptr: *const u8) -> Result<(), HeapError> {
185    if ptr.is_null() {
186        return Err(HeapError::NullPointer);
187    }
188
189    let tag_byte = *ptr.add(layout::OFFSET_TAG);
190    let size = std::ptr::read_unaligned(ptr.add(layout::OFFSET_SIZE) as *const u16);
191
192    if size == 0 {
193        return Err(HeapError::ZeroSize);
194    }
195
196    match layout::HeapTag::from_byte(tag_byte) {
197        None => return Err(HeapError::InvalidTag(tag_byte)),
198        Some(layout::HeapTag::Closure) => {
199            let code_ptr = *(ptr.add(layout::CLOSURE_CODE_PTR_OFFSET) as *const usize);
200            if code_ptr == 0 {
201                return Err(HeapError::NullCodePtr);
202            }
203            let num_captured = *(ptr.add(layout::CLOSURE_NUM_CAPTURED_OFFSET) as *const u16);
204            let expected_min = (24 + 8 * num_captured as usize) as u16;
205            if size < expected_min {
206                return Err(HeapError::SizeMismatch {
207                    expected_min,
208                    actual: size,
209                });
210            }
211        }
212        Some(layout::HeapTag::Con) => {
213            let num_fields = *(ptr.add(layout::CON_NUM_FIELDS_OFFSET) as *const u16);
214            let expected_min = (24 + 8 * num_fields as usize) as u16;
215            if size < expected_min {
216                return Err(HeapError::SizeMismatch {
217                    expected_min,
218                    actual: size,
219                });
220            }
221        }
222        Some(layout::HeapTag::Lit) => {
223            if size < layout::LIT_SIZE as u16 {
224                return Err(HeapError::SizeMismatch {
225                    expected_min: layout::LIT_SIZE as u16,
226                    actual: size,
227                });
228            }
229        }
230        Some(layout::HeapTag::Thunk) => {
231            // Thunks are at least header + state + code_ptr
232            if size < 24 {
233                return Err(HeapError::SizeMismatch {
234                    expected_min: 24,
235                    actual: size,
236                });
237            }
238        }
239    }
240
241    Ok(())
242}
243
244/// Validate a heap object and all its pointer fields (one level deep).
245///
246/// # Safety
247///
248/// All pointers must be readable.
249pub unsafe fn heap_validate_deep(ptr: *const u8) -> Result<(), HeapError> {
250    heap_validate(ptr)?;
251
252    let tag_byte = *ptr.add(layout::OFFSET_TAG);
253    match layout::HeapTag::from_byte(tag_byte) {
254        Some(layout::HeapTag::Con) => {
255            let num_fields = *(ptr.add(layout::CON_NUM_FIELDS_OFFSET) as *const u16);
256            for i in 0..num_fields as usize {
257                let field =
258                    *(ptr.add(layout::CON_FIELDS_OFFSET + 8 * i) as *const *const u8);
259                if field.is_null() {
260                    return Err(HeapError::NullField { index: i });
261                }
262                let field_tag = *field.add(layout::OFFSET_TAG);
263                if layout::HeapTag::from_byte(field_tag).is_none() {
264                    return Err(HeapError::InvalidFieldTag {
265                        index: i,
266                        tag: field_tag,
267                    });
268                }
269            }
270        }
271        Some(layout::HeapTag::Closure) => {
272            let num_captured = *(ptr.add(layout::CLOSURE_NUM_CAPTURED_OFFSET) as *const u16);
273            for i in 0..num_captured as usize {
274                let cap =
275                    *(ptr.add(layout::CLOSURE_CAPTURED_OFFSET + 8 * i) as *const *const u8);
276                if cap.is_null() {
277                    return Err(HeapError::NullField { index: i });
278                }
279                let cap_tag = *cap.add(layout::OFFSET_TAG);
280                if layout::HeapTag::from_byte(cap_tag).is_none() {
281                    return Err(HeapError::InvalidFieldTag {
282                        index: i,
283                        tag: cap_tag,
284                    });
285                }
286            }
287        }
288        _ => {}
289    }
290
291    Ok(())
292}
293
// ── Trace Level ──────────────────────────────────────────────

/// Trace level, controlled by `TIDEPOOL_TRACE` env var.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum TraceLevel {
    Off,
    Calls,
    Heap,
}

/// Read the trace level from the environment. Cached after first call.
///
/// Only the exact values `calls` and `heap` are recognized; anything else
/// (including an unset variable) means tracing is off.
pub fn trace_level() -> TraceLevel {
    use std::sync::OnceLock;
    static LEVEL: OnceLock<TraceLevel> = OnceLock::new();
    *LEVEL.get_or_init(|| {
        let var = std::env::var("TIDEPOOL_TRACE");
        match var.as_deref() {
            Ok("heap") => TraceLevel::Heap,
            Ok("calls") => TraceLevel::Calls,
            _ => TraceLevel::Off,
        }
    })
}