1use crate::layout;
14use std::collections::HashMap;
15use std::sync::Mutex;
16use tidepool_heap::layout as heap_layout;
17
/// Process-global registry mapping lambda entry-point addresses to
/// human-readable names. `None` until `set_lambda_registry` installs one;
/// guarded by a `Mutex` so it can be installed, cleared, and queried from
/// any thread.
static LAMBDA_REGISTRY: Mutex<Option<LambdaRegistry>> = Mutex::new(None);
21
/// Maps lambda entry-point code addresses to human-readable names, used to
/// annotate trace output and heap dumps.
#[derive(Default)]
pub struct LambdaRegistry {
    // Keyed by the lambda's entry-point address.
    entries: HashMap<usize, String>,
}

impl LambdaRegistry {
    /// Create an empty registry.
    pub fn new() -> Self {
        Self::default()
    }

    /// Record `name` for the lambda whose entry point is `code_ptr`,
    /// replacing any name previously registered at that address.
    pub fn register(&mut self, code_ptr: usize, name: String) {
        self.entries.insert(code_ptr, name);
    }

    /// Exact lookup: the name registered at precisely `code_ptr`, if any.
    pub fn lookup(&self, code_ptr: usize) -> Option<&str> {
        self.entries.get(&code_ptr).map(|s| s.as_str())
    }

    /// Inexact lookup: the name of the entry whose address is the greatest
    /// one not exceeding `addr` (useful for addresses that fall inside a
    /// lambda's body rather than exactly at its entry point).
    pub fn lookup_by_address(&self, addr: usize) -> Option<&str> {
        self.entries
            .iter()
            .filter(|&(&ptr, _)| ptr <= addr)
            .max_by_key(|&(&ptr, _)| ptr)
            .map(|(_, name)| name.as_str())
    }

    /// Number of registered lambdas.
    pub fn len(&self) -> usize {
        self.entries.len()
    }

    /// True when no lambdas have been registered.
    pub fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }
}
74
75pub fn set_lambda_registry(registry: LambdaRegistry) -> Option<LambdaRegistry> {
77 let mut guard = LAMBDA_REGISTRY.lock().unwrap_or_else(|e| e.into_inner());
78 guard.replace(registry)
79}
80
81pub fn clear_lambda_registry() -> Option<LambdaRegistry> {
83 LAMBDA_REGISTRY
84 .lock()
85 .unwrap_or_else(|e| e.into_inner())
86 .take()
87}
88
89pub fn lookup_lambda(code_ptr: usize) -> Option<String> {
91 let guard = LAMBDA_REGISTRY.lock().unwrap_or_else(|e| e.into_inner());
92 guard
93 .as_ref()
94 .and_then(|r| r.lookup(code_ptr))
95 .map(|s| s.to_string())
96}
97
98pub fn lookup_lambda_by_address(addr: usize) -> Option<String> {
100 let guard = LAMBDA_REGISTRY.lock().unwrap_or_else(|e| e.into_inner());
101 guard
102 .as_ref()
103 .and_then(|r| r.lookup_by_address(addr))
104 .map(|s| s.to_string())
105}
106
107pub unsafe fn heap_describe(ptr: *const u8) -> String {
121 if ptr.is_null() {
123 return "NULL".to_string();
124 }
125
126 let tag_byte = *ptr.add(heap_layout::OFFSET_TAG);
127 let size = std::ptr::read_unaligned(ptr.add(heap_layout::OFFSET_SIZE) as *const u16);
128
129 match heap_layout::HeapTag::from_byte(tag_byte) {
130 Some(heap_layout::HeapTag::Lit) => {
131 let lit_tag = *ptr.add(layout::LIT_TAG_OFFSET as usize);
132 let value = *(ptr.add(layout::LIT_VALUE_OFFSET as usize) as *const i64);
133 let tag_name = heap_layout::LitTag::from_byte(lit_tag)
134 .map(|t| t.to_string())
135 .unwrap_or_else(|| format!("?{}", lit_tag));
136 format!("Lit({}, {})", tag_name, value)
137 }
138 Some(heap_layout::HeapTag::Con) => {
139 let con_tag = *(ptr.add(layout::CON_TAG_OFFSET as usize) as *const u64);
140 let num_fields = *(ptr.add(layout::CON_NUM_FIELDS_OFFSET as usize) as *const u16);
141 format!("Con(tag={}, {} fields, size={})", con_tag, num_fields, size)
142 }
143 Some(heap_layout::HeapTag::Closure) => {
144 let code_ptr = *(ptr.add(layout::CLOSURE_CODE_PTR_OFFSET as usize) as *const usize);
145 let num_captured =
146 *(ptr.add(layout::CLOSURE_NUM_CAPTURED_OFFSET as usize) as *const u16);
147 let name = lookup_lambda(code_ptr);
148 let name_str = name
149 .as_deref()
150 .map(|n| format!(" [{}]", n))
151 .unwrap_or_default();
152 format!(
153 "Closure(code=0x{:x}, {} captures, size={}){}",
154 code_ptr, num_captured, size, name_str
155 )
156 }
157 Some(heap_layout::HeapTag::Thunk) => {
158 let state = *ptr.add(layout::THUNK_STATE_OFFSET as usize);
159 format!("Thunk(state={}, size={})", state, size)
160 }
161 None => {
162 format!("INVALID(tag={}, size={}, ptr={:?})", tag_byte, size, ptr)
163 }
164 }
165}
166
/// Structural problems detected while validating a heap object.
#[derive(Debug)]
pub enum HeapError {
    /// The object pointer itself was null.
    NullPointer,
    /// The header tag byte did not decode to a known `HeapTag`.
    InvalidTag(u8),
    /// The header size field was zero.
    ZeroSize,
    /// A closure object carried a null code pointer.
    NullCodePtr,
    /// The header size is smaller than the minimum implied by the object's
    /// field/capture count or fixed layout.
    SizeMismatch {
        expected_min: u16,
        actual: u16,
    },
    /// A pointer slot at `index` was null.
    NullField {
        index: usize,
    },
    /// The child object in slot `index` had an undecodable tag byte.
    InvalidFieldTag {
        index: usize,
        tag: u8,
    },
}

impl std::fmt::Display for HeapError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            HeapError::NullPointer => write!(f, "null pointer"),
            HeapError::InvalidTag(t) => write!(f, "invalid heap tag: {}", t),
            HeapError::ZeroSize => write!(f, "zero size"),
            HeapError::NullCodePtr => write!(f, "null code pointer in closure"),
            HeapError::SizeMismatch {
                expected_min,
                actual,
            } => {
                write!(
                    f,
                    "size mismatch: expected >= {}, got {}",
                    expected_min, actual
                )
            }
            HeapError::NullField { index } => write!(f, "null pointer in field {}", index),
            HeapError::InvalidFieldTag { index, tag } => {
                write!(f, "field {} has invalid tag: {}", index, tag)
            }
        }
    }
}

// Lets HeapError be used behind `Box<dyn std::error::Error>` and with `?`
// in functions returning boxed errors; Display + Debug cover the contract.
impl std::error::Error for HeapError {}
217
218pub unsafe fn heap_validate(ptr: *const u8) -> Result<(), HeapError> {
233 if ptr.is_null() {
235 return Err(HeapError::NullPointer);
236 }
237
238 let tag_byte = *ptr.add(heap_layout::OFFSET_TAG);
239 let size = std::ptr::read_unaligned(ptr.add(heap_layout::OFFSET_SIZE) as *const u16);
240
241 if size == 0 {
242 return Err(HeapError::ZeroSize);
243 }
244
245 match heap_layout::HeapTag::from_byte(tag_byte) {
246 None => return Err(HeapError::InvalidTag(tag_byte)),
247 Some(heap_layout::HeapTag::Closure) => {
248 let code_ptr = *(ptr.add(layout::CLOSURE_CODE_PTR_OFFSET as usize) as *const usize);
249 if code_ptr == 0 {
250 return Err(HeapError::NullCodePtr);
251 }
252 let num_captured =
253 *(ptr.add(layout::CLOSURE_NUM_CAPTURED_OFFSET as usize) as *const u16);
254 let expected_min =
255 (layout::CLOSURE_CAPTURED_OFFSET as usize + 8 * num_captured as usize) as u16;
256 if size < expected_min {
257 return Err(HeapError::SizeMismatch {
258 expected_min,
259 actual: size,
260 });
261 }
262 }
263 Some(heap_layout::HeapTag::Con) => {
264 let num_fields = *(ptr.add(layout::CON_NUM_FIELDS_OFFSET as usize) as *const u16);
265 let expected_min =
266 (layout::CON_FIELDS_OFFSET as usize + 8 * num_fields as usize) as u16;
267 if size < expected_min {
268 return Err(HeapError::SizeMismatch {
269 expected_min,
270 actual: size,
271 });
272 }
273 }
274 Some(heap_layout::HeapTag::Lit) => {
275 if size < layout::LIT_TOTAL_SIZE as u16 {
276 return Err(HeapError::SizeMismatch {
277 expected_min: layout::LIT_TOTAL_SIZE as u16,
278 actual: size,
279 });
280 }
281 }
282 Some(heap_layout::HeapTag::Thunk) => {
283 if size < layout::THUNK_MIN_SIZE as u16 {
285 return Err(HeapError::SizeMismatch {
286 expected_min: layout::THUNK_MIN_SIZE as u16,
287 actual: size,
288 });
289 }
290 }
291 }
292
293 Ok(())
294}
295
/// Invokes closure heap objects through the C ABI, emitting trace output
/// and optional heap validation around each call (see `call`).
pub struct TracingClosureCaller {
    // Raw VM context passed as the first argument to every closure call.
    // NOTE(review): assumed valid for the caller's lifetime — confirm at
    // construction sites.
    pub vmctx: *mut crate::context::VMContext,
}
300
301impl TracingClosureCaller {
302 pub unsafe fn call(&self, callee: *mut u8, arg: *mut u8) -> Result<*mut u8, String> {
305 if crate::debug::trace_level() >= crate::debug::TraceLevel::Heap {
308 heap_validate(callee).map_err(|e| format!("Closure validation failed: {}", e))?;
309 heap_validate(arg).map_err(|e| format!("Arg validation failed: {}", e))?;
310 }
311
312 let tag_byte = *callee.add(heap_layout::OFFSET_TAG);
313 if tag_byte != layout::TAG_CLOSURE {
314 return Err(format!("Not a closure: tag={}", tag_byte));
315 }
316
317 let code_ptr = *(callee.add(layout::CLOSURE_CODE_PTR_OFFSET as usize) as *const usize);
318 let num_captured =
319 *(callee.add(layout::CLOSURE_NUM_CAPTURED_OFFSET as usize) as *const u16);
320 let name = lookup_lambda(code_ptr);
321
322 if crate::debug::trace_level() >= crate::debug::TraceLevel::Calls {
323 eprintln!(
324 "[trace] CALL {} callee={:?} arg={:?} ({} captures)",
325 name.as_deref().unwrap_or("unknown"),
326 callee,
327 arg,
328 num_captured
329 );
330 }
331
332 let func: unsafe extern "C" fn(
334 *mut crate::context::VMContext,
335 *mut u8,
336 *mut u8,
337 ) -> *mut u8 = std::mem::transmute(code_ptr);
338 let result = func(self.vmctx, callee, arg);
339
340 if crate::debug::trace_level() >= crate::debug::TraceLevel::Calls {
341 eprintln!(
342 "[trace] RET {} result={:?}",
343 name.as_deref().unwrap_or("unknown"),
344 result
345 );
346 }
347
348 if !result.is_null() && crate::debug::trace_level() >= crate::debug::TraceLevel::Heap {
349 heap_validate(result).map_err(|e| format!("Result validation failed: {}", e))?;
350 }
351
352 Ok(result)
353 }
354}
355
356pub unsafe fn heap_validate_deep(ptr: *const u8) -> Result<(), HeapError> {
362 heap_validate(ptr)?;
364
365 let tag_byte = *ptr.add(heap_layout::OFFSET_TAG);
366 match heap_layout::HeapTag::from_byte(tag_byte) {
367 Some(heap_layout::HeapTag::Con) => {
368 let num_fields = *(ptr.add(layout::CON_NUM_FIELDS_OFFSET as usize) as *const u16);
369 for i in 0..num_fields as usize {
370 let field =
371 *(ptr.add(layout::CON_FIELDS_OFFSET as usize + 8 * i) as *const *const u8);
372 if field.is_null() {
373 continue;
374 }
375 let field_tag = *field.add(heap_layout::OFFSET_TAG);
376 if heap_layout::HeapTag::from_byte(field_tag).is_none() {
377 return Err(HeapError::InvalidFieldTag {
378 index: i,
379 tag: field_tag,
380 });
381 }
382 }
383 }
384 Some(heap_layout::HeapTag::Closure) => {
385 let num_captured =
386 *(ptr.add(layout::CLOSURE_NUM_CAPTURED_OFFSET as usize) as *const u16);
387 for i in 0..num_captured as usize {
388 let cap = *(ptr.add(layout::CLOSURE_CAPTURED_OFFSET as usize + 8 * i)
389 as *const *const u8);
390 if cap.is_null() {
391 continue;
392 }
393 let cap_tag = *cap.add(heap_layout::OFFSET_TAG);
394 if heap_layout::HeapTag::from_byte(cap_tag).is_none() {
395 return Err(HeapError::InvalidFieldTag {
396 index: i,
397 tag: cap_tag,
398 });
399 }
400 }
401 }
402 _ => {}
403 }
404 Ok(())
405}
406
/// Verbosity of runtime tracing, ordered from quietest to noisiest so that
/// levels can be compared with `>=`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum TraceLevel {
    /// No tracing.
    Off,
    /// Log closure calls and returns.
    Calls,
    /// Additionally log scope activity.
    Scope,
    /// Additionally validate heap objects.
    Heap,
}

/// The active trace level, read once from the `TIDEPOOL_TRACE` environment
/// variable (`"calls"`, `"scope"`, or `"heap"`; anything else — including
/// an unset variable — means `Off`) and cached for the process lifetime.
pub fn trace_level() -> TraceLevel {
    use std::sync::OnceLock;
    static LEVEL: OnceLock<TraceLevel> = OnceLock::new();
    *LEVEL.get_or_init(|| {
        match std::env::var("TIDEPOOL_TRACE").ok().as_deref() {
            Some("calls") => TraceLevel::Calls,
            Some("scope") => TraceLevel::Scope,
            Some("heap") => TraceLevel::Heap,
            _ => TraceLevel::Off,
        }
    })
}