1use tidepool_heap::layout;
14use std::collections::HashMap;
15use std::sync::Mutex;
16
// Process-wide registry mapping lambda code pointers to human-readable names.
// `None` until `set_lambda_registry` installs one; read by `lookup_lambda`.
static LAMBDA_REGISTRY: Mutex<Option<LambdaRegistry>> = Mutex::new(None);
20
/// Maps a lambda's code pointer (its entry address, as a `usize`) to a
/// human-readable name, used to annotate closures in heap diagnostics.
#[derive(Default)]
pub struct LambdaRegistry {
    // Keyed by the raw code-pointer value; values are display names.
    entries: HashMap<usize, String>,
}
30
31impl LambdaRegistry {
32 pub fn new() -> Self {
33 Self::default()
34 }
35
36 pub fn register(&mut self, code_ptr: usize, name: String) {
38 self.entries.insert(code_ptr, name);
39 }
40
41 pub fn lookup(&self, code_ptr: usize) -> Option<&str> {
43 self.entries.get(&code_ptr).map(|s| s.as_str())
44 }
45
46 pub fn len(&self) -> usize {
48 self.entries.len()
49 }
50
51 pub fn is_empty(&self) -> bool {
52 self.entries.is_empty()
53 }
54}
55
56pub fn set_lambda_registry(registry: LambdaRegistry) -> Option<LambdaRegistry> {
58 let mut guard = LAMBDA_REGISTRY.lock().unwrap();
59 guard.replace(registry)
60}
61
62pub fn clear_lambda_registry() -> Option<LambdaRegistry> {
64 LAMBDA_REGISTRY.lock().unwrap().take()
65}
66
67pub fn lookup_lambda(code_ptr: usize) -> Option<String> {
69 let guard = LAMBDA_REGISTRY.lock().unwrap();
70 guard
71 .as_ref()
72 .and_then(|r| r.lookup(code_ptr))
73 .map(|s| s.to_string())
74}
75
76pub unsafe fn heap_describe(ptr: *const u8) -> String {
90 if ptr.is_null() {
91 return "NULL".to_string();
92 }
93
94 let tag_byte = *ptr.add(layout::OFFSET_TAG);
95 let size = std::ptr::read_unaligned(ptr.add(layout::OFFSET_SIZE) as *const u16);
96
97 match layout::HeapTag::from_byte(tag_byte) {
98 Some(layout::HeapTag::Lit) => {
99 let lit_tag = *ptr.add(layout::LIT_TAG_OFFSET);
100 let value = *(ptr.add(layout::LIT_VALUE_OFFSET) as *const i64);
101 let tag_name = layout::LitTag::from_byte(lit_tag)
102 .map(|t| t.to_string())
103 .unwrap_or_else(|| format!("?{}", lit_tag));
104 format!("Lit({}, {})", tag_name, value)
105 }
106 Some(layout::HeapTag::Con) => {
107 let con_tag = *(ptr.add(layout::CON_TAG_OFFSET) as *const u64);
108 let num_fields = *(ptr.add(layout::CON_NUM_FIELDS_OFFSET) as *const u16);
109 format!("Con(tag={}, {} fields, size={})", con_tag, num_fields, size)
110 }
111 Some(layout::HeapTag::Closure) => {
112 let code_ptr = *(ptr.add(layout::CLOSURE_CODE_PTR_OFFSET) as *const usize);
113 let num_captured = *(ptr.add(layout::CLOSURE_NUM_CAPTURED_OFFSET) as *const u16);
114 let name = lookup_lambda(code_ptr);
115 let name_str = name
116 .as_deref()
117 .map(|n| format!(" [{}]", n))
118 .unwrap_or_default();
119 format!(
120 "Closure(code=0x{:x}, {} captures, size={}){}",
121 code_ptr, num_captured, size, name_str
122 )
123 }
124 Some(layout::HeapTag::Thunk) => {
125 let state = *ptr.add(layout::THUNK_STATE_OFFSET);
126 format!("Thunk(state={}, size={})", state, size)
127 }
128 None => {
129 format!("INVALID(tag={}, size={}, ptr={:?})", tag_byte, size, ptr)
130 }
131 }
132}
133
/// Structural problems detected by `heap_validate` / `heap_validate_deep`.
//
// `Clone`/`PartialEq`/`Eq` are derived so callers (and tests) can compare
// and store errors; this is purely additive to the public API.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum HeapError {
    /// The object pointer itself was null.
    NullPointer,
    /// The header tag byte is not a known heap tag.
    InvalidTag(u8),
    /// The header records a size of zero.
    ZeroSize,
    /// A closure's code pointer is null.
    NullCodePtr,
    /// The recorded size is too small for the object's payload.
    SizeMismatch { expected_min: u16, actual: u16 },
    /// A constructor field or closure capture pointer is null.
    NullField { index: usize },
    /// A constructor field or closure capture points at an object whose
    /// tag byte is not recognized.
    InvalidFieldTag { index: usize, tag: u8 },
}
151
152impl std::fmt::Display for HeapError {
153 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
154 match self {
155 HeapError::NullPointer => write!(f, "null pointer"),
156 HeapError::InvalidTag(t) => write!(f, "invalid heap tag: {}", t),
157 HeapError::ZeroSize => write!(f, "zero size"),
158 HeapError::NullCodePtr => write!(f, "null code pointer in closure"),
159 HeapError::SizeMismatch { expected_min, actual } => {
160 write!(f, "size mismatch: expected >= {}, got {}", expected_min, actual)
161 }
162 HeapError::NullField { index } => write!(f, "null pointer in field {}", index),
163 HeapError::InvalidFieldTag { index, tag } => {
164 write!(f, "field {} has invalid tag: {}", index, tag)
165 }
166 }
167 }
168}
169
170pub unsafe fn heap_validate(ptr: *const u8) -> Result<(), HeapError> {
185 if ptr.is_null() {
186 return Err(HeapError::NullPointer);
187 }
188
189 let tag_byte = *ptr.add(layout::OFFSET_TAG);
190 let size = std::ptr::read_unaligned(ptr.add(layout::OFFSET_SIZE) as *const u16);
191
192 if size == 0 {
193 return Err(HeapError::ZeroSize);
194 }
195
196 match layout::HeapTag::from_byte(tag_byte) {
197 None => return Err(HeapError::InvalidTag(tag_byte)),
198 Some(layout::HeapTag::Closure) => {
199 let code_ptr = *(ptr.add(layout::CLOSURE_CODE_PTR_OFFSET) as *const usize);
200 if code_ptr == 0 {
201 return Err(HeapError::NullCodePtr);
202 }
203 let num_captured = *(ptr.add(layout::CLOSURE_NUM_CAPTURED_OFFSET) as *const u16);
204 let expected_min = (24 + 8 * num_captured as usize) as u16;
205 if size < expected_min {
206 return Err(HeapError::SizeMismatch {
207 expected_min,
208 actual: size,
209 });
210 }
211 }
212 Some(layout::HeapTag::Con) => {
213 let num_fields = *(ptr.add(layout::CON_NUM_FIELDS_OFFSET) as *const u16);
214 let expected_min = (24 + 8 * num_fields as usize) as u16;
215 if size < expected_min {
216 return Err(HeapError::SizeMismatch {
217 expected_min,
218 actual: size,
219 });
220 }
221 }
222 Some(layout::HeapTag::Lit) => {
223 if size < layout::LIT_SIZE as u16 {
224 return Err(HeapError::SizeMismatch {
225 expected_min: layout::LIT_SIZE as u16,
226 actual: size,
227 });
228 }
229 }
230 Some(layout::HeapTag::Thunk) => {
231 if size < 24 {
233 return Err(HeapError::SizeMismatch {
234 expected_min: 24,
235 actual: size,
236 });
237 }
238 }
239 }
240
241 Ok(())
242}
243
244pub unsafe fn heap_validate_deep(ptr: *const u8) -> Result<(), HeapError> {
250 heap_validate(ptr)?;
251
252 let tag_byte = *ptr.add(layout::OFFSET_TAG);
253 match layout::HeapTag::from_byte(tag_byte) {
254 Some(layout::HeapTag::Con) => {
255 let num_fields = *(ptr.add(layout::CON_NUM_FIELDS_OFFSET) as *const u16);
256 for i in 0..num_fields as usize {
257 let field =
258 *(ptr.add(layout::CON_FIELDS_OFFSET + 8 * i) as *const *const u8);
259 if field.is_null() {
260 return Err(HeapError::NullField { index: i });
261 }
262 let field_tag = *field.add(layout::OFFSET_TAG);
263 if layout::HeapTag::from_byte(field_tag).is_none() {
264 return Err(HeapError::InvalidFieldTag {
265 index: i,
266 tag: field_tag,
267 });
268 }
269 }
270 }
271 Some(layout::HeapTag::Closure) => {
272 let num_captured = *(ptr.add(layout::CLOSURE_NUM_CAPTURED_OFFSET) as *const u16);
273 for i in 0..num_captured as usize {
274 let cap =
275 *(ptr.add(layout::CLOSURE_CAPTURED_OFFSET + 8 * i) as *const *const u8);
276 if cap.is_null() {
277 return Err(HeapError::NullField { index: i });
278 }
279 let cap_tag = *cap.add(layout::OFFSET_TAG);
280 if layout::HeapTag::from_byte(cap_tag).is_none() {
281 return Err(HeapError::InvalidFieldTag {
282 index: i,
283 tag: cap_tag,
284 });
285 }
286 }
287 }
288 _ => {}
289 }
290
291 Ok(())
292}
293
/// Runtime tracing verbosity, ordered from least to most verbose so that
/// levels can be compared with `<`/`>=` (e.g. `level >= TraceLevel::Calls`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum TraceLevel {
    /// No tracing (the default when `TIDEPOOL_TRACE` is unset/unknown).
    #[default]
    Off,
    /// Trace function/closure calls.
    Calls,
    /// Trace calls plus heap activity.
    Heap,
}
303
304pub fn trace_level() -> TraceLevel {
306 use std::sync::OnceLock;
307 static LEVEL: OnceLock<TraceLevel> = OnceLock::new();
308 *LEVEL.get_or_init(|| match std::env::var("TIDEPOOL_TRACE").as_deref() {
309 Ok("calls") => TraceLevel::Calls,
310 Ok("heap") => TraceLevel::Heap,
311 _ => TraceLevel::Off,
312 })
313}