ghostscope_compiler/ebpf/dwarf_bridge.rs

1//! DWARF debugging information bridge
2//!
3//! This module handles integration with DWARF debug information for
4//! variable type resolution and evaluation result processing.
5
6use super::context::{CodeGenError, EbpfContext, Result};
7use ghostscope_dwarf::{
8    ComputeStep, DirectValueResult, EvaluationResult, LocationResult, MemoryAccessSize, TypeInfo,
9    VariableWithEvaluation,
10};
11use ghostscope_process::cookie;
12use inkwell::values::{BasicValueEnum, IntValue, PointerValue};
13use tracing::{debug, warn};
14
15impl<'ctx> EbpfContext<'ctx> {
    /// Compute a stable cookie for a module when per-PID offsets are unavailable (via coordinator).
    ///
    /// The cookie is derived purely from `module_path` via `cookie::from_path`, so the
    /// same path always yields the same value. It is later passed to
    /// `generate_runtime_address_from_offsets` as the module identity for the
    /// offsets-map lookup.
    fn fallback_cookie_from_module_path(&self, module_path: &str) -> u64 {
        cookie::from_path(module_path)
    }
20
21    /// Compute section code for an address within a module (text=0, rodata=1, data=2, bss=3).
22    fn section_code_for_address(&mut self, module_path: &str, link_addr: u64) -> u8 {
23        if let Some(analyzer_ptr) = self.process_analyzer {
24            let analyzer = unsafe { &mut *analyzer_ptr };
25            if let Some(st) = analyzer.classify_section_for_address(module_path, link_addr) {
26                return match st {
27                    ghostscope_dwarf::core::SectionType::Text => 0,
28                    ghostscope_dwarf::core::SectionType::Rodata => 1,
29                    ghostscope_dwarf::core::SectionType::Data => 2,
30                    ghostscope_dwarf::core::SectionType::Bss => 3,
31                    _ => 2,
32                };
33            }
34        }
35        2
36    }
37
    /// Compute cookie for module using coordinator policy.
    ///
    /// NOTE: currently this always delegates to the path-derived fallback
    /// (`fallback_cookie_from_module_path`); no coordinator lookup is performed here.
    fn cookie_for_module_or_fallback(&mut self, module_path: &str) -> u64 {
        self.fallback_cookie_from_module_path(module_path)
    }
42    /// Helper: unwrap typedef/qualified wrappers to the underlying type
43    fn unwrap_type_aliases(mut t: &TypeInfo) -> &TypeInfo {
44        loop {
45            match t {
46                TypeInfo::TypedefType {
47                    underlying_type, ..
48                } => t = underlying_type.as_ref(),
49                TypeInfo::QualifiedType {
50                    underlying_type, ..
51                } => t = underlying_type.as_ref(),
52                _ => break,
53            }
54        }
55        t
56    }
57
58    /// Helper: determine if a DWARF type represents an aggregate (struct/union/array)
59    fn is_aggregate_type(&self, t: &TypeInfo) -> bool {
60        matches!(
61            Self::unwrap_type_aliases(t),
62            TypeInfo::StructType { .. } | TypeInfo::UnionType { .. } | TypeInfo::ArrayType { .. }
63        )
64    }
65    /// Convert EvaluationResult to LLVM value
66    pub fn evaluate_result_to_llvm_value(
67        &mut self,
68        evaluation_result: &EvaluationResult,
69        dwarf_type: &TypeInfo,
70        var_name: &str,
71        pc_address: u64,
72    ) -> Result<BasicValueEnum<'ctx>> {
73        debug!(
74            "Converting EvaluationResult to LLVM value for variable: {}",
75            var_name
76        );
77        debug!("Evaluation context PC address: 0x{:x}", pc_address);
78
79        // Get pt_regs parameter
80        let pt_regs_ptr = self.get_pt_regs_parameter()?;
81
82        match evaluation_result {
83            EvaluationResult::DirectValue(direct) => {
84                self.generate_direct_value(direct, pt_regs_ptr)
85            }
86            EvaluationResult::MemoryLocation(location) => {
87                self.generate_memory_location(location, pt_regs_ptr, dwarf_type)
88            }
89            EvaluationResult::Optimized => {
90                debug!("Variable {} is optimized out", var_name);
91                // Return a placeholder value for optimized out variables
92                Ok(self.context.i64_type().const_zero().into())
93            }
94            EvaluationResult::Composite(members) => {
95                debug!(
96                    "Variable {} is composite with {} members",
97                    var_name,
98                    members.len()
99                );
100                // For now, just return the first member if available
101                if let Some(first_member) = members.first() {
102                    self.evaluate_result_to_llvm_value(
103                        &first_member.location,
104                        dwarf_type,
105                        var_name,
106                        pc_address,
107                    )
108                } else {
109                    Ok(self.context.i64_type().const_zero().into())
110                }
111            }
112        }
113    }
114
115    /// Variant that allows passing an explicit module hint for offsets lookup
116    pub fn evaluation_result_to_address_with_hint(
117        &mut self,
118        evaluation_result: &EvaluationResult,
119        status_ptr: Option<PointerValue<'ctx>>,
120        module_hint: Option<&str>,
121    ) -> Result<IntValue<'ctx>> {
122        // Policy note:
123        // - Link-time addresses (DW_OP_addr or constant-foldable address expressions) are
124        //   always rebased using per-module section offsets (ASLR) to get a runtime address.
125        // - Runtime-derived addresses (register/stack-relative or computed via dereference)
126        //   are used as-is and are NOT rebased.
127        // The caller signals which path we are on by providing the original evaluation_result.
128        let pt_regs_ptr = self.get_pt_regs_parameter()?;
129        // Default assumption: offsets are available unless a lookup proves otherwise.
130        self.store_offsets_found_const(true)?;
131
132        match evaluation_result {
133            EvaluationResult::MemoryLocation(LocationResult::Address(addr)) => {
134                // Unified: always attempt runtime rebasing via proc_module_offsets
135                let ctx = self.get_compile_time_context()?;
136                let module_for_offsets = module_hint
137                    .map(|s| s.to_string())
138                    .or_else(|| self.current_resolved_var_module_path.clone())
139                    .unwrap_or_else(|| ctx.module_path.clone());
140                let st_code = self.section_code_for_address(&module_for_offsets, *addr);
141                let cookie = self.cookie_for_module_or_fallback(&module_for_offsets);
142                let link_val = self.context.i64_type().const_int(*addr, false);
143                let (rt_addr, found_flag) =
144                    self.generate_runtime_address_from_offsets(link_val, st_code, cookie)?;
145                if let Some(sp) = status_ptr {
146                    let is_miss = self
147                        .builder
148                        .build_int_compare(
149                            inkwell::IntPredicate::EQ,
150                            found_flag,
151                            self.context.bool_type().const_zero(),
152                            "is_off_miss",
153                        )
154                        .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
155                    let cur_status = self
156                        .builder
157                        .build_load(self.context.i8_type(), sp, "cur_status")
158                        .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
159                    let is_ok = self
160                        .builder
161                        .build_int_compare(
162                            inkwell::IntPredicate::EQ,
163                            cur_status.into_int_value(),
164                            self.context.i8_type().const_zero(),
165                            "status_is_ok",
166                        )
167                        .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
168                    let should_store = self
169                        .builder
170                        .build_and(is_miss, is_ok, "store_offsets_unavail")
171                        .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
172                    let new_status = self
173                        .builder
174                        .build_select(
175                            should_store,
176                            self.context
177                                .i8_type()
178                                .const_int(
179                                    ghostscope_protocol::VariableStatus::OffsetsUnavailable as u64,
180                                    false,
181                                )
182                                .into(),
183                            cur_status,
184                            "new_status",
185                        )
186                        .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
187                    self.builder
188                        .build_store(sp, new_status)
189                        .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
190                }
191                self.store_offsets_found_flag(found_flag)?;
192                self.current_resolved_var_module_path = None;
193                Ok(rt_addr)
194            }
195            EvaluationResult::MemoryLocation(LocationResult::RegisterAddress {
196                register,
197                offset,
198                ..
199            }) => {
200                let reg_val = self.load_register_value(*register, pt_regs_ptr)?;
201                if let BasicValueEnum::IntValue(reg_i) = reg_val {
202                    if let Some(ofs) = offset {
203                        let ofs_val = self.context.i64_type().const_int(*ofs as u64, true);
204                        let sum = self
205                            .builder
206                            .build_int_add(reg_i, ofs_val, "addr_with_offset")
207                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
208                        Ok(sum)
209                    } else {
210                        Ok(reg_i)
211                    }
212                } else {
213                    Err(CodeGenError::RegisterMappingError(
214                        "Register value is not integer".to_string(),
215                    ))
216                }
217            }
218            EvaluationResult::MemoryLocation(LocationResult::ComputedLocation { steps }) => {
219                // Try to fold constant-only address expressions (e.g., global + const offset)
220                // If foldable, treat as link-time address and apply ASLR offsets via map.
221                let mut const_stack: Vec<i64> = Vec::new();
222                let mut foldable = true;
223                for s in steps.iter() {
224                    match s {
225                        ComputeStep::PushConstant(v) => const_stack.push(*v),
226                        ComputeStep::Add => {
227                            if const_stack.len() >= 2 {
228                                let b = const_stack.pop().unwrap();
229                                let a = const_stack.pop().unwrap();
230                                const_stack.push(a.saturating_add(b));
231                            } else {
232                                foldable = false;
233                                break;
234                            }
235                        }
236                        // Any register load or deref means runtime-derived address; not foldable
237                        ComputeStep::LoadRegister(_) | ComputeStep::Dereference { .. } => {
238                            foldable = false;
239                            break;
240                        }
241                        _ => {
242                            // Unknown/non-add op: treat as non-foldable
243                            foldable = false;
244                            break;
245                        }
246                    }
247                }
248
249                if foldable && const_stack.len() == 1 {
250                    let link_addr_u = const_stack[0] as u64;
251                    let ctx = self.get_compile_time_context()?;
252                    let module_for_offsets = module_hint
253                        .map(|s| s.to_string())
254                        .or_else(|| self.current_resolved_var_module_path.clone())
255                        .unwrap_or_else(|| ctx.module_path.clone());
256                    let st_code = self.section_code_for_address(&module_for_offsets, link_addr_u);
257                    let cookie = self.cookie_for_module_or_fallback(&module_for_offsets);
258                    let link_val = self.context.i64_type().const_int(link_addr_u, false);
259                    let (rt_addr, found_flag) =
260                        self.generate_runtime_address_from_offsets(link_val, st_code, cookie)?;
261                    if let Some(sp) = status_ptr {
262                        let is_miss = self
263                            .builder
264                            .build_int_compare(
265                                inkwell::IntPredicate::EQ,
266                                found_flag,
267                                self.context.bool_type().const_zero(),
268                                "is_off_miss",
269                            )
270                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
271                        let cur_status = self
272                            .builder
273                            .build_load(self.context.i8_type(), sp, "cur_status")
274                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
275                        let is_ok = self
276                            .builder
277                            .build_int_compare(
278                                inkwell::IntPredicate::EQ,
279                                cur_status.into_int_value(),
280                                self.context.i8_type().const_zero(),
281                                "status_is_ok",
282                            )
283                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
284                        let should_store = self
285                            .builder
286                            .build_and(is_miss, is_ok, "store_offsets_unavail")
287                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
288                        let new_status = self
289                            .builder
290                            .build_select(
291                                should_store,
292                                self.context
293                                    .i8_type()
294                                    .const_int(
295                                        ghostscope_protocol::VariableStatus::OffsetsUnavailable
296                                            as u64,
297                                        false,
298                                    )
299                                    .into(),
300                                cur_status,
301                                "new_status",
302                            )
303                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
304                        self.builder
305                            .build_store(sp, new_status)
306                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
307                    }
308                    self.current_resolved_var_module_path = None;
309                    return Ok(rt_addr);
310                }
311
312                // Attempt: if steps start with PushConstant(base) and first dynamic op is Dereference
313                // (with no LoadRegister before it), apply ASLR offsets to base and continue
314                if let Some(ComputeStep::PushConstant(base_const)) = steps.first() {
315                    // Scan until first Dereference or LoadRegister
316                    let mut saw_reg = false;
317                    let mut saw_deref = false;
318                    for s in &steps[1..] {
319                        match s {
320                            ComputeStep::LoadRegister(_) => {
321                                saw_reg = true;
322                                break;
323                            }
324                            ComputeStep::Dereference { .. } => {
325                                saw_deref = true;
326                                break;
327                            }
328                            _ => {}
329                        }
330                    }
331                    if saw_deref && !saw_reg {
332                        let link_addr_u = *base_const as u64;
333                        let ctx = self.get_compile_time_context()?;
334                        let module_for_offsets = module_hint
335                            .map(|s| s.to_string())
336                            .or_else(|| self.current_resolved_var_module_path.clone())
337                            .unwrap_or_else(|| ctx.module_path.clone());
338                        let st_code =
339                            self.section_code_for_address(&module_for_offsets, link_addr_u);
340                        let cookie = self.cookie_for_module_or_fallback(&module_for_offsets);
341                        let link_val = self.context.i64_type().const_int(link_addr_u, false);
342                        let (rt, found_flag) =
343                            self.generate_runtime_address_from_offsets(link_val, st_code, cookie)?;
344                        if let Some(sp) = status_ptr {
345                            let is_miss = self
346                                .builder
347                                .build_int_compare(
348                                    inkwell::IntPredicate::EQ,
349                                    found_flag,
350                                    self.context.bool_type().const_zero(),
351                                    "is_off_miss",
352                                )
353                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
354                            let cur_status = self
355                                .builder
356                                .build_load(self.context.i8_type(), sp, "cur_status")
357                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
358                            let is_ok = self
359                                .builder
360                                .build_int_compare(
361                                    inkwell::IntPredicate::EQ,
362                                    cur_status.into_int_value(),
363                                    self.context.i8_type().const_zero(),
364                                    "status_is_ok",
365                                )
366                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
367                            let should_store = self
368                                .builder
369                                .build_and(is_miss, is_ok, "store_offsets_unavail")
370                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
371                            let new_status = self
372                                .builder
373                                .build_select(
374                                    should_store,
375                                    self.context
376                                        .i8_type()
377                                        .const_int(
378                                            ghostscope_protocol::VariableStatus::OffsetsUnavailable
379                                                as u64,
380                                            false,
381                                        )
382                                        .into(),
383                                    cur_status,
384                                    "new_status",
385                                )
386                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
387                            self.builder
388                                .build_store(sp, new_status)
389                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
390                        }
391                        // Execute remaining steps with rt pre-pushed as base
392                        let rest = &steps[1..];
393                        let val = self.generate_compute_steps(
394                            rest,
395                            pt_regs_ptr,
396                            None,
397                            status_ptr,
398                            Some(rt),
399                        )?;
400                        if let BasicValueEnum::IntValue(i) = val {
401                            return Ok(i);
402                        } else {
403                            return Err(CodeGenError::LLVMError(
404                                "Computed location did not produce integer".to_string(),
405                            ));
406                        }
407                    }
408                }
409
410                // Fallback: execute steps at runtime and use the result directly (no offsets)
411                let val =
412                    self.generate_compute_steps(steps, pt_regs_ptr, None, status_ptr, None)?;
413                if let BasicValueEnum::IntValue(i) = val {
414                    Ok(i)
415                } else {
416                    Err(CodeGenError::LLVMError(
417                        "Computed location did not produce integer".to_string(),
418                    ))
419                }
420            }
421            _ => Err(CodeGenError::NotImplemented(
422                "Unable to compute address from evaluation result".to_string(),
423            )),
424        }
425    }
426
427    /// Convert DWARF type size to MemoryAccessSize
428    fn dwarf_type_to_memory_access_size(&self, dwarf_type: &TypeInfo) -> MemoryAccessSize {
429        let size = Self::get_dwarf_type_size(dwarf_type);
430        match size {
431            1 => MemoryAccessSize::U8,
432            2 => MemoryAccessSize::U16,
433            4 => MemoryAccessSize::U32,
434            8 => MemoryAccessSize::U64,
435            _ => MemoryAccessSize::U64, // Default to U64 for unknown sizes
436        }
437    }
438
439    /// Generate LLVM IR for direct value result
440    fn generate_direct_value(
441        &mut self,
442        direct: &DirectValueResult,
443        pt_regs_ptr: PointerValue<'ctx>,
444    ) -> Result<BasicValueEnum<'ctx>> {
445        match direct {
446            DirectValueResult::Constant(value) => {
447                debug!("Generating constant: {}", value);
448                Ok(self
449                    .context
450                    .i64_type()
451                    .const_int(*value as u64, true)
452                    .into())
453            }
454
455            DirectValueResult::ImplicitValue(bytes) => {
456                debug!("Generating implicit value: {} bytes", bytes.len());
457                // Convert bytes to integer value (little-endian)
458                let mut value: u64 = 0;
459                for (i, &byte) in bytes.iter().enumerate().take(8) {
460                    value |= (byte as u64) << (i * 8);
461                }
462                Ok(self.context.i64_type().const_int(value, false).into())
463            }
464
465            DirectValueResult::RegisterValue(reg_num) => {
466                debug!("Generating register value: {}", reg_num);
467                let reg_value = self.load_register_value(*reg_num, pt_regs_ptr)?;
468                Ok(reg_value)
469            }
470
471            DirectValueResult::ComputedValue { steps, result_size } => {
472                debug!("Generating computed value: {} steps", steps.len());
473                let status_ptr = if self.condition_context_active {
474                    Some(self.get_or_create_cond_error_global())
475                } else {
476                    None
477                };
478                self.generate_compute_steps(
479                    steps,
480                    pt_regs_ptr,
481                    Some(*result_size),
482                    status_ptr,
483                    None,
484                )
485            }
486        }
487    }
488
489    /// Generate LLVM IR for memory location result
490    fn generate_memory_location(
491        &mut self,
492        location: &LocationResult,
493        pt_regs_ptr: PointerValue<'ctx>,
494        dwarf_type: &TypeInfo,
495    ) -> Result<BasicValueEnum<'ctx>> {
496        match location {
497            // Policy note:
498            // We decide ASLR rebasing based on the DWARF evaluation RESULT SHAPE, not a
499            // "global variable" tag. Whenever DWARF yields a link-time address
500            // (LocationResult::Address) — including file-scope globals, static locals,
501            // rodata/data/bss, or any constant-folded address — we MUST apply per-module
502            // section offsets (.text/.rodata/.data/.bss) to obtain the runtime address.
503            // Conversely, for runtime-derived addresses (RegisterAddress or computed from
504            // registers/dereferences), we DO NOT rebase.
505            LocationResult::Address(addr) => {
506                debug!("Generating absolute address: 0x{:x}", addr);
507                // Convert link-time address to runtime address using ASLR offsets when available
508                let module_hint = self.current_resolved_var_module_path.clone();
509                let status_ptr = if self.condition_context_active {
510                    Some(self.get_or_create_cond_error_global())
511                } else {
512                    None
513                };
514                let eval = ghostscope_dwarf::EvaluationResult::MemoryLocation(
515                    ghostscope_dwarf::LocationResult::Address(*addr),
516                );
517                let rt_addr = self.evaluation_result_to_address_with_hint(
518                    &eval,
519                    status_ptr,
520                    module_hint.as_deref(),
521                )?;
522                // Aggregate types (struct/union/array) are represented as pointers in expressions
523                if self.is_aggregate_type(dwarf_type) {
524                    let ptr_ty = self.context.ptr_type(inkwell::AddressSpace::default());
525                    let as_ptr = self
526                        .builder
527                        .build_int_to_ptr(rt_addr, ptr_ty, "aggregate_addr_as_ptr")
528                        .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
529                    return Ok(as_ptr.into());
530                }
531                // Use DWARF type size for memory access
532                let access_size = self.dwarf_type_to_memory_access_size(dwarf_type);
533                if self.condition_context_active {
534                    self.generate_memory_read_with_status(rt_addr, access_size)
535                } else {
536                    self.generate_memory_read(rt_addr, access_size)
537                }
538            }
539
540            LocationResult::RegisterAddress {
541                register,
542                offset,
543                size,
544            } => {
545                debug!(
546                    "Generating register address: reg{} {:+}",
547                    register,
548                    offset.unwrap_or(0)
549                );
550
551                // Load register value
552                let reg_value = self.load_register_value(*register, pt_regs_ptr)?;
553
554                // Add offset if present
555                let final_addr = if let Some(offset) = offset {
556                    let offset_value = self.context.i64_type().const_int(*offset as u64, true);
557                    if let BasicValueEnum::IntValue(reg_int) = reg_value {
558                        self.builder
559                            .build_int_add(reg_int, offset_value, "addr_with_offset")
560                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?
561                    } else {
562                        return Err(CodeGenError::RegisterMappingError(
563                            "Register value is not integer".to_string(),
564                        ));
565                    }
566                } else if let BasicValueEnum::IntValue(reg_int) = reg_value {
567                    reg_int
568                } else {
569                    return Err(CodeGenError::RegisterMappingError(
570                        "Register value is not integer".to_string(),
571                    ));
572                };
573                // Aggregate types: return pointer instead of reading as scalar
574                if self.is_aggregate_type(dwarf_type) {
575                    let ptr_ty = self.context.ptr_type(inkwell::AddressSpace::default());
576                    let as_ptr = self
577                        .builder
578                        .build_int_to_ptr(final_addr, ptr_ty, "aggregate_addr_as_ptr")
579                        .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
580                    return Ok(as_ptr.into());
581                }
582                // Determine memory access size - prefer LocationResult size if available, otherwise use DWARF type
583                let access_size = size
584                    .map(|s| match s {
585                        1 => MemoryAccessSize::U8,
586                        2 => MemoryAccessSize::U16,
587                        4 => MemoryAccessSize::U32,
588                        _ => MemoryAccessSize::U64,
589                    })
590                    .unwrap_or_else(|| self.dwarf_type_to_memory_access_size(dwarf_type));
591
592                if self.condition_context_active {
593                    self.generate_memory_read_with_status(final_addr, access_size)
594                } else {
595                    self.generate_memory_read(final_addr, access_size)
596                }
597            }
598
599            LocationResult::ComputedLocation { steps } => {
600                debug!("Generating computed location: {} steps", steps.len());
601                // Execute steps to compute the address
602                let status_ptr = if self.condition_context_active {
603                    Some(self.get_or_create_cond_error_global())
604                } else {
605                    None
606                };
607                let addr_value =
608                    self.generate_compute_steps(steps, pt_regs_ptr, None, status_ptr, None)?;
609                if let BasicValueEnum::IntValue(addr) = addr_value {
610                    // For aggregate types, return pointer to address instead of loading a value
611                    if self.is_aggregate_type(dwarf_type) {
612                        let ptr_ty = self.context.ptr_type(inkwell::AddressSpace::default());
613                        let as_ptr = self
614                            .builder
615                            .build_int_to_ptr(addr, ptr_ty, "aggregate_addr_as_ptr")
616                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
617                        return Ok(as_ptr.into());
618                    }
619                    // Use DWARF type size for memory access
620                    let access_size = self.dwarf_type_to_memory_access_size(dwarf_type);
621                    if self.condition_context_active {
622                        self.generate_memory_read_with_status(addr, access_size)
623                    } else {
624                        self.generate_memory_read(addr, access_size)
625                    }
626                } else {
627                    Err(CodeGenError::LLVMError(
628                        "Address computation must return integer".to_string(),
629                    ))
630                }
631            }
632        }
633    }
634
    /// Execute a sequence of compute steps
    ///
    /// Lowers a DWARF-derived `ComputeStep` program to LLVM IR using a
    /// compile-time stack of `IntValue`s, and returns the single value left
    /// on the stack.
    ///
    /// - `steps`: the computation program to lower.
    /// - `pt_regs_ptr`: pointer to the pt_regs context read by `LoadRegister`.
    /// - `_result_size`: currently unused.
    /// - `status_ptr`: optional i8 status slot; `Dereference` steps record
    ///   `VariableStatus::NullDeref` into it, but only while it still holds Ok.
    /// - `initial_top`: optional value pushed onto the stack before execution.
    ///
    /// Errors on stack underflow, on unimplemented steps, and when the stack
    /// does not end with exactly one element.
    fn generate_compute_steps(
        &mut self,
        steps: &[ComputeStep],
        pt_regs_ptr: PointerValue<'ctx>,
        _result_size: Option<MemoryAccessSize>,
        status_ptr: Option<PointerValue<'ctx>>,
        initial_top: Option<IntValue<'ctx>>,
    ) -> Result<BasicValueEnum<'ctx>> {
        // Implement stack-based computation
        let mut stack: Vec<IntValue<'ctx>> = Vec::new();
        // Track a runtime null-pointer flag from dereference steps; when true, subsequent
        // arithmetic will be masked to zero to avoid reads at small offsets from NULL.
        let mut deref_null_flag: Option<inkwell::values::IntValue> = None;
        if let Some(top) = initial_top {
            stack.push(top);
        }

        for step in steps {
            match step {
                // Read a register value out of pt_regs and push it.
                ComputeStep::LoadRegister(reg_num) => {
                    let reg_value = self.load_register_value(*reg_num, pt_regs_ptr)?;
                    if let BasicValueEnum::IntValue(int_val) = reg_value {
                        stack.push(int_val);
                    } else {
                        return Err(CodeGenError::RegisterMappingError(format!(
                            "Register {reg_num} did not return integer value"
                        )));
                    }
                }

                // Push a sign-extended 64-bit constant.
                ComputeStep::PushConstant(value) => {
                    let const_val = self.context.i64_type().const_int(*value as u64, true);
                    stack.push(const_val);
                }

                ComputeStep::Add => {
                    // Pop order: `b` is the top of stack, `a` is below it.
                    if let (Some(b), Some(a)) = (stack.pop(), stack.pop()) {
                        let sum_val = self
                            .builder
                            .build_int_add(a, b, "add")
                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                        // If an earlier dereference produced NULL, force the sum to 0
                        // so later reads do not target small offsets from NULL.
                        if let Some(nf) = deref_null_flag {
                            let masked_bv = self
                                .builder
                                .build_select::<inkwell::values::BasicValueEnum<'ctx>, _>(
                                    nf,
                                    self.context.i64_type().const_zero().into(),
                                    sum_val.into(),
                                    "add_masked",
                                )
                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                            stack.push(masked_bv.into_int_value());
                        } else {
                            stack.push(sum_val);
                        }
                    } else {
                        return Err(CodeGenError::LLVMError(
                            "Stack underflow in Add".to_string(),
                        ));
                    }
                }

                // NOTE(review): unlike Add, the remaining arithmetic/bitwise ops do
                // not apply the deref_null_flag mask — presumably intentional (member
                // offsets are applied via Add), but worth confirming.
                ComputeStep::Sub => {
                    if let (Some(b), Some(a)) = (stack.pop(), stack.pop()) {
                        let result = self
                            .builder
                            .build_int_sub(a, b, "sub")
                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                        stack.push(result);
                    } else {
                        return Err(CodeGenError::LLVMError(
                            "Stack underflow in Sub".to_string(),
                        ));
                    }
                }

                ComputeStep::Mul => {
                    if let (Some(b), Some(a)) = (stack.pop(), stack.pop()) {
                        let result = self
                            .builder
                            .build_int_mul(a, b, "mul")
                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                        stack.push(result);
                    } else {
                        return Err(CodeGenError::LLVMError(
                            "Stack underflow in Mul".to_string(),
                        ));
                    }
                }

                ComputeStep::Div => {
                    if let (Some(b), Some(a)) = (stack.pop(), stack.pop()) {
                        // NOTE(review): no guard against a zero divisor here — confirm
                        // how the eBPF target/verifier handles signed division by zero.
                        let result = self
                            .builder
                            .build_int_signed_div(a, b, "div")
                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                        stack.push(result);
                    } else {
                        return Err(CodeGenError::LLVMError(
                            "Stack underflow in Div".to_string(),
                        ));
                    }
                }

                ComputeStep::And => {
                    if let (Some(b), Some(a)) = (stack.pop(), stack.pop()) {
                        let result = self
                            .builder
                            .build_and(a, b, "and")
                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                        stack.push(result);
                    } else {
                        return Err(CodeGenError::LLVMError(
                            "Stack underflow in BitwiseAnd".to_string(),
                        ));
                    }
                }

                ComputeStep::Or => {
                    if let (Some(b), Some(a)) = (stack.pop(), stack.pop()) {
                        let result = self
                            .builder
                            .build_or(a, b, "or")
                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                        stack.push(result);
                    } else {
                        return Err(CodeGenError::LLVMError(
                            "Stack underflow in BitwiseOr".to_string(),
                        ));
                    }
                }

                ComputeStep::Xor => {
                    if let (Some(b), Some(a)) = (stack.pop(), stack.pop()) {
                        let result = self
                            .builder
                            .build_xor(a, b, "xor")
                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                        stack.push(result);
                    } else {
                        return Err(CodeGenError::LLVMError(
                            "Stack underflow in BitwiseXor".to_string(),
                        ));
                    }
                }

                ComputeStep::Shl => {
                    if let (Some(b), Some(a)) = (stack.pop(), stack.pop()) {
                        let result = self
                            .builder
                            .build_left_shift(a, b, "shl")
                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                        stack.push(result);
                    } else {
                        return Err(CodeGenError::LLVMError(
                            "Stack underflow in ShiftLeft".to_string(),
                        ));
                    }
                }

                ComputeStep::Shr => {
                    if let (Some(b), Some(a)) = (stack.pop(), stack.pop()) {
                        // `false` selects a logical (unsigned) right shift.
                        let result = self
                            .builder
                            .build_right_shift(a, b, false, "shr")
                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                        stack.push(result);
                    } else {
                        return Err(CodeGenError::LLVMError(
                            "Stack underflow in ShiftRight".to_string(),
                        ));
                    }
                }

                // Dereference the address on top of the stack with a runtime NULL
                // guard. Control flow: addr==0 goes to `deref_null` (optionally
                // recording NullDeref status), otherwise `deref_read` performs the
                // read; both merge at `deref_cont` through a PHI.
                ComputeStep::Dereference { size } => {
                    if let Some(addr) = stack.pop() {
                        // Null guard: if addr == 0, set NullDeref (if status_ptr provided and current is Ok)
                        let zero64 = self.context.i64_type().const_zero();
                        let is_null = self
                            .builder
                            .build_int_compare(
                                inkwell::IntPredicate::EQ,
                                addr,
                                zero64,
                                "is_null_deref",
                            )
                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;

                        let cur_fn = self
                            .builder
                            .get_insert_block()
                            .unwrap()
                            .get_parent()
                            .unwrap();
                        let null_bb = self.context.append_basic_block(cur_fn, "deref_null");
                        let read_bb = self.context.append_basic_block(cur_fn, "deref_read");
                        let cont_bb = self.context.append_basic_block(cur_fn, "deref_cont");
                        self.builder
                            .build_conditional_branch(is_null, null_bb, read_bb)
                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;

                        // Null path: optionally set status=NullDeref if currently Ok, branch to cont
                        self.builder.position_at_end(null_bb);
                        let null_val = self.context.i64_type().const_zero();
                        if let Some(sp) = status_ptr {
                            let cur_status = self
                                .builder
                                .build_load(self.context.i8_type(), sp, "cur_status")
                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?
                                .into_int_value();
                            let is_ok = self
                                .builder
                                .build_int_compare(
                                    inkwell::IntPredicate::EQ,
                                    cur_status,
                                    self.context.i8_type().const_zero(),
                                    "status_is_ok",
                                )
                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                            let then_val = self.context.i8_type().const_int(
                                ghostscope_protocol::VariableStatus::NullDeref as u64,
                                false,
                            );
                            // Branchless status update: keep the first recorded error.
                            let new_status_bv = self
                                .builder
                                .build_select::<inkwell::values::BasicValueEnum<'ctx>, _>(
                                    is_ok,
                                    then_val.into(),
                                    cur_status.into(),
                                    "new_status",
                                )
                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                            self.builder
                                .build_store(sp, new_status_bv)
                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                        }
                        self.builder
                            .build_unconditional_branch(cont_bb)
                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;

                        // Read path: load pointer-sized value into tmp then branch to cont
                        self.builder.position_at_end(read_bb);
                        let access_size = *size;
                        // NOTE(review): an early `return Err` below leaves read_bb and
                        // cont_bb unterminated — presumably acceptable since the whole
                        // compilation aborts on error, but confirm no partial module is
                        // emitted on this path.
                        let loaded_bv = self.generate_memory_read(addr, access_size)?;
                        let loaded_int = if let BasicValueEnum::IntValue(int_val) = loaded_bv {
                            int_val
                        } else {
                            return Err(CodeGenError::LLVMError(
                                "Memory load did not return integer".to_string(),
                            ));
                        };
                        self.builder
                            .build_unconditional_branch(cont_bb)
                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;

                        // Continue at cont: create PHI to merge null/read values, push once
                        self.builder.position_at_end(cont_bb);
                        // NOTE(review): the PHI is typed i64 — assumes
                        // generate_memory_read yields an i64 for this `size`;
                        // confirm behavior for sub-8-byte access sizes.
                        let phi = self
                            .builder
                            .build_phi(self.context.i64_type(), "deref_phi")
                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                        phi.add_incoming(&[(&null_val, null_bb), (&loaded_int, read_bb)]);
                        let merged = phi.as_basic_value().into_int_value();
                        // Update null flag based on loaded pointer value being zero
                        let is_zero_ptr = self
                            .builder
                            .build_int_compare(
                                inkwell::IntPredicate::EQ,
                                merged,
                                self.context.i64_type().const_zero(),
                                "is_zero_ptr",
                            )
                            .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                        // Flag is sticky: once any dereference in the chain yields 0,
                        // it stays set for the rest of the step sequence.
                        deref_null_flag = Some(match deref_null_flag {
                            Some(prev) => self
                                .builder
                                .build_or(prev, is_zero_ptr, "null_or")
                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?,
                            None => is_zero_ptr,
                        });
                        if let (Some(sp), Some(nf)) = (status_ptr, deref_null_flag) {
                            // Only store NullDeref if currently OK and nf is true
                            let cur_status = self
                                .builder
                                .build_load(self.context.i8_type(), sp, "cur_status")
                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?
                                .into_int_value();
                            let is_ok = self
                                .builder
                                .build_int_compare(
                                    inkwell::IntPredicate::EQ,
                                    cur_status,
                                    self.context.i8_type().const_zero(),
                                    "status_is_ok2",
                                )
                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                            let should_store = self
                                .builder
                                .build_and(is_ok, nf, "store_null_deref_from_ptr")
                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                            let then_val = self.context.i8_type().const_int(
                                ghostscope_protocol::VariableStatus::NullDeref as u64,
                                false,
                            );
                            let new_status_bv = self
                                .builder
                                .build_select::<inkwell::values::BasicValueEnum<'ctx>, _>(
                                    should_store,
                                    then_val.into(),
                                    cur_status.into(),
                                    "new_status2",
                                )
                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                            self.builder
                                .build_store(sp, new_status_bv)
                                .map_err(|e| CodeGenError::LLVMError(e.to_string()))?;
                        }
                        stack.push(merged);
                    } else {
                        return Err(CodeGenError::LLVMError(
                            "Stack underflow in LoadMemory".to_string(),
                        ));
                    }
                }

                // Add catch-all for unimplemented operations
                _ => {
                    warn!("Unimplemented ComputeStep: {:?}", step);
                    return Err(CodeGenError::NotImplemented(format!(
                        "ComputeStep {step:?} not yet implemented"
                    )));
                }
            }
        }

        if stack.len() == 1 {
            Ok(stack.pop().unwrap().into())
        } else {
            Err(CodeGenError::LLVMError(format!(
                "Invalid stack state after computation: {} elements remaining",
                stack.len()
            )))
        }
    }
980
981    /// Query DWARF for complex expression (supports member access, array access, etc.)
982    pub fn query_dwarf_for_complex_expr(
983        &mut self,
984        expr: &crate::script::Expr,
985    ) -> Result<Option<VariableWithEvaluation>> {
986        use crate::script::Expr;
987
988        // Expand script alias variables inside the expression so downstream
989        // DWARF resolvers see the actual DWARF-based expression tree.
990        // Guard against self-referential or cyclic aliases.
991        fn expand_aliases(
992            ctx: &crate::ebpf::context::EbpfContext<'_>,
993            e: &crate::script::Expr,
994            visited: &mut std::collections::HashSet<String>,
995            depth: usize,
996        ) -> std::result::Result<crate::script::Expr, super::context::CodeGenError> {
997            use crate::script::Expr as E;
998            const MAX_DEPTH: usize = 64;
999            if depth > MAX_DEPTH {
1000                return Err(super::context::CodeGenError::TypeError(
1001                    "alias expansion depth exceeded (cycle?)".to_string(),
1002                ));
1003            }
1004            Ok(match e {
1005                E::Variable(name) => {
1006                    if ctx.alias_variable_exists(name) {
1007                        if !visited.insert(name.clone()) {
1008                            return Err(super::context::CodeGenError::TypeError(format!(
1009                                "alias cycle detected for '{name}'"
1010                            )));
1011                        }
1012                        if let Some(t) = ctx.get_alias_variable(name) {
1013                            let res = expand_aliases(ctx, &t, visited, depth + 1)?;
1014                            visited.remove(name);
1015                            res
1016                        } else {
1017                            e.clone()
1018                        }
1019                    } else {
1020                        e.clone()
1021                    }
1022                }
1023                E::MemberAccess(obj, field) => {
1024                    let base = expand_aliases(ctx, obj, visited, depth + 1)?;
1025                    E::MemberAccess(Box::new(base), field.clone())
1026                }
1027                E::ArrayAccess(arr, idx) => {
1028                    let base = expand_aliases(ctx, arr, visited, depth + 1)?;
1029                    let idx2 = expand_aliases(ctx, idx, visited, depth + 1)?;
1030                    E::ArrayAccess(Box::new(base), Box::new(idx2))
1031                }
1032                E::PointerDeref(inner) => {
1033                    let in2 = expand_aliases(ctx, inner, visited, depth + 1)?;
1034                    E::PointerDeref(Box::new(in2))
1035                }
1036                E::AddressOf(inner) => {
1037                    let in2 = expand_aliases(ctx, inner, visited, depth + 1)?;
1038                    E::AddressOf(Box::new(in2))
1039                }
1040                E::ChainAccess(chain) => {
1041                    if chain.is_empty() {
1042                        return Ok(e.clone());
1043                    }
1044                    let head = &chain[0];
1045                    if ctx.alias_variable_exists(head) {
1046                        if !visited.insert(head.clone()) {
1047                            return Err(super::context::CodeGenError::TypeError(format!(
1048                                "alias cycle detected for '{head}'"
1049                            )));
1050                        }
1051                        if let Some(alias_expr) = ctx.get_alias_variable(head) {
1052                            // Expand the alias head, then append member segments
1053                            let mut acc = expand_aliases(ctx, &alias_expr, visited, depth + 1)?;
1054                            for seg in &chain[1..] {
1055                                acc = E::MemberAccess(Box::new(acc), seg.clone());
1056                            }
1057                            visited.remove(head);
1058                            acc
1059                        } else {
1060                            e.clone()
1061                        }
1062                    } else {
1063                        e.clone()
1064                    }
1065                }
1066                E::BuiltinCall { name, args } => E::BuiltinCall {
1067                    name: name.clone(),
1068                    args: args
1069                        .iter()
1070                        .map(|a| expand_aliases(ctx, a, visited, depth + 1))
1071                        .collect::<std::result::Result<Vec<_>, _>>()?,
1072                },
1073                E::BinaryOp { left, op, right } => E::BinaryOp {
1074                    left: Box::new(expand_aliases(ctx, left, visited, depth + 1)?),
1075                    op: op.clone(),
1076                    right: Box::new(expand_aliases(ctx, right, visited, depth + 1)?),
1077                },
1078                _ => e.clone(),
1079            })
1080        }
1081
1082        let mut visited = std::collections::HashSet::new();
1083        let expanded = expand_aliases(self, expr, &mut visited, 0)?;
1084
1085        match &expanded {
1086            // Simple variable lookup
1087            Expr::Variable(var_name) => self.query_dwarf_for_variable(var_name),
1088
1089            // Member access: obj.field
1090            Expr::MemberAccess(obj_expr, field_name) => {
1091                self.query_dwarf_for_member_access(obj_expr, field_name)
1092            }
1093
1094            // Array access: arr[index]
1095            Expr::ArrayAccess(array_expr, index_expr) => {
1096                self.query_dwarf_for_array_access(array_expr, index_expr)
1097            }
1098
1099            // Chain access: person.name.first
1100            Expr::ChainAccess(chain) => self.query_dwarf_for_chain_access(chain),
1101
1102            // Pointer dereference: *ptr
1103            Expr::PointerDeref(expr) => self.query_dwarf_for_pointer_deref(expr),
1104
1105            // Other expression types are not supported for DWARF queries
1106            _ => Ok(None),
1107        }
1108    }
1109
1110    /// Query DWARF for variable information
1111    pub fn query_dwarf_for_variable(
1112        &mut self,
1113        var_name: &str,
1114    ) -> Result<Option<VariableWithEvaluation>> {
1115        if self.process_analyzer.is_none() {
1116            return Err(CodeGenError::DwarfError(
1117                "No DWARF analyzer available".to_string(),
1118            ));
1119        }
1120
1121        let context = self.get_compile_time_context()?;
1122        let pc_address = context.pc_address;
1123        let module_path = &context.module_path;
1124
1125        debug!(
1126            "Querying DWARF for variable '{}' at PC 0x{:x} in module '{}'",
1127            var_name, pc_address, module_path
1128        );
1129
1130        // Query DWARF analyzer for variable
1131        let analyzer = unsafe { &mut *(self.process_analyzer.unwrap()) };
1132
1133        let module_address = ghostscope_dwarf::ModuleAddress::new(
1134            std::path::PathBuf::from(module_path.clone()),
1135            pc_address,
1136        );
1137
1138        let module_path_owned = module_path.clone();
1139        let lookup_globals = |analyzer: &mut ghostscope_dwarf::DwarfAnalyzer| -> Result<
1140            Option<(std::path::PathBuf, VariableWithEvaluation)>,
1141        > {
1142            debug!(
1143                "Variable '{}' not found in locals; attempting global lookup",
1144                var_name
1145            );
1146            let mut matches = analyzer.find_global_variables_by_name(var_name);
1147            if matches.is_empty() {
1148                return Ok(None);
1149            }
1150            // Prefer globals defined in the current module.
1151            let preferred: Vec<(
1152                std::path::PathBuf,
1153                ghostscope_dwarf::core::GlobalVariableInfo,
1154            )> = matches
1155                .iter()
1156                .filter(|(p, _)| p.to_string_lossy() == module_path_owned.as_str())
1157                .cloned()
1158                .collect();
1159
1160            let chosen = if preferred.len() == 1 {
1161                Some(preferred[0].clone())
1162            } else if preferred.is_empty() && matches.len() == 1 {
1163                Some(matches.remove(0))
1164            } else {
1165                debug!(
1166                    "Global '{}' is ambiguous across modules ({} matches)",
1167                    var_name,
1168                    matches.len()
1169                );
1170                return Err(CodeGenError::DwarfError(format!(
1171                    "Ambiguous global '{}': {} matches",
1172                    var_name,
1173                    matches.len()
1174                )));
1175            };
1176
1177            if let Some((mpath, info)) = chosen {
1178                let gv = analyzer
1179                    .resolve_variable_by_offsets_in_module(
1180                        &mpath,
1181                        info.unit_offset,
1182                        info.die_offset,
1183                    )
1184                    .map_err(|err| CodeGenError::DwarfError(err.to_string()))?;
1185                Ok(Some((mpath, gv)))
1186            } else {
1187                Ok(None)
1188            }
1189        };
1190
1191        match analyzer.get_all_variables_at_address(&module_address) {
1192            Ok(vars) => {
1193                if let Some(var_result) = vars.iter().find(|v| v.name == var_name).or_else(|| {
1194                    let prefix = format!("{var_name}@");
1195                    vars.iter().find(|v| v.name.starts_with(&prefix))
1196                }) {
1197                    debug!("Found DWARF variable '{}' in locals/params", var_name);
1198                    Ok(Some(var_result.clone()))
1199                } else if let Some((mpath, gv)) = lookup_globals(analyzer)? {
1200                    self.current_resolved_var_module_path =
1201                        Some(mpath.to_string_lossy().to_string());
1202                    Ok(Some(gv))
1203                } else {
1204                    Ok(None)
1205                }
1206            }
1207            Err(e) => {
1208                debug!(
1209                    "DWARF local lookup error for '{}': {e}; falling back to globals",
1210                    var_name
1211                );
1212                if let Some((mpath, gv)) = lookup_globals(analyzer)? {
1213                    self.current_resolved_var_module_path =
1214                        Some(mpath.to_string_lossy().to_string());
1215                    Ok(Some(gv))
1216                } else {
1217                    Ok(None)
1218                }
1219            }
1220        }
1221    }
1222
1223    /// Get DWARF type size in bytes
1224    pub fn get_dwarf_type_size(dwarf_type: &TypeInfo) -> u64 {
1225        match dwarf_type {
1226            TypeInfo::BaseType { size, .. } => *size,
1227            TypeInfo::PointerType { size, .. } => *size,
1228            TypeInfo::ArrayType { total_size, .. } => total_size.unwrap_or(0),
1229            TypeInfo::StructType { size, .. } => *size,
1230            TypeInfo::UnionType { size, .. } => *size,
1231            TypeInfo::EnumType { size, .. } => *size,
1232            TypeInfo::BitfieldType {
1233                underlying_type, ..
1234            } => {
1235                // Read size equals the storage type size
1236                Self::get_dwarf_type_size(underlying_type)
1237            }
1238            TypeInfo::TypedefType {
1239                underlying_type, ..
1240            } => Self::get_dwarf_type_size(underlying_type),
1241            TypeInfo::QualifiedType {
1242                underlying_type, ..
1243            } => Self::get_dwarf_type_size(underlying_type),
1244            TypeInfo::FunctionType { .. } => 8, // Function pointer size
1245            TypeInfo::UnknownType { .. } => 0,
1246            TypeInfo::OptimizedOut { .. } => 0, // Optimized out has no size
1247        }
1248    }
1249
    /// Query DWARF for member access (`obj.field`).
    ///
    /// Resolution proceeds in three tiers:
    /// 1. Non-variable base (e.g. `(*p).x`): resolve the base via the generic
    ///    complex-expression path, optionally auto-deref one pointer level,
    ///    then fold the member's constant offset into the evaluation result.
    /// 2. Pure identifier chains (`a.b.field`): delegate to the chain planner.
    /// 3. Simple variable base: plan in the current module at the current PC,
    ///    then fall back to strict cross-module global chain planning.
    ///
    /// Returns `Ok(None)` when nothing matched. For a global aggregate base
    /// with an unknown member, returns a `TypeError` listing known members.
    pub fn query_dwarf_for_member_access(
        &mut self,
        obj_expr: &crate::script::Expr,
        field_name: &str,
    ) -> Result<Option<VariableWithEvaluation>> {
        // Generic path: try to resolve the base expression first and add constant member offset
        if !matches!(obj_expr, crate::script::Expr::Variable(_)) {
            if let Some(base_var) = self.query_dwarf_for_complex_expr(obj_expr)? {
                if let Some(base_ty) = base_var.dwarf_type.as_ref() {
                    // Look up `field` in a struct/union, descending through
                    // typedef/qualifier wrappers. Returns (byte offset, member type).
                    fn find_member_offset_and_type(
                        t: &ghostscope_dwarf::TypeInfo,
                        field: &str,
                    ) -> Option<(u64, ghostscope_dwarf::TypeInfo)> {
                        match t {
                            ghostscope_dwarf::TypeInfo::StructType { members, .. }
                            | ghostscope_dwarf::TypeInfo::UnionType { members, .. } => {
                                for m in members {
                                    if m.name == field {
                                        return Some((m.offset, m.member_type.clone()));
                                    }
                                }
                                None
                            }
                            ghostscope_dwarf::TypeInfo::TypedefType {
                                underlying_type, ..
                            }
                            | ghostscope_dwarf::TypeInfo::QualifiedType {
                                underlying_type, ..
                            } => find_member_offset_and_type(underlying_type, field),
                            _ => None,
                        }
                    }
                    // Optional auto-deref for pointer-to-aggregate
                    let mut effective_ty = base_ty.clone();
                    let mut effective_eval = base_var.evaluation_result.clone();
                    // unwrap typedef/qualifier for pointer detection
                    fn unwrap_typedef(
                        mut t: &ghostscope_dwarf::TypeInfo,
                    ) -> &ghostscope_dwarf::TypeInfo {
                        while let ghostscope_dwarf::TypeInfo::TypedefType {
                            underlying_type, ..
                        }
                        | ghostscope_dwarf::TypeInfo::QualifiedType {
                            underlying_type,
                            ..
                        } = t
                        {
                            t = underlying_type.as_ref();
                        }
                        t
                    }
                    let unwrapped = unwrap_typedef(&effective_ty);
                    if let ghostscope_dwarf::TypeInfo::PointerType { target_type, .. } = unwrapped {
                        // Insert a dereference step into evaluation so `p.field`
                        // behaves like `(*p).field` for pointer bases.
                        effective_eval = self.compute_pointer_dereference(&effective_eval)?;
                        effective_ty = *target_type.clone();
                    }

                    if let Some((member_off, member_ty)) =
                        find_member_offset_and_type(&effective_ty, field_name)
                    {
                        use ghostscope_dwarf::{
                            ComputeStep as CS, EvaluationResult as ER, LocationResult as LR,
                        };
                        // Fold the constant member offset into whichever location
                        // form the base evaluated to.
                        let new_eval = match &effective_eval {
                            // Absolute address: plain constant addition.
                            ER::MemoryLocation(LR::Address(a)) => {
                                ER::MemoryLocation(LR::Address(a + member_off))
                            }
                            // Runtime-computed address: append push+add steps.
                            ER::MemoryLocation(LR::ComputedLocation { steps }) => {
                                let mut s = steps.clone();
                                s.push(CS::PushConstant(member_off as i64));
                                s.push(CS::Add);
                                ER::MemoryLocation(LR::ComputedLocation { steps: s })
                            }
                            // Register-relative: fold into the displacement
                            // (saturating, to avoid i64 overflow panics).
                            ER::MemoryLocation(LR::RegisterAddress {
                                register,
                                offset,
                                size,
                            }) => {
                                let new_off = offset.unwrap_or(0).saturating_add(member_off as i64);
                                ER::MemoryLocation(LR::RegisterAddress {
                                    register: *register,
                                    offset: Some(new_off),
                                    size: *size,
                                })
                            }
                            // Direct values / optimized-out bases cannot take an offset.
                            _ => {
                                return Err(CodeGenError::NotImplemented(
                                    "Member access on non-addressable expression".to_string(),
                                ))
                            }
                        };
                        let name = format!("{}.{}", base_var.name, field_name);
                        let v = VariableWithEvaluation {
                            name,
                            type_name: member_ty.type_name(),
                            dwarf_type: Some(member_ty),
                            evaluation_result: new_eval,
                            scope_depth: base_var.scope_depth,
                            is_parameter: base_var.is_parameter,
                            is_artificial: base_var.is_artificial,
                        };
                        return Ok(Some(v));
                    }
                }
            }
            // Try a planner-based chain if generic path missed and base is a pure identifier chain
            {
                // Flatten `a.b.c` into ["a","b","c"]; returns false (chain
                // unusable) if any link is not a plain identifier.
                fn flatten_ident_chain<'a>(
                    e: &'a crate::script::Expr,
                    out: &mut Vec<&'a str>,
                ) -> bool {
                    match e {
                        crate::script::Expr::Variable(name) => {
                            out.push(name.as_str());
                            true
                        }
                        crate::script::Expr::MemberAccess(obj, field) => {
                            if flatten_ident_chain(obj, out) {
                                out.push(field.as_str());
                                true
                            } else {
                                false
                            }
                        }
                        _ => false,
                    }
                }
                let mut segs: Vec<&str> = Vec::new();
                if flatten_ident_chain(obj_expr, &mut segs) && !segs.is_empty() {
                    let mut chain: Vec<String> = segs.into_iter().map(|s| s.to_string()).collect();
                    chain.push(field_name.to_string());
                    return self.query_dwarf_for_chain_access(&chain);
                }
            }
            // fall through to legacy variable-only behavior
        }
        // Support simple variable base and fall back to global/static lowering
        if let crate::script::Expr::Variable(base_name) = obj_expr {
            let Some(analyzer_ptr) = self.process_analyzer else {
                return Err(CodeGenError::DwarfError(
                    "No DWARF analyzer available".to_string(),
                ));
            };
            // SAFETY-NOTE(review): raw-pointer deref of the analyzer; assumes the
            // owner keeps it alive for the duration of codegen — confirm upstream.
            let analyzer = unsafe { &mut *analyzer_ptr };
            let ctx = self.get_compile_time_context()?;
            let module_address = ghostscope_dwarf::ModuleAddress::new(
                std::path::PathBuf::from(ctx.module_path.clone()),
                ctx.pc_address,
            );
            // Try current module at PC first
            match analyzer.plan_chain_access(&module_address, base_name, &[field_name.to_string()])
            {
                Ok(Some(var)) => return Ok(Some(var)),
                Ok(None) => {}
                Err(e) => {
                    // Planner errors here are treated as a miss, not a failure.
                    tracing::debug!("member planner miss at current module: {}", e);
                }
            }

            // Strict cross-module chain planning via analyzer API
            match analyzer
                .plan_global_chain_access(
                    &std::path::PathBuf::from(ctx.module_path.clone()),
                    base_name,
                    &[field_name.to_string()],
                )
                .map_err(|e| CodeGenError::DwarfError(e.to_string()))?
            {
                Some((mpath, v)) => {
                    // Remember which module actually resolved the variable.
                    self.current_resolved_var_module_path =
                        Some(mpath.to_string_lossy().to_string());
                    Ok(Some(v))
                }
                None => {
                    // Friendly unknown-member message for globals: try to resolve the base's type
                    // and list available members.
                    // We only attempt this for globals to avoid scanning locals aggressively.
                    let mut matches = analyzer.find_global_variables_by_name(base_name);
                    if !matches.is_empty() {
                        // Prefer current module
                        let preferred: Vec<(
                            std::path::PathBuf,
                            ghostscope_dwarf::core::GlobalVariableInfo,
                        )> = matches
                            .iter()
                            .filter(|(p, _)| p.to_string_lossy() == ctx.module_path.as_str())
                            .cloned()
                            .collect();
                        // Only diagnose when the candidate is unambiguous:
                        // exactly one in the current module, or exactly one overall.
                        let chosen = if preferred.len() == 1 {
                            Some(preferred[0].clone())
                        } else if preferred.is_empty() && matches.len() == 1 {
                            Some(matches.remove(0))
                        } else {
                            None
                        };
                        if let Some((mp, info)) = chosen {
                            if let Ok(var) = analyzer.resolve_variable_by_offsets_in_module(
                                &mp,
                                info.unit_offset,
                                info.die_offset,
                            ) {
                                if let Some(ty) = var.dwarf_type.as_ref() {
                                    // Unwrap aliases
                                    let mut t = ty;
                                    loop {
                                        match t {
                                            ghostscope_dwarf::TypeInfo::TypedefType {
                                                underlying_type,
                                                ..
                                            } => t = underlying_type.as_ref(),
                                            ghostscope_dwarf::TypeInfo::QualifiedType {
                                                underlying_type,
                                                ..
                                            } => t = underlying_type.as_ref(),
                                            _ => break,
                                        }
                                    }
                                    // Collect member names only for aggregates.
                                    let mut kind: Option<&'static str> = None;
                                    let mut member_names: Vec<String> = Vec::new();
                                    match t {
                                        ghostscope_dwarf::TypeInfo::StructType {
                                            members, ..
                                        } => {
                                            kind = Some("struct");
                                            member_names =
                                                members.iter().map(|m| m.name.clone()).collect();
                                        }
                                        ghostscope_dwarf::TypeInfo::UnionType {
                                            members, ..
                                        } => {
                                            kind = Some("union");
                                            member_names =
                                                members.iter().map(|m| m.name.clone()).collect();
                                        }
                                        _ => {}
                                    }
                                    if let Some(k) = kind {
                                        // Form friendly message consistent with tests
                                        // Example: Unknown member 'no_such_member' in struct 'G_STATE'. Known members: a, b, c
                                        member_names.sort();
                                        member_names.dedup();
                                        let list = if member_names.is_empty() {
                                            "<none>".to_string()
                                        } else {
                                            member_names.join(", ")
                                        };
                                        let msg = format!(
                                            "Unknown member '{field_name}' in {k} '{base_name}' (known members: {list})"
                                        );
                                        return Err(CodeGenError::TypeError(msg));
                                    }
                                }
                            }
                        }
                    }
                    Ok(None)
                }
            }
        } else {
            Err(CodeGenError::NotImplemented(
                "MemberAccess base must be a simple variable (use chain access)".to_string(),
            ))
        }
    }
1516
1517    /// Query DWARF for array access (arr[index])
1518    pub fn query_dwarf_for_array_access(
1519        &mut self,
1520        array_expr: &crate::script::Expr,
1521        index_expr: &crate::script::Expr,
1522    ) -> Result<Option<VariableWithEvaluation>> {
1523        // Prefer planner for simple identifier chains like a.b.c as array base to avoid nested member lookups
1524        if let crate::script::Expr::MemberAccess(_, _) = array_expr {
1525            // Try to flatten to a chain of identifiers
1526            fn flatten_chain<'a>(e: &'a crate::script::Expr, out: &mut Vec<&'a str>) -> bool {
1527                match e {
1528                    crate::script::Expr::Variable(name) => {
1529                        out.push(name.as_str());
1530                        true
1531                    }
1532                    crate::script::Expr::MemberAccess(obj, field) => {
1533                        if flatten_chain(obj, out) {
1534                            out.push(field.as_str());
1535                            true
1536                        } else {
1537                            false
1538                        }
1539                    }
1540                    _ => false,
1541                }
1542            }
1543            let mut segs: Vec<&str> = Vec::new();
1544            if flatten_chain(array_expr, &mut segs) && !segs.is_empty() {
1545                let Some(analyzer_ptr) = self.process_analyzer else {
1546                    return Err(CodeGenError::DwarfError(
1547                        "No DWARF analyzer available".to_string(),
1548                    ));
1549                };
1550                let analyzer = unsafe { &mut *analyzer_ptr };
1551                let ctx = self.get_compile_time_context()?;
1552                let module_address = ghostscope_dwarf::ModuleAddress::new(
1553                    std::path::PathBuf::from(ctx.module_path.clone()),
1554                    ctx.pc_address,
1555                );
1556                let base = segs[0].to_string();
1557                let rest: Vec<String> = segs[1..].iter().map(|s| s.to_string()).collect();
1558                if let Ok(Some(var)) = analyzer.plan_chain_access(&module_address, &base, &rest) {
1559                    // Use planner result as array base
1560                    let base_var = var;
1561                    return self.finish_array_access_from_base(base_var, index_expr);
1562                }
1563            }
1564        }
1565
1566        // Fallback: resolve the base array via generic complex expr path
1567        let base_var = match self.query_dwarf_for_complex_expr(array_expr)? {
1568            Some(var) => var,
1569            None => return Ok(None),
1570        };
1571
1572        self.finish_array_access_from_base(base_var, index_expr)
1573    }
1574
1575    fn finish_array_access_from_base(
1576        &mut self,
1577        base_var: VariableWithEvaluation,
1578        index_expr: &crate::script::Expr,
1579    ) -> Result<Option<VariableWithEvaluation>> {
1580        // Get the array's type
1581        let array_type = match &base_var.dwarf_type {
1582            Some(type_info) => type_info,
1583            None => return Ok(None),
1584        };
1585
1586        // Extract element type from array type
1587        let element_type = match array_type {
1588            TypeInfo::ArrayType { element_type, .. } => element_type.as_ref().clone(),
1589            _ => return Ok(None), // Not an array type
1590        };
1591
1592        // Calculate element size for address computation
1593        let element_size = element_type.size();
1594
1595        // For indexing, create a computed location representing: base + (index * element_size)
1596        // Only literal integer indices are supported at this stage
1597        let index_value: i64 = match index_expr {
1598            crate::script::Expr::Int(v) => *v,
1599            _ => {
1600                return Err(CodeGenError::NotImplemented(
1601                    "Only literal integer array indices are supported (TODO)".to_string(),
1602                ))
1603            }
1604        };
1605        let element_evaluation_result = match &base_var.evaluation_result {
1606            EvaluationResult::DirectValue(_) => {
1607                // If base is a value, we can't do array indexing
1608                return Ok(None);
1609            }
1610            EvaluationResult::MemoryLocation(location) => {
1611                match location {
1612                    // Address(base): perform Address arithmetic so ASLR logic applies uniformly
1613                    LocationResult::Address(addr) => {
1614                        let offs = (index_value as i128) * (element_size as i128);
1615                        let new_addr = (*addr as i128).saturating_add(offs);
1616                        if new_addr < 0 {
1617                            return Err(CodeGenError::LLVMError(
1618                                "negative address after indexing".to_string(),
1619                            ));
1620                        }
1621                        EvaluationResult::MemoryLocation(LocationResult::Address(new_addr as u64))
1622                    }
1623                    // Register/Computed: build compute steps at runtime
1624                    _ => {
1625                        let array_access_steps =
1626                            self.create_array_access_steps(location, element_size, index_value);
1627                        EvaluationResult::MemoryLocation(LocationResult::ComputedLocation {
1628                            steps: array_access_steps,
1629                        })
1630                    }
1631                }
1632            }
1633            EvaluationResult::Optimized => {
1634                return Ok(None);
1635            }
1636            EvaluationResult::Composite(_) => {
1637                // Array access on composite locations is complex, skip for now
1638                return Ok(None);
1639            }
1640        };
1641
1642        // Build readable element name: base_name[index]
1643        let elem_name = format!("{}[{}]", base_var.name, index_value);
1644        let element_var = VariableWithEvaluation {
1645            name: elem_name,
1646            type_name: Self::type_info_to_name(&element_type),
1647            dwarf_type: Some(element_type),
1648            evaluation_result: element_evaluation_result,
1649            scope_depth: base_var.scope_depth,
1650            is_parameter: false,
1651            is_artificial: false,
1652        };
1653
1654        Ok(Some(element_var))
1655    }
1656
    /// Query DWARF for chain access (e.g. `person.name.first`).
    ///
    /// `chain` is the full dotted path split into segments. A single-segment
    /// chain degenerates to plain variable lookup. Longer chains go through
    /// the planner only: first against the current module at the current PC
    /// (locals/params), then via strict cross-module global planning. For a
    /// two-segment chain whose base is an unambiguous global aggregate, an
    /// unknown member produces a `TypeError` listing the known members;
    /// otherwise unresolved chains return `Ok(None)`.
    pub fn query_dwarf_for_chain_access(
        &mut self,
        chain: &[String],
    ) -> Result<Option<VariableWithEvaluation>> {
        if chain.is_empty() {
            return Ok(None);
        }
        // If chain has only one element, treat it as a simple variable and reuse variable lookup.
        if chain.len() == 1 {
            return self.query_dwarf_for_variable(&chain[0]);
        }
        // Planner path only; do not fallback. If planning fails, surface an error.
        let Some(analyzer_ptr) = self.process_analyzer else {
            return Err(CodeGenError::DwarfError(
                "No DWARF analyzer available".to_string(),
            ));
        };
        // SAFETY-NOTE(review): raw-pointer deref of the analyzer; assumes the
        // owner keeps it alive for the duration of codegen — confirm upstream.
        let analyzer = unsafe { &mut *analyzer_ptr };
        let ctx = self.get_compile_time_context()?;
        // First attempt: current module at current PC (locals/params)
        let module_address = ghostscope_dwarf::ModuleAddress::new(
            std::path::PathBuf::from(ctx.module_path.clone()),
            ctx.pc_address,
        );
        match analyzer.plan_chain_access(&module_address, &chain[0], &chain[1..]) {
            Ok(Some(var)) => return Ok(Some(var)),
            Ok(None) => {}
            Err(e) => {
                // Treat planner errors as a miss and continue to global fallback
                tracing::debug!("chain planner miss at current module: {}", e);
            }
        }

        // Second attempt: cross-module global chain planning.
        let base = &chain[0];
        let rest = &chain[1..];
        match analyzer
            .plan_global_chain_access(
                &std::path::PathBuf::from(ctx.module_path.clone()),
                base,
                rest,
            )
            .map_err(|e| CodeGenError::DwarfError(e.to_string()))?
        {
            Some((mpath, v)) => {
                // Remember which module actually resolved the variable.
                self.current_resolved_var_module_path = Some(mpath.to_string_lossy().to_string());
                Ok(Some(v))
            }
            None => {
                // Friendly message for unknown member on global in simple two-segment chains
                if chain.len() == 2 {
                    let field_name = &chain[1];
                    let mut matches = analyzer.find_global_variables_by_name(base);
                    if !matches.is_empty() {
                        // Prefer a match from the current module.
                        let preferred: Vec<(
                            std::path::PathBuf,
                            ghostscope_dwarf::core::GlobalVariableInfo,
                        )> = matches
                            .iter()
                            .filter(|(p, _)| p.to_string_lossy() == ctx.module_path.as_str())
                            .cloned()
                            .collect();
                        // Only diagnose when the candidate is unambiguous:
                        // exactly one in the current module, or exactly one overall.
                        let chosen = if preferred.len() == 1 {
                            Some(preferred[0].clone())
                        } else if preferred.is_empty() && matches.len() == 1 {
                            Some(matches.remove(0))
                        } else {
                            None
                        };
                        if let Some((mp, info)) = chosen {
                            if let Ok(var) = analyzer.resolve_variable_by_offsets_in_module(
                                &mp,
                                info.unit_offset,
                                info.die_offset,
                            ) {
                                if let Some(ty) = var.dwarf_type.as_ref() {
                                    // Unwrap typedef/qualified
                                    let mut t = ty;
                                    loop {
                                        match t {
                                            ghostscope_dwarf::TypeInfo::TypedefType {
                                                underlying_type,
                                                ..
                                            } => t = underlying_type.as_ref(),
                                            ghostscope_dwarf::TypeInfo::QualifiedType {
                                                underlying_type,
                                                ..
                                            } => t = underlying_type.as_ref(),
                                            _ => break,
                                        }
                                    }
                                    // Collect member names only for aggregates.
                                    let mut kind: Option<&'static str> = None;
                                    let mut member_names: Vec<String> = Vec::new();
                                    match t {
                                        ghostscope_dwarf::TypeInfo::StructType {
                                            members, ..
                                        } => {
                                            kind = Some("struct");
                                            member_names =
                                                members.iter().map(|m| m.name.clone()).collect();
                                        }
                                        ghostscope_dwarf::TypeInfo::UnionType {
                                            members, ..
                                        } => {
                                            kind = Some("union");
                                            member_names =
                                                members.iter().map(|m| m.name.clone()).collect();
                                        }
                                        _ => {}
                                    }
                                    if let Some(k) = kind {
                                        member_names.sort();
                                        member_names.dedup();
                                        let list = if member_names.is_empty() {
                                            "<none>".to_string()
                                        } else {
                                            member_names.join(", ")
                                        };
                                        let msg = format!(
                                            "Unknown member '{field_name}' in {k} '{base}' (known members: {list})"
                                        );
                                        return Err(CodeGenError::TypeError(msg));
                                    }
                                }
                            }
                        }
                    }
                }
                Ok(None)
            }
        }
    }
1790
    /// Query DWARF for pointer dereference (`*ptr`).
    ///
    /// Resolves `expr` as a pointer, extracts its target type, and returns a
    /// synthetic variable named `*<expr>` whose evaluation result adds one
    /// memory dereference. When the target type is `UnknownType` (e.g. only a
    /// forward declaration was visible at the pointer's CU), attempts to
    /// "upgrade" it to a shallow struct/union/enum definition by name via the
    /// analyzer, trying cross-module lookups before the current module.
    /// Returns `Ok(None)` when `expr` does not resolve to a typed pointer.
    pub fn query_dwarf_for_pointer_deref(
        &mut self,
        expr: &crate::script::Expr,
    ) -> Result<Option<VariableWithEvaluation>> {
        // First, resolve the pointer expression
        let ptr_var = match self.query_dwarf_for_complex_expr(expr)? {
            Some(var) => var,
            None => return Ok(None),
        };

        // Get the pointer's type
        let ptr_type = match &ptr_var.dwarf_type {
            Some(type_info) => type_info,
            None => return Ok(None),
        };

        // Extract pointed-to type from pointer type
        let mut pointed_type = match ptr_type {
            TypeInfo::PointerType { target_type, .. } => target_type.as_ref().clone(),
            _ => return Ok(None), // Not a pointer type
        };

        // Upgrade UnknownType(target_name) using analyzer/type index to get a shallow type.
        // 1) Struct/union/class/enum: try analyzer shallow lookup by name (module-scoped first)
        // 2) Do not guess builtin type sizes — rely only on DWARF base type entries
        if let TypeInfo::UnknownType { name } = &pointed_type {
            // Candidate type names to try against the analyzer's type index.
            let mut candidate_names: Vec<String> = Vec::new();
            if !name.is_empty() && name != "void" {
                candidate_names.push(name.clone());
            }
            // Fallback: derive from pointer variable's pretty type name, e.g., "GlobalState*" => "GlobalState"
            if candidate_names.is_empty() {
                let tn = ptr_var.type_name.trim().to_string();
                if let Some(idx) = tn.find('*') {
                    let mut base = tn[..idx].trim().to_string();
                    // Strip common qualifiers and tags
                    for prefix in [
                        "const ",
                        "volatile ",
                        "restrict ",
                        "struct ",
                        "class ",
                        "union ",
                    ] {
                        if base.starts_with(prefix) {
                            base = base[prefix.len()..].trim().to_string();
                        }
                    }
                    if !base.is_empty() && base != "void" {
                        candidate_names.push(base);
                    }
                }
            }
            if let Some(analyzer_ptr) = self.process_analyzer {
                // SAFETY-NOTE(review): raw-pointer deref of the analyzer; assumes
                // the owner keeps it alive during codegen — confirm upstream.
                let analyzer = unsafe { &mut *analyzer_ptr };
                let ctx = self.get_compile_time_context()?;
                // Records which candidate name succeeded so the result can be
                // re-wrapped as a typedef carrying that alias.
                let mut alias_used: Option<String> = None;
                for n in candidate_names {
                    // Prefer cross-module definitions first to avoid forward decls with size=0 in current CU
                    let mut upgraded: Option<TypeInfo> = None;
                    // struct/class; size>0 filters out forward declarations.
                    if let Some(ti) = analyzer.resolve_struct_type_shallow_by_name(&n) {
                        if ti.size() > 0 {
                            upgraded = Some(ti);
                        }
                    }
                    if upgraded.is_none() {
                        if let Some(ti) = analyzer
                            .resolve_struct_type_shallow_by_name_in_module(&ctx.module_path, &n)
                        {
                            if ti.size() > 0 {
                                upgraded = Some(ti);
                            }
                        }
                    }
                    // union
                    if upgraded.is_none() {
                        if let Some(ti) = analyzer.resolve_union_type_shallow_by_name(&n) {
                            if ti.size() > 0 {
                                upgraded = Some(ti);
                            }
                        }
                    }
                    if upgraded.is_none() {
                        if let Some(ti) = analyzer
                            .resolve_union_type_shallow_by_name_in_module(&ctx.module_path, &n)
                        {
                            if ti.size() > 0 {
                                upgraded = Some(ti);
                            }
                        }
                    }
                    // enum
                    if upgraded.is_none() {
                        if let Some(ti) = analyzer.resolve_enum_type_shallow_by_name(&n) {
                            if ti.size() > 0 {
                                upgraded = Some(ti);
                            }
                        }
                    }
                    if upgraded.is_none() {
                        if let Some(ti) = analyzer
                            .resolve_enum_type_shallow_by_name_in_module(&ctx.module_path, &n)
                        {
                            if ti.size() > 0 {
                                upgraded = Some(ti);
                            }
                        }
                    }
                    if let Some(ti) = upgraded {
                        pointed_type = ti;
                        alias_used = Some(n.clone());
                        break;
                    }
                }

                // If we upgraded to an aggregate and have an alias name, wrap it as a typedef
                if let Some(alias) = alias_used {
                    match &pointed_type {
                        TypeInfo::StructType { .. }
                        | TypeInfo::UnionType { .. }
                        | TypeInfo::EnumType { .. } => {
                            pointed_type = TypeInfo::TypedefType {
                                name: alias,
                                underlying_type: Box::new(pointed_type.clone()),
                            };
                        }
                        _ => {}
                    }
                }
            }
        }

        // Create dereferenced variable; the deref is expressed by extending the
        // pointer's evaluation result with a memory-read step.
        let deref_var = VariableWithEvaluation {
            name: format!("*{}", Self::expr_to_string(expr)),
            type_name: Self::type_info_to_name(&pointed_type),
            dwarf_type: Some(pointed_type),
            evaluation_result: self.compute_pointer_dereference(&ptr_var.evaluation_result)?,
            scope_depth: ptr_var.scope_depth,
            is_parameter: false,
            is_artificial: false,
        };

        Ok(Some(deref_var))
    }
1938
1939    /// Helper: Compute pointer dereference
1940    fn compute_pointer_dereference(
1941        &self,
1942        ptr_result: &EvaluationResult,
1943    ) -> Result<EvaluationResult> {
1944        use ghostscope_dwarf::{ComputeStep, LocationResult, MemoryAccessSize};
1945
1946        match ptr_result {
1947            // If the pointer is a memory location, we need to read that location first,
1948            // then use the result as an address for another read
1949            EvaluationResult::MemoryLocation(location) => {
1950                let steps = [
1951                    self.location_to_compute_steps(location),
1952                    // Then dereference the pointer (read from the computed address)
1953                    vec![ComputeStep::Dereference {
1954                        size: MemoryAccessSize::U64,
1955                    }],
1956                ]
1957                .concat();
1958
1959                Ok(EvaluationResult::MemoryLocation(
1960                    LocationResult::ComputedLocation { steps },
1961                ))
1962            }
1963            // If the pointer value is held directly (common for function parameters)
1964            // interpret the value as an address to the pointed-to object.
1965            EvaluationResult::DirectValue(dv) => {
1966                use ghostscope_dwarf::DirectValueResult as DV;
1967                match dv {
1968                    DV::RegisterValue(reg) => Ok(EvaluationResult::MemoryLocation(
1969                        LocationResult::RegisterAddress {
1970                            register: *reg,
1971                            offset: None,
1972                            size: None,
1973                        },
1974                    )),
1975                    DV::Constant(val) => Ok(EvaluationResult::MemoryLocation(
1976                        LocationResult::Address(*val as u64),
1977                    )),
1978                    DV::ImplicitValue(bytes) => {
1979                        // Assemble up to 8 bytes little-endian into u64
1980                        let mut v: u64 = 0;
1981                        for (i, b) in bytes.iter().take(8).enumerate() {
1982                            v |= (*b as u64) << (8 * i);
1983                        }
1984                        Ok(EvaluationResult::MemoryLocation(LocationResult::Address(v)))
1985                    }
1986                    DV::ComputedValue { steps, .. } => Ok(EvaluationResult::MemoryLocation(
1987                        LocationResult::ComputedLocation {
1988                            steps: steps.clone(),
1989                        },
1990                    )),
1991                }
1992            }
1993            _ => Err(CodeGenError::NotImplemented(
1994                "Unsupported pointer dereference scenario".to_string(),
1995            )),
1996        }
1997    }
1998
1999    /// Helper: Convert location to compute steps
2000    fn location_to_compute_steps(&self, location: &LocationResult) -> Vec<ComputeStep> {
2001        use ghostscope_dwarf::{ComputeStep, LocationResult};
2002
2003        match location {
2004            LocationResult::Address(addr) => {
2005                vec![ComputeStep::PushConstant(*addr as i64)]
2006            }
2007            LocationResult::RegisterAddress {
2008                register, offset, ..
2009            } => {
2010                let mut steps = vec![ComputeStep::LoadRegister(*register)];
2011                if let Some(offset) = offset {
2012                    steps.push(ComputeStep::PushConstant(*offset));
2013                    steps.push(ComputeStep::Add);
2014                }
2015                steps
2016            }
2017            LocationResult::ComputedLocation { steps } => steps.clone(),
2018        }
2019    }
2020
2021    /// Helper: Convert expression to string for debugging
2022    fn expr_to_string(expr: &crate::script::Expr) -> String {
2023        use crate::script::Expr;
2024
2025        match expr {
2026            Expr::Variable(name) => name.clone(),
2027            Expr::MemberAccess(obj, field) => format!("{}.{}", Self::expr_to_string(obj), field),
2028            Expr::ArrayAccess(arr, _) => format!("{}[index]", Self::expr_to_string(arr)),
2029            Expr::ChainAccess(chain) => chain.join("."),
2030            Expr::PointerDeref(expr) => format!("*{}", Self::expr_to_string(expr)),
2031            _ => "expr".to_string(),
2032        }
2033    }
2034
2035    /// Helper: Extract readable name from TypeInfo
2036    fn type_info_to_name(type_info: &TypeInfo) -> String {
2037        match type_info {
2038            TypeInfo::BaseType { name, .. } => name.clone(),
2039            TypeInfo::PointerType { target_type, .. } => {
2040                format!("{}*", Self::type_info_to_name(target_type))
2041            }
2042            TypeInfo::ArrayType {
2043                element_type,
2044                element_count,
2045                ..
2046            } => {
2047                if let Some(count) = element_count {
2048                    format!("{}[{}]", Self::type_info_to_name(element_type), count)
2049                } else {
2050                    format!("{}[]", Self::type_info_to_name(element_type))
2051                }
2052            }
2053            TypeInfo::StructType { name, .. } => format!("struct {name}"),
2054            TypeInfo::UnionType { name, .. } => format!("union {name}"),
2055            TypeInfo::EnumType { name, .. } => format!("enum {name}"),
2056            TypeInfo::BitfieldType {
2057                underlying_type,
2058                bit_offset,
2059                bit_size,
2060            } => {
2061                format!(
2062                    "bitfield<{}:{}> {}",
2063                    bit_offset,
2064                    bit_size,
2065                    Self::type_info_to_name(underlying_type)
2066                )
2067            }
2068            TypeInfo::TypedefType { name, .. } => name.clone(),
2069            TypeInfo::QualifiedType {
2070                underlying_type, ..
2071            } => Self::type_info_to_name(underlying_type),
2072            TypeInfo::FunctionType { .. } => "function".to_string(),
2073            TypeInfo::UnknownType { name } => name.clone(),
2074            TypeInfo::OptimizedOut { name } => format!("<optimized_out> {name}"),
2075        }
2076    }
2077
2078    /// Create computation steps for array access: base_address + (index * element_size)
2079    fn create_array_access_steps(
2080        &self,
2081        base_location: &LocationResult,
2082        element_size: u64,
2083        index: i64,
2084    ) -> Vec<ComputeStep> {
2085        let mut steps = Vec::new();
2086
2087        // First, get the base address computation steps
2088        match base_location {
2089            LocationResult::Address(addr) => {
2090                steps.push(ComputeStep::PushConstant(*addr as i64));
2091            }
2092            LocationResult::RegisterAddress {
2093                register, offset, ..
2094            } => {
2095                steps.push(ComputeStep::LoadRegister(*register));
2096                if let Some(offset) = offset {
2097                    if *offset != 0 {
2098                        steps.push(ComputeStep::PushConstant(*offset));
2099                        steps.push(ComputeStep::Add);
2100                    }
2101                }
2102            }
2103            LocationResult::ComputedLocation { steps: base_steps } => {
2104                steps.extend(base_steps.clone());
2105            }
2106        }
2107
2108        // Now add array indexing computation: current_address + (index * element_size)
2109        steps.push(ComputeStep::PushConstant(index)); // literal index
2110        steps.push(ComputeStep::PushConstant(element_size as i64)); // element_size
2111        steps.push(ComputeStep::Mul); // index * element_size
2112        steps.push(ComputeStep::Add); // base_address + (index * element_size)
2113
2114        steps
2115    }
2116
    /// Compute a typed pointed-to location for expressions like `ptr +/- K` where K is an element index.
    /// Returns a computed location EvaluationResult along with the pointed-to DWARF type.
    /// The offset is scaled by the element size of the pointer/array target type.
    ///
    /// # Errors
    /// * `VariableNotFound` if the pointer expression cannot be resolved via DWARF
    /// * `DwarfError` if the expression has no type info, or the base location
    ///   cannot be reduced to a memory location
    /// * `TypeError` if the expression is a function pointer or is neither a
    ///   pointer nor an array
    pub fn compute_pointed_location_with_index(
        &mut self,
        ptr_expr: &crate::script::Expr,
        index: i64,
    ) -> Result<(EvaluationResult, TypeInfo)> {
        use ghostscope_dwarf::{
            ComputeStep, EvaluationResult as ER, LocationResult as LR, TypeInfo,
        };

        // Resolve the pointer expression via DWARF
        let ptr_var = self
            .query_dwarf_for_complex_expr(ptr_expr)?
            .ok_or_else(|| CodeGenError::VariableNotFound(format!("{ptr_expr:?}")))?;

        let ptr_ty = ptr_var.dwarf_type.as_ref().ok_or_else(|| {
            CodeGenError::DwarfError("Expression has no DWARF type information".to_string())
        })?;

        // Unwrap typedef/qualified wrappers so pointer/array detection below
        // sees the concrete underlying type.
        let mut ty = ptr_ty;
        loop {
            match ty {
                TypeInfo::TypedefType {
                    underlying_type, ..
                } => ty = underlying_type.as_ref(),
                TypeInfo::QualifiedType {
                    underlying_type, ..
                } => ty = underlying_type.as_ref(),
                _ => break,
            }
        }

        // Extract pointed-to (element) type and element size
        let (elem_ty, elem_size) = match ty {
            TypeInfo::PointerType { target_type, .. } => {
                let et = target_type.as_ref().clone();
                let es = et.size();
                // Zero-size guard (e.g. incomplete types): fall back to a
                // 1-byte stride so the index multiplication never collapses to 0.
                let es = if es == 0 { 1 } else { es };
                (et, es)
            }
            TypeInfo::ArrayType { element_type, .. } => {
                let et = element_type.as_ref().clone();
                let es = et.size();
                // Same zero-size guard as the pointer case above.
                let es = if es == 0 { 1 } else { es };
                (et, es)
            }
            TypeInfo::FunctionType { .. } => {
                return Err(CodeGenError::TypeError(
                    "Pointer arithmetic is not supported on function pointers".to_string(),
                ))
            }
            _ => {
                return Err(CodeGenError::TypeError(
                    "Pointer arithmetic requires a pointer or array expression".to_string(),
                ))
            }
        };

        // First compute the base pointed-to location for `*ptr_expr`
        let base_loc_eval = self.compute_pointer_dereference(&ptr_var.evaluation_result)?;
        let base_loc = match &base_loc_eval {
            ER::MemoryLocation(loc) => loc,
            // compute_pointer_dereference only returns MemoryLocation on
            // success today; treat anything else as an internal error.
            _ => {
                return Err(CodeGenError::DwarfError(
                    "Failed to compute base location for pointer arithmetic".to_string(),
                ))
            }
        };

        // Build compute steps: base_address + index * elem_size
        let steps = {
            let mut s = self.location_to_compute_steps(base_loc);
            // scale index by element size (can be negative)
            s.push(ComputeStep::PushConstant(index));
            s.push(ComputeStep::PushConstant(elem_size as i64));
            s.push(ComputeStep::Mul);
            s.push(ComputeStep::Add);
            s
        };

        Ok((ER::MemoryLocation(LR::ComputedLocation { steps }), elem_ty))
    }
2202}
2203
#[cfg(test)]
mod tests {
    use super::*;
    use inkwell::context::Context as LlvmContext;

    #[test]
    fn aggregate_address_returns_pointer_for_struct_and_array() {
        let llvm = LlvmContext::create();
        let options = crate::CompileOptions::default();
        let mut ctx = EbpfContext::new(&llvm, "agg_ptr", Some(0), &options).expect("ctx");
        // A function/pt_regs must exist before the value builders can run.
        ctx.create_basic_ebpf_function("f").expect("fn");
        // The unified codegen path expects the ASLR offsets map in the module.
        ctx.__test_ensure_proc_offsets_map().expect("map");
        // Per-invocation pm_key lives on the stack.
        ctx.__test_alloc_pm_key().expect("pm_key");
        // Address rebasing needs a module path from the compile-time context.
        ctx.set_compile_time_context(0, "/nonexistent/module".to_string());

        let addr_eval = EvaluationResult::MemoryLocation(LocationResult::Address(0x1000));

        // A struct at a memory address should materialize as a pointer.
        let struct_ty = ghostscope_protocol::TypeInfo::StructType {
            name: "S".to_string(),
            size: 80,
            members: vec![],
        };
        let struct_val = ctx
            .evaluate_result_to_llvm_value(&addr_eval, &struct_ty, "S", 0)
            .expect("eval");
        match struct_val {
            BasicValueEnum::PointerValue(_) => {}
            other => panic!("expected PointerValue for struct, got {other:?}"),
        }

        // An array at a memory address should likewise materialize as a pointer.
        let array_ty = ghostscope_protocol::TypeInfo::ArrayType {
            element_type: Box::new(ghostscope_protocol::TypeInfo::BaseType {
                name: "int".to_string(),
                size: 4,
                encoding: ghostscope_dwarf::constants::DW_ATE_signed.0 as u16,
            }),
            element_count: Some(4),
            total_size: Some(16),
        };
        let array_val = ctx
            .evaluate_result_to_llvm_value(&addr_eval, &array_ty, "A", 0)
            .expect("eval2");
        match array_val {
            BasicValueEnum::PointerValue(_) => {}
            other => panic!("expected PointerValue for array, got {other:?}"),
        }
    }

    #[test]
    fn scalar_address_reads_value() {
        let llvm = LlvmContext::create();
        let options = crate::CompileOptions::default();
        let mut ctx = EbpfContext::new(&llvm, "scalar_val", Some(0), &options).expect("ctx");
        // Same minimal codegen scaffolding as the aggregate test above.
        ctx.create_basic_ebpf_function("f").expect("fn");
        ctx.__test_ensure_proc_offsets_map().expect("map");
        ctx.__test_alloc_pm_key().expect("pm_key");
        ctx.set_compile_time_context(0, "/nonexistent/module".to_string());

        // A 4-byte signed int at a memory address should be read as an IntValue.
        let int_ty = ghostscope_protocol::TypeInfo::BaseType {
            name: "int".to_string(),
            size: 4,
            encoding: ghostscope_dwarf::constants::DW_ATE_signed.0 as u16,
        };
        let addr_eval = EvaluationResult::MemoryLocation(LocationResult::Address(0x2000));
        let scalar_val = ctx
            .evaluate_result_to_llvm_value(&addr_eval, &int_ty, "x", 0)
            .expect("eval");
        match scalar_val {
            BasicValueEnum::IntValue(_) => {}
            other => panic!("expected IntValue for scalar, got {other:?}"),
        }
    }
}