// exp_rs/ffi.rs
1//! Foreign Function Interface (FFI) for C/C++ interoperability
2//!
3//! This module provides a simplified C API for expression evaluation with arena-based memory management.
4//!
5//! # Overview
6//!
7//! The exp-rs FFI provides two main APIs:
8//!
9//! ## Batch API (Advanced, Manual Memory Management)
10//! - Create an arena for memory allocation
11//! - Create a batch builder with the arena
12//! - Add multiple expressions and parameters
13//! - Evaluate all expressions at once
14//! - Manually manage arena lifetime
//!
17//! ## Function Support
18//!
19//! The FFI supports two types of functions:
20//!
21//! ### Native Functions
22//! - Implemented in C and passed as function pointers
23//! - Registered with `expr_context_add_function()`
24//! - Example: `sin`, `cos`, `sqrt` implementations
25//!
26//! ### Expression Functions
27//! - Mathematical expressions that can call other functions
28//! - Defined as strings and parsed when registered
29//! - Registered with `expr_context_add_expression_function()`
30//! - Can be removed with `expr_context_remove_expression_function()`
31//! - Example: `distance(x1,y1,x2,y2) = sqrt((x2-x1)^2 + (y2-y1)^2)`
32//!
33//! # Example Usage
34//!
35//! ## Batch API Example
36//! ```c
37//! // Create context with functions
38//! ExprContext* ctx = expr_context_new();
39//! expr_context_add_function(ctx, "sin", 1, native_sin);
40//!
41//! // Add expression functions (mathematical expressions that can call other functions)
42//! expr_context_add_expression_function(ctx, "distance", "x1,y1,x2,y2",
43//!                                      "sqrt((x2-x1)^2 + (y2-y1)^2)");
44//! expr_context_add_expression_function(ctx, "avg", "a,b", "(a+b)/2");
45//!
46//! // Create arena and batch
47//! ExprArena* arena = expr_arena_new(8192);
48//! ExprBatch* batch = expr_batch_new(arena);
49//!
50//! // Add expressions and parameters
51//! expr_batch_add_expression(batch, "x + sin(y)");
52//! expr_batch_add_expression(batch, "distance(0, 0, x, y)");
53//! expr_batch_add_variable(batch, "x", 1.0);
54//! expr_batch_add_variable(batch, "y", 3.14159);
55//!
56//! // Evaluate
57//! expr_batch_evaluate(batch, ctx);
58//! Real result1 = expr_batch_get_result(batch, 0);
59//! Real result2 = expr_batch_get_result(batch, 1);
60//!
61//! // Remove expression functions when no longer needed
62//! expr_context_remove_expression_function(ctx, "avg");
63//!
64//! // Cleanup
65//! expr_batch_free(batch);
66//! expr_arena_free(arena);
67//! expr_context_free(ctx);
68//! ```
69//!
70
71use crate::expression::Expression;
72use crate::{EvalContext, Real};
73use alloc::boxed::Box;
74use alloc::string::ToString;
75use alloc::vec::Vec;
76use bumpalo::Bump;
77use core::ffi::{CStr, c_char, c_void};
78use core::ptr;
79
// Re-export for external visibility
pub use crate::expression::Expression as ExpressionExport;

// Magic numbers used to tell a live batch apart from a freed one.
// 32-bit values are used so they fit in `usize` on 32-bit targets.
const BATCH_MAGIC: usize = 0x7A9F4E82; // Arbitrary value stamped on a valid batch
const BATCH_FREED: usize = 0x9C2E8B7D; // Arbitrary value stamped on a freed batch
87
// Internal wrapper that owns both the arena and the batch.
//
// The batch borrows from the arena (its lifetime erased to 'static behind the
// raw pointer), so the two must be torn down together and in order: batch
// first, then arena. See the `Drop` impl below.
struct BatchWithArena {
    magic: usize,                    // BATCH_MAGIC while live, BATCH_FREED once dropped
    arena: *mut Bump,                // Raw pointer to the arena we leaked
    batch: *mut Expression<'static>, // Raw pointer to the batch (borrows the arena)
}
94
impl Drop for BatchWithArena {
    fn drop(&mut self) {
        // Mark as freed to detect double-free
        // (stale pointers into this wrapper will see BATCH_FREED, not BATCH_MAGIC).
        self.magic = BATCH_FREED;

        // Drop the batch first (it has references into the arena)
        if !self.batch.is_null() {
            unsafe {
                // Explicitly drop the batch
                // SAFETY: assumed to have been produced by Box::into_raw in the
                // batch constructor and freed nowhere else — the null-out below
                // prevents a second free from this wrapper.
                drop(Box::from_raw(self.batch));
            }
            self.batch = ptr::null_mut();
        }
        // Then drop the arena - this should free Bumpalo's memory
        if !self.arena.is_null() {
            unsafe {
                // Get the arena back as a Box
                // SAFETY: assumed leaked via Box::into_raw by the constructor;
                // the batch referencing it was already dropped above.
                let mut arena_box = Box::from_raw(self.arena);
                // Reset it first to ensure all chunks are released
                arena_box.reset();
                // Now drop it - this should trigger Bump's Drop impl
                drop(arena_box);
            }
            self.arena = ptr::null_mut();
        }
    }
}
122
123// ============================================================================
124// Global Allocator - conditional based on custom_cbindgen_alloc feature
125// ============================================================================
126
// Allocation tracking counters (compiled only with the `alloc_tracking` feature).
// The fully qualified `core::sync::atomic::AtomicUsize` is used because the
// type is not imported at module scope in this file.
#[cfg(feature = "alloc_tracking")]
static TOTAL_ALLOCATED: core::sync::atomic::AtomicUsize = core::sync::atomic::AtomicUsize::new(0); // Bytes ever allocated
#[cfg(feature = "alloc_tracking")]
static TOTAL_FREED: core::sync::atomic::AtomicUsize = core::sync::atomic::AtomicUsize::new(0); // Bytes ever freed
#[cfg(feature = "alloc_tracking")]
static ALLOCATION_COUNT: core::sync::atomic::AtomicUsize = core::sync::atomic::AtomicUsize::new(0); // Number of alloc calls
#[cfg(feature = "alloc_tracking")]
static FREE_COUNT: core::sync::atomic::AtomicUsize = core::sync::atomic::AtomicUsize::new(0); // Number of dealloc calls
136
// Detailed allocation tracking (when alloc_tracking feature is enabled)
#[cfg(feature = "alloc_tracking")]
mod allocation_tracking {
    use core::cell::RefCell;
    use critical_section::Mutex;
    use heapless::{FnvIndexMap, Vec};

    /// Record of one live allocation: its size, the source location that
    /// requested it, and (on ARM only) the captured caller return address.
    #[derive(Clone, Copy)]
    pub struct AllocationInfo {
        pub size: usize,         // Requested size in bytes
        pub line: u32,           // Source line of the allocation site
        pub file: &'static str,  // Source file of the allocation site
        pub ptr: usize,          // Allocation address (also the map key)
        pub caller_addr: usize,  // First level caller address
        pub caller2_addr: usize, // Second level caller address
    }

    // ARM-specific function to get return addresses from stack
    #[cfg(target_arch = "arm")]
    unsafe fn get_caller_addresses() -> (usize, usize) {
        let lr: usize; // Link register (immediate caller)

        unsafe {
            // Get link register (return address of immediate caller)
            core::arch::asm!("mov {}, lr", out(reg) lr);
        }

        // Skip stack walking to avoid memory faults - just use link register
        // (the second slot is therefore always 0, even on ARM).
        (lr, 0)
    }

    // Fallback for non-ARM architectures
    #[cfg(not(target_arch = "arm"))]
    unsafe fn get_caller_addresses() -> (usize, usize) {
        (0, 0) // No stack walking support
    }

    // Fixed capacity: beyond this, new allocations are silently not tracked.
    const MAX_TRACKED_ALLOCATIONS: usize = 512;
    type TrackedAllocations = FnvIndexMap<usize, AllocationInfo, MAX_TRACKED_ALLOCATIONS>;

    // Map of live allocations keyed by address, guarded by a critical section
    // so it is usable from interrupt-free embedded contexts.
    static TRACKED_ALLOCATIONS: Mutex<RefCell<TrackedAllocations>> =
        Mutex::new(RefCell::new(TrackedAllocations::new()));

    /// Record a new allocation. No-op for null pointers; silently drops the
    /// record when the table is full.
    pub fn track_allocation(ptr: *mut u8, size: usize, location: &'static core::panic::Location) {
        if ptr.is_null() {
            return;
        }

        // Get caller addresses using ARM stack walking
        let (caller_addr, caller2_addr) = unsafe { get_caller_addresses() };

        let info = AllocationInfo {
            size,
            line: location.line(),
            file: location.file(),
            ptr: ptr as usize,
            caller_addr,
            caller2_addr,
        };

        critical_section::with(|cs| {
            let mut tracked = TRACKED_ALLOCATIONS.borrow(cs).borrow_mut();
            // If we're at capacity, we'll just not track this allocation (silent failure)
            let _ = tracked.insert(ptr as usize, info);
        });
    }

    /// Remove the record for a freed allocation (no-op for null or untracked pointers).
    pub fn untrack_allocation(ptr: *mut u8) {
        if ptr.is_null() {
            return;
        }

        critical_section::with(|cs| {
            let mut tracked = TRACKED_ALLOCATIONS.borrow(cs).borrow_mut();
            tracked.remove(&(ptr as usize));
        });
    }

    /// Snapshot of every allocation that is still tracked (i.e. not yet freed).
    pub fn get_remaining_allocations() -> Vec<AllocationInfo, MAX_TRACKED_ALLOCATIONS> {
        critical_section::with(|cs| {
            let tracked = TRACKED_ALLOCATIONS.borrow(cs).borrow();
            let mut result = Vec::new();
            for (_, info) in tracked.iter() {
                let _ = result.push(*info);
            }
            result
        })
    }
}
226
// When custom_cbindgen_alloc is enabled, use TlsfHeap for embedded targets
#[cfg(feature = "custom_cbindgen_alloc")]
mod embedded_allocator {
    use super::*;
    use core::alloc::{GlobalAlloc, Layout};
    use core::sync::atomic::{AtomicUsize, Ordering};
    use embedded_alloc::TlsfHeap;

    use core::sync::atomic::AtomicBool;

    /// Wrapper around TlsfHeap that enforces explicit initialization and
    /// (optionally) tracks allocation statistics.
    pub struct TrackingHeap {
        heap: TlsfHeap,
        initialized: AtomicBool, // Set once by init(); checked before every alloc/dealloc
    }

    impl TrackingHeap {
        /// Const constructor so the heap can live in a `static`.
        pub const fn new() -> Self {
            Self {
                heap: TlsfHeap::empty(),
                initialized: AtomicBool::new(false),
            }
        }

        /// True once `init()` has run.
        pub fn is_initialized(&self) -> bool {
            self.initialized.load(Ordering::Acquire)
        }

        /// Hand the heap its backing memory.
        ///
        /// # Safety
        /// `start_addr..start_addr+size` must be valid, writable memory that is
        /// used by nothing else and outlives all allocations from this heap.
        pub unsafe fn init(&self, start_addr: usize, size: usize) {
            unsafe {
                self.heap.init(start_addr, size);
            }
            // Release pairs with the Acquire loads in is_initialized()/ensure_initialized().
            self.initialized.store(true, Ordering::Release);
        }

        // Ensure heap is initialized - panics if not explicitly initialized
        fn ensure_initialized(&self) {
            if !self.initialized.load(Ordering::Acquire) {
                // Heap was never explicitly initialized - this is an error
                panic!("Heap not initialized! Call exp_rs_heap_init() before any allocations");
            }
        }
    }

    unsafe impl GlobalAlloc for TrackingHeap {
        #[track_caller]
        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
            self.ensure_initialized();
            let ptr = unsafe { self.heap.alloc(layout) };
            if !ptr.is_null() {
                #[cfg(feature = "alloc_tracking")]
                {
                    TOTAL_ALLOCATED.fetch_add(layout.size(), Ordering::Relaxed);
                    ALLOCATION_COUNT.fetch_add(1, Ordering::Relaxed);
                    // We can't get caller info from GlobalAlloc, but we can track the allocation.
                    // For more detailed tracking, the user would need tracked wrapper functions.
                    let location = core::panic::Location::caller();
                    allocation_tracking::track_allocation(ptr, layout.size(), location);
                }
            }
            ptr
        }

        #[track_caller]
        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
            self.ensure_initialized();
            unsafe {
                self.heap.dealloc(ptr, layout);
            }
            // Statistics and detailed tracking share a single cfg block
            // (previously duplicated as two identical back-to-back blocks).
            #[cfg(feature = "alloc_tracking")]
            {
                TOTAL_FREED.fetch_add(layout.size(), Ordering::Relaxed);
                FREE_COUNT.fetch_add(1, Ordering::Relaxed);
                allocation_tracking::untrack_allocation(ptr);
            }
        }
    }

    // Global heap allocator using TLSF (Two-Level Segregated Fit) algorithm
    #[global_allocator]
    pub static HEAP: TrackingHeap = TrackingHeap::new();

    // No static heap allocation - memory provided by caller

    // Current configured heap size (initialized to 0, set by exp_rs_heap_init)
    pub static CURRENT_HEAP_SIZE: AtomicUsize = AtomicUsize::new(0);
}
319
// When custom_cbindgen_alloc is NOT enabled, use standard system allocator
#[cfg(not(feature = "custom_cbindgen_alloc"))]
mod system_allocator {
    extern crate std;
    use std::alloc::{GlobalAlloc, Layout, System};

    // Bring the parent's tracking counters/helpers and `Ordering` into scope.
    // Without these the `alloc_tracking` paths below do not compile: unlike
    // `embedded_allocator`, this module had no `use super::*;`.
    #[cfg(feature = "alloc_tracking")]
    use super::*;
    #[cfg(feature = "alloc_tracking")]
    use core::sync::atomic::Ordering;

    /// Wrapper around the System allocator that optionally tracks allocations.
    pub struct TrackingSystemHeap;

    unsafe impl GlobalAlloc for TrackingSystemHeap {
        #[track_caller]
        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
            let ptr = unsafe { System.alloc(layout) };
            #[cfg(feature = "alloc_tracking")]
            if !ptr.is_null() {
                TOTAL_ALLOCATED.fetch_add(layout.size(), Ordering::Relaxed);
                ALLOCATION_COUNT.fetch_add(1, Ordering::Relaxed);
                let location = core::panic::Location::caller();
                allocation_tracking::track_allocation(ptr, layout.size(), location);
            }
            ptr
        }

        #[track_caller]
        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
            unsafe {
                System.dealloc(ptr, layout);
            }
            #[cfg(feature = "alloc_tracking")]
            {
                TOTAL_FREED.fetch_add(layout.size(), Ordering::Relaxed);
                FREE_COUNT.fetch_add(1, Ordering::Relaxed);
                allocation_tracking::untrack_allocation(ptr);
            }
        }
    }

    // Global heap allocator using standard system allocator
    #[global_allocator]
    pub static HEAP: TrackingSystemHeap = TrackingSystemHeap;
}
361
// Initialize the global heap from a caller-provided memory buffer
// (only available with the custom allocator).
// Returns 0 on success: -1 null buffer, -2 already initialized, -3 zero size.
#[cfg(feature = "custom_cbindgen_alloc")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_heap_init(heap_ptr: *mut u8, heap_size: usize) -> i32 {
    use embedded_allocator::*;

    // Validate arguments before touching any global state.
    if heap_ptr.is_null() {
        return -1; // Null pointer
    }
    if heap_size == 0 {
        return -3; // Invalid heap size (must be non-zero)
    }
    // NOTE(review): this check-then-init pair is not atomic; concurrent callers
    // could race past it. Assumed single-threaded startup on embedded — confirm.
    if HEAP.is_initialized() {
        return -2; // Already initialized
    }

    // SAFETY: caller guarantees heap_ptr points to heap_size bytes of valid,
    // otherwise-unused memory that outlives all allocations.
    unsafe { HEAP.init(heap_ptr as usize, heap_size) };
    CURRENT_HEAP_SIZE.store(heap_size, core::sync::atomic::Ordering::Release);
    0
}
388
// Report the heap size configured by exp_rs_heap_init (0 before initialization;
// only available with the custom allocator).
#[cfg(feature = "custom_cbindgen_alloc")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_heap_size() -> usize {
    use core::sync::atomic::Ordering;
    embedded_allocator::CURRENT_HEAP_SIZE.load(Ordering::Acquire)
}
395
// Get allocation statistics for C code.
// Each getter imports `Ordering` locally because it is not imported at module
// scope in this file (the unqualified uses previously failed to compile under
// the `alloc_tracking` feature).

/// Total bytes ever allocated through the tracking allocator.
#[cfg(feature = "alloc_tracking")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_total_allocated() -> usize {
    use core::sync::atomic::Ordering;
    TOTAL_ALLOCATED.load(Ordering::Relaxed)
}

/// Total bytes ever freed through the tracking allocator.
#[cfg(feature = "alloc_tracking")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_total_freed() -> usize {
    use core::sync::atomic::Ordering;
    TOTAL_FREED.load(Ordering::Relaxed)
}

/// Number of allocation calls observed.
#[cfg(feature = "alloc_tracking")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_allocation_count() -> usize {
    use core::sync::atomic::Ordering;
    ALLOCATION_COUNT.load(Ordering::Relaxed)
}

/// Number of deallocation calls observed.
#[cfg(feature = "alloc_tracking")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_free_count() -> usize {
    use core::sync::atomic::Ordering;
    FREE_COUNT.load(Ordering::Relaxed)
}

/// Bytes currently outstanding: allocated minus freed, saturating at zero
/// (the two counters are read separately, so the value is approximate under
/// concurrent allocation).
#[cfg(feature = "alloc_tracking")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_current_allocated() -> usize {
    use core::sync::atomic::Ordering;
    let allocated = TOTAL_ALLOCATED.load(Ordering::Relaxed);
    let freed = TOTAL_FREED.load(Ordering::Relaxed);
    allocated.saturating_sub(freed)
}
428
// C-compatible allocation info struct, mirroring
// allocation_tracking::AllocationInfo for exp_rs_get_remaining_allocations().
#[cfg(feature = "alloc_tracking")]
#[repr(C)]
#[derive(Clone, Copy)]
pub struct CAllocationInfo {
    pub size: usize,             // Allocation size in bytes
    pub line: u32,               // Source line of the allocation site
    // Points into a Rust `&'static str` file name, which is NOT NUL-terminated;
    // C consumers must not treat it as a C string — confirm with callers.
    pub file_ptr: *const c_char,
    pub ptr: usize,              // Address of the tracked allocation
    pub caller_addr: usize,  // First level caller address
    pub caller2_addr: usize, // Second level caller address
}
441
// Number of allocations still tracked as live (available with the
// alloc_tracking feature).
#[cfg(feature = "alloc_tracking")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_remaining_allocation_count() -> usize {
    allocation_tracking::get_remaining_allocations().len()
}
450
// Get a single remaining allocation by index using ExprResult
// Returns allocation info in the result fields:
// - status: 0 on success, -1 if index out of bounds
// - value: allocation size (as Real)
// - index: allocation line number
// - error: the source file name recorded for the allocation
//   (caller addresses are not included here; use
//   exp_rs_get_remaining_allocations for the full record)
#[cfg(feature = "alloc_tracking")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_remaining_allocation_by_index(allocation_index: usize) -> ExprResult {
    use allocation_tracking::*;
    let remaining = get_remaining_allocations();

    if allocation_index >= remaining.len() {
        return ExprResult::from_ffi_error(-1, "Allocation index out of bounds");
    }

    let allocation = &remaining[allocation_index];

    // Only the file name is reported; richer formatting is avoided in no_std.
    let info_str = allocation.file;

    ExprResult {
        status: 0,
        value: allocation.size as Real,
        index: allocation.line as i32,
        error: ExprResult::copy_to_error_buffer(info_str),
    }
}
480
// Get remaining allocations data (available with alloc_tracking feature).
// Returns the number of allocations copied to the output buffer.
// If output_buffer is null, returns the total count needed.
#[cfg(feature = "alloc_tracking")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_remaining_allocations(
    output_buffer: *mut CAllocationInfo,
    buffer_size: usize,
) -> usize {
    use allocation_tracking::*;
    let remaining = get_remaining_allocations();

    // Null buffer => size query only.
    if output_buffer.is_null() {
        return remaining.len();
    }

    let copy_count = core::cmp::min(remaining.len(), buffer_size);

    for (i, allocation) in remaining.iter().enumerate().take(copy_count) {
        // Building the record is safe; only the raw-pointer write needs unsafe.
        let c_info = CAllocationInfo {
            size: allocation.size,
            line: allocation.line,
            file_ptr: allocation.file.as_ptr() as *const c_char,
            ptr: allocation.ptr,
            caller_addr: allocation.caller_addr,
            caller2_addr: allocation.caller2_addr,
        };
        // SAFETY: i < copy_count <= buffer_size, and the caller guarantees
        // output_buffer points to at least buffer_size writable entries.
        unsafe {
            output_buffer.add(i).write(c_info);
        }
    }

    copy_count
}
515
// Critical section implementation for single-core ARM targets:
// acquire = disable interrupts, returning the previous PRIMASK so release
// can restore it.
#[cfg(all(target_arch = "arm", not(test)))]
#[unsafe(no_mangle)]
fn _critical_section_1_0_acquire() -> critical_section::RawRestoreState {
    // Read current PRIMASK state and disable interrupts
    let primask: u32;
    unsafe {
        // SAFETY: reading PRIMASK and executing cpsid are assumed valid here;
        // this presumes privileged execution on a Cortex-M class core — confirm.
        core::arch::asm!("mrs {}, primask", out(reg) primask);
        core::arch::asm!("cpsid i");
    }
    primask
}
528
#[cfg(all(target_arch = "arm", not(test)))]
#[unsafe(no_mangle)]
unsafe fn _critical_section_1_0_release(restore_state: critical_section::RawRestoreState) {
    // Restore previous interrupt state. restore_state is the PRIMASK captured
    // by acquire: bit 0 set means interrupts were already disabled back then.
    if restore_state & 1 == 0 {
        // Interrupts were enabled before, re-enable them
        unsafe {
            core::arch::asm!("cpsie i");
        }
    }
    // If bit 0 was set, interrupts were already disabled, so leave them disabled
}
541
// Critical section implementation for native/host targets (tests and native builds).
// NOTE(review): this makes critical_section a no-op on the host, which is only
// sound if host builds access the tracked state single-threaded — confirm.
#[cfg(not(all(target_arch = "arm", not(test))))]
#[unsafe(no_mangle)]
fn _critical_section_1_0_acquire() -> critical_section::RawRestoreState {
    // For native builds, we don't need real critical sections
    // Just return a dummy value - there are no interrupts to disable
    0
}
550
#[cfg(not(all(target_arch = "arm", not(test))))]
#[unsafe(no_mangle)]
unsafe fn _critical_section_1_0_release(_restore_state: critical_section::RawRestoreState) {
    // For native builds, no-op - there are no interrupts to restore
    // (the dummy state from acquire is ignored).
}
556
557// ============================================================================
558// Panic Handler Support
559// ============================================================================
560
/// Global panic flag pointer - set by C code via exp_rs_register_panic_handler().
/// NOTE(review): read/written through an unsynchronized `static mut`; assumed
/// to be configured once at startup before any other FFI call — confirm.
#[allow(dead_code)]
static mut EXP_RS_PANIC_FLAG: *mut i32 = ptr::null_mut();

/// Global log function pointer - set by C code (may remain null if unused)
#[allow(dead_code)]
static mut EXP_RS_LOG_FUNCTION: *const c_void = ptr::null();

/// Type for the logging function: receives a message pointer and its byte length
#[allow(dead_code)]
type LogFunctionType = unsafe extern "C" fn(*const u8, usize);

/// Default panic message (NUL-terminated so C consumers can use it directly)
#[allow(dead_code)]
static PANIC_DEFAULT_MSG: &[u8] = b"Rust panic occurred\0";
576
/// Register a panic handler
///
/// # Parameters
/// - `flag_ptr`: Pointer to an integer that will be set to 1 on panic
/// - `log_func`: Optional logging function pointer (can be NULL)
///
/// # Safety
/// The provided pointers must remain valid for the lifetime of the program.
/// This writes unsynchronized `static mut`s: call it once during startup and
/// never concurrently with other exp-rs calls.
#[cfg(not(test))]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn exp_rs_register_panic_handler(
    flag_ptr: *mut i32,
    log_func: *const c_void,
) {
    unsafe {
        EXP_RS_PANIC_FLAG = flag_ptr;
        EXP_RS_LOG_FUNCTION = log_func;
    }
}
596
597// ============================================================================
598// Error Handling
599// ============================================================================
600
/// Result structure for FFI operations
///
/// Returned by value across the FFI boundary; `#[repr(C)]` gives C a stable
/// layout, and the inline error buffer means no allocation or freeing.
#[repr(C)]
pub struct ExprResult {
    /// Error code: 0 for success, positive for ExprError, negative for FFI errors
    status: i32,
    /// Result value (valid only if status == 0)
    value: Real,
    /// Result index (for functions that return an index)
    index: i32,
    /// Error message buffer (empty string on success, no freeing needed)
    error: [c_char; crate::types::EXP_RS_ERROR_BUFFER_SIZE],
}
613
614impl ExprResult {
615    /// Helper function to copy a string to the error buffer
616    fn copy_to_error_buffer(msg: &str) -> [c_char; crate::types::EXP_RS_ERROR_BUFFER_SIZE] {
617        let mut buffer = [0; crate::types::EXP_RS_ERROR_BUFFER_SIZE];
618        let bytes = msg.as_bytes();
619        let copy_len = core::cmp::min(bytes.len(), crate::types::EXP_RS_ERROR_BUFFER_SIZE - 1);
620
621        for i in 0..copy_len {
622            buffer[i] = bytes[i] as c_char;
623        }
624        buffer[copy_len] = 0; // Null terminator
625        buffer
626    }
627    /// Create a success result with a value
628    fn success_value(value: Real) -> Self {
629        ExprResult {
630            status: 0,
631            value,
632            index: 0,
633            error: [0; crate::types::EXP_RS_ERROR_BUFFER_SIZE],
634        }
635    }
636
637    /// Create a success result with an index
638    fn success_index(index: usize) -> Self {
639        ExprResult {
640            status: 0,
641            value: 0.0,
642            index: index as i32,
643            error: [0; crate::types::EXP_RS_ERROR_BUFFER_SIZE],
644        }
645    }
646
647    /// Create an error result from an ExprError
648    fn from_expr_error(err: crate::error::ExprError) -> Self {
649        let error_code = err.error_code();
650        let error_msg = err.to_string(); // Use Display trait
651
652        ExprResult {
653            status: error_code,
654            value: Real::NAN,
655            index: -1,
656            error: Self::copy_to_error_buffer(&error_msg),
657        }
658    }
659
660    /// Create an error result for FFI-specific errors
661    fn from_ffi_error(code: i32, msg: &str) -> Self {
662        ExprResult {
663            status: code,
664            value: Real::NAN,
665            index: -1,
666            error: Self::copy_to_error_buffer(msg),
667        }
668    }
669}
670
// FFI error codes (negative to distinguish from ExprError codes)
/// A required pointer argument was NULL.
pub const FFI_ERROR_NULL_POINTER: i32 = -1;
/// A C string argument was not valid UTF-8.
pub const FFI_ERROR_INVALID_UTF8: i32 = -2;
/// No arena available for the requested operation.
pub const FFI_ERROR_NO_ARENA_AVAILABLE: i32 = -3;
/// Exclusive (mutable) access to a shared object could not be obtained.
pub const FFI_ERROR_CANNOT_GET_MUTABLE_ACCESS: i32 = -4;
/// A pointer failed validation (e.g. freed or corrupt object).
pub const FFI_ERROR_INVALID_POINTER: i32 = -5;
677
678// ============================================================================
679// Opaque Types with Better Names
680// ============================================================================
681
/// Opaque type for evaluation context
/// (C-visible handle; internally a `Box<Rc<EvalContext>>` pointer)
#[repr(C)]
pub struct ExprContext {
    _private: [u8; 0], // Zero-sized: C code only ever holds pointers to this
}

/// Opaque type for expression batch
/// (C-visible handle; internally points to a `BatchWithArena`)
#[repr(C)]
pub struct ExprBatch {
    _private: [u8; 0], // Zero-sized: C code only ever holds pointers to this
}

/// Opaque type for memory arena
#[repr(C)]
pub struct ExprArena {
    _private: [u8; 0], // Zero-sized: C code only ever holds pointers to this
}
699
700// ============================================================================
701// Native Function Support
702// ============================================================================
703
/// Native function signature: receives a pointer to `n_args` contiguous `Real`
/// arguments and returns the computed value.
pub type NativeFunc = extern "C" fn(args: *const Real, n_args: usize) -> Real;
706
707// ============================================================================
708// Context Management
709// ============================================================================
710
711/// Create a new evaluation context
712///
713/// The context holds function definitions and can be reused across evaluations.
714///
715/// # Returns
716/// Pointer to new context, or NULL on allocation failure
717///
718/// # Safety
719/// The returned pointer must be freed with expr_context_free()
720#[unsafe(no_mangle)]
721pub extern "C" fn expr_context_new() -> *mut ExprContext {
722    let ctx = EvalContext::new();
723    let ctx_rc = alloc::rc::Rc::new(ctx);
724    let ctx = Box::new(ctx_rc);
725    Box::into_raw(ctx) as *mut ExprContext
726}
727
728/// Create a new evaluation context without any pre-registered functions
729///
730/// This creates a context with no built-in functions or constants.
731/// Note that basic operators (+, -, *, /, %, <, >, <=, >=, ==, !=) are still
732/// available as they are handled by the parser, not the function registry.
733///
734/// # Returns
735/// Pointer to new empty context, or NULL on allocation failure
736///
737/// # Safety
738/// The returned pointer must be freed with expr_context_free()
739///
740/// # Example
741/// ```c
742/// ExprContext* ctx = expr_context_new_empty();
743/// // Must register all functions manually
744/// expr_context_add_function(ctx, "+", 2, add_func);
745/// expr_context_add_function(ctx, "*", 2, mul_func);
746/// ```
747#[unsafe(no_mangle)]
748pub extern "C" fn expr_context_new_empty() -> *mut ExprContext {
749    let ctx = EvalContext::empty();
750    let ctx_rc = alloc::rc::Rc::new(ctx);
751    let ctx = Box::new(ctx_rc);
752    Box::into_raw(ctx) as *mut ExprContext
753}
754
755/// Free an evaluation context
756///
757/// # Safety
758/// - The pointer must have been created by expr_context_new()
759/// - The pointer must not be used after calling this function
760#[unsafe(no_mangle)]
761pub extern "C" fn expr_context_free(ctx: *mut ExprContext) {
762    if ctx.is_null() {
763        return;
764    }
765    unsafe {
766        let _ = Box::from_raw(ctx as *mut alloc::rc::Rc<EvalContext>);
767    }
768}
769
770/// Get the count of native functions in a context
771#[unsafe(no_mangle)]
772pub extern "C" fn expr_context_native_function_count(ctx: *const ExprContext) -> usize {
773    if ctx.is_null() {
774        return 0;
775    }
776
777    unsafe {
778        let ctx = &*(ctx as *const alloc::rc::Rc<EvalContext>);
779        ctx.list_native_functions().len()
780    }
781}
782
/// Get a native function name by index
/// Returns the length of the name, or 0 if index is out of bounds
/// If buffer is NULL, just returns the length needed
///
/// Note: the copied bytes are NOT NUL-terminated; callers must use the
/// returned length. If `buffer_size` is smaller than the name, the copy is
/// truncated but the full length is still returned.
#[unsafe(no_mangle)]
pub extern "C" fn expr_context_get_native_function_name(
    ctx: *const ExprContext,
    index: usize,
    buffer: *mut u8,
    buffer_size: usize,
) -> usize {
    if ctx.is_null() {
        return 0;
    }

    unsafe {
        let ctx = &*(ctx as *const alloc::rc::Rc<EvalContext>);
        let functions = ctx.list_native_functions();

        if index >= functions.len() {
            return 0;
        }

        let name = &functions[index];
        let name_bytes = name.as_bytes();

        // NULL buffer => size query only.
        if buffer.is_null() {
            return name_bytes.len();
        }

        // Copy at most buffer_size bytes; no NUL terminator is written.
        let copy_len = core::cmp::min(name_bytes.len(), buffer_size);
        core::ptr::copy_nonoverlapping(name_bytes.as_ptr(), buffer, copy_len);

        name_bytes.len()
    }
}
818
819/// Add a native function to the context
820///
821/// # Parameters
822/// - `ctx`: The context
823/// - `name`: Function name (must be valid UTF-8)
824/// - `arity`: Number of arguments the function expects
825/// - `func`: Function pointer
826///
827/// # Returns
828/// 0 on success, non-zero on error
829#[unsafe(no_mangle)]
830pub extern "C" fn expr_context_add_function(
831    ctx: *mut ExprContext,
832    name: *const c_char,
833    arity: usize,
834    func: NativeFunc,
835) -> i32 {
836    if ctx.is_null() || name.is_null() {
837        return -1;
838    }
839
840    let ctx_handle = unsafe { &mut *(ctx as *mut alloc::rc::Rc<EvalContext>) };
841
842    let name_cstr = unsafe { CStr::from_ptr(name) };
843    let name_str = match name_cstr.to_str() {
844        Ok(s) => s,
845        Err(_) => return -2, // Invalid UTF-8
846    };
847
848    // Create a wrapper that calls the C function
849    let implementation = move |args: &[Real]| -> Real {
850        if args.len() != arity {
851            return Real::NAN;
852        }
853        func(args.as_ptr(), args.len())
854    };
855
856    // Get mutable access to register the function
857    match alloc::rc::Rc::get_mut(ctx_handle) {
858        Some(ctx_mut) => {
859            match ctx_mut.register_native_function(name_str, arity, implementation) {
860                Ok(_) => 0,
861                Err(_) => -3, // Registration failed
862            }
863        }
864        None => -4, // Cannot get mutable access
865    }
866}
867
868/// Add an expression function to a batch
869///
870/// Expression functions are mathematical expressions that can call other functions.
871/// They are specific to this batch and take precedence over context functions.
872///
873/// # Parameters
874/// - `batch`: The batch
875/// - `name`: Function name (must be valid UTF-8)
876/// - `params`: Comma-separated parameter names (e.g., "x,y,z")
877/// - `expression`: The expression string defining the function
878///
879/// # Returns
880/// 0 on success, non-zero on error
881#[unsafe(no_mangle)]
882pub extern "C" fn expr_batch_add_expression_function(
883    batch: *mut ExprBatch,
884    name: *const c_char,
885    params: *const c_char,
886    expression: *const c_char,
887) -> i32 {
888    if batch.is_null() || name.is_null() || params.is_null() || expression.is_null() {
889        return -1;
890    }
891
892    let wrapper = unsafe { &*(batch as *const BatchWithArena) };
893    let builder = unsafe { &mut *wrapper.batch };
894
895    // Parse strings
896    let name_cstr = unsafe { CStr::from_ptr(name) };
897    let name_str = match name_cstr.to_str() {
898        Ok(s) => s,
899        Err(_) => return -2, // Invalid UTF-8
900    };
901
902    let params_cstr = unsafe { CStr::from_ptr(params) };
903    let params_str = match params_cstr.to_str() {
904        Ok(s) => s,
905        Err(_) => return -2, // Invalid UTF-8
906    };
907
908    let expr_cstr = unsafe { CStr::from_ptr(expression) };
909    let expr_str = match expr_cstr.to_str() {
910        Ok(s) => s,
911        Err(_) => return -2, // Invalid UTF-8
912    };
913
914    // Split parameters by comma
915    let param_vec: Vec<&str> = if params_str.is_empty() {
916        Vec::new()
917    } else {
918        params_str.split(',').map(|s| s.trim()).collect()
919    };
920
921    // Register function
922    match builder.register_expression_function(name_str, &param_vec, expr_str) {
923        Ok(_) => 0,
924        Err(_) => -3, // Registration failed
925    }
926}
927
928/// Remove an expression function from a batch
929///
930/// # Parameters
931/// - `batch`: The batch
932/// - `name`: Function name to remove
933///
934/// # Returns
935/// - 1 if the function was removed
936/// - 0 if the function didn't exist
937/// - negative error code on failure
938#[unsafe(no_mangle)]
939pub extern "C" fn expr_batch_remove_expression_function(
940    batch: *mut ExprBatch,
941    name: *const c_char,
942) -> i32 {
943    if batch.is_null() || name.is_null() {
944        return -1;
945    }
946
947    let wrapper = unsafe { &*(batch as *const BatchWithArena) };
948    let builder = unsafe { &mut *wrapper.batch };
949
950    let name_cstr = unsafe { CStr::from_ptr(name) };
951    let name_str = match name_cstr.to_str() {
952        Ok(s) => s,
953        Err(_) => return -2, // Invalid UTF-8
954    };
955
956    match builder.unregister_expression_function(name_str) {
957        Ok(was_removed) => {
958            if was_removed {
959                1
960            } else {
961                0
962            }
963        }
964        Err(_) => -3, // Error
965    }
966}
967
968// ============================================================================
969// Arena Management - DEPRECATED (arena is now managed internally by batch)
970// ============================================================================
971
972// These functions are no longer needed as the batch now manages its own arena.
973// They are kept here commented out for reference.
974
975// /// Create a new memory arena
976// ///
977// /// # Parameters
978// /// - `size_hint`: Suggested size in bytes (0 for default)
979// ///
980// /// # Returns
981// /// Pointer to new arena, or NULL on allocation failure
982// ///
983// /// # Safety
984// /// The returned pointer must be freed with expr_arena_free()
985// #[unsafe(no_mangle)]
986// pub extern "C" fn expr_arena_new(size_hint: usize) -> *mut ExprArena {
987//     let size = if size_hint == 0 { 8192 } else { size_hint };
988//     let arena = Box::new(Bump::with_capacity(size));
989//     Box::into_raw(arena) as *mut ExprArena
990// }
991
992// /// Free a memory arena
993// ///
994// /// # Safety
995// /// - The pointer must have been created by expr_arena_new()
996// /// - All batches using this arena must be freed first
997// #[unsafe(no_mangle)]
998// pub extern "C" fn expr_arena_free(arena: *mut ExprArena) {
999//     if arena.is_null() {
1000//         return;
1001//     }
1002//     unsafe {
1003//         let _ = Box::from_raw(arena as *mut Bump);
1004//     }
1005// }
1006
1007// /// Reset an arena for reuse
1008// ///
1009// /// This clears all allocations but keeps the memory for reuse.
1010// ///
1011// /// # Safety
1012// /// No references to arena-allocated data must exist
1013// #[unsafe(no_mangle)]
1014// pub extern "C" fn expr_arena_reset(arena: *mut ExprArena) {
1015//     if arena.is_null() {
1016//         return;
1017//     }
1018//     let arena = unsafe { &mut *(arena as *mut Bump) };
1019//     arena.reset();
1020// }
1021
1022// ============================================================================
1023// Batch Evaluation (Primary API)
1024// ============================================================================
1025
1026/// Create a new expression batch with its own arena
1027///
1028/// This creates both an arena and a batch in a single allocation.
1029/// The arena is automatically sized based on the size_hint parameter.
1030///
1031/// # Parameters
1032/// - `size_hint`: Suggested arena size in bytes (0 for default of 8KB)
1033///
1034/// # Returns
1035/// Pointer to new batch, or NULL on failure
1036///
1037/// # Safety
1038/// - The returned pointer must be freed with expr_batch_free()
1039#[unsafe(no_mangle)]
1040pub extern "C" fn expr_batch_new(size_hint: usize) -> *mut ExprBatch {
1041    // Use default size if 0 is passed
1042    let arena_size = if size_hint == 0 { 8192 } else { size_hint };
1043
1044    // Create the arena and leak it to get a 'static reference
1045    let arena = Box::new(Bump::with_capacity(arena_size));
1046    let arena_ptr = Box::into_raw(arena);
1047    let arena_ref: &'static Bump = unsafe { &*arena_ptr };
1048
1049    // Create the batch with the leaked arena reference
1050    let batch = Box::new(Expression::new(arena_ref));
1051    let batch_ptr = Box::into_raw(batch);
1052
1053    // Create the wrapper that tracks both pointers for cleanup
1054    let wrapper = Box::new(BatchWithArena {
1055        magic: BATCH_MAGIC,
1056        arena: arena_ptr,
1057        batch: batch_ptr,
1058    });
1059
1060    Box::into_raw(wrapper) as *mut ExprBatch
1061}
1062
1063/// Check if a batch pointer is valid (not freed or corrupted)
1064///
1065/// # Parameters
1066/// - `batch`: The batch pointer to check
1067///
1068/// # Returns
1069/// - ExprResult with status 0 and value 1.0 if the batch is valid
1070/// - ExprResult with error status and message describing the issue if invalid
1071///
1072/// # Safety
1073/// The pointer should have been created by expr_batch_new()
1074#[unsafe(no_mangle)]
1075pub extern "C" fn expr_batch_is_valid(batch: *const ExprBatch) -> ExprResult {
1076    if batch.is_null() {
1077        return ExprResult::from_ffi_error(FFI_ERROR_NULL_POINTER, "Batch pointer is NULL");
1078    }
1079
1080    unsafe {
1081        let wrapper = batch as *const BatchWithArena;
1082        let magic = (*wrapper).magic;
1083
1084        if magic == BATCH_MAGIC {
1085            // Valid batch - return success with value 1.0
1086            ExprResult::success_value(1.0)
1087        } else if magic == BATCH_FREED {
1088            // Batch has been freed
1089            ExprResult::from_ffi_error(
1090                FFI_ERROR_INVALID_POINTER,
1091                "Batch has already been freed (double-free detected)",
1092            )
1093        } else {
1094            // Invalid/corrupted pointer
1095            // Use a static message since format! isn't available in no_std
1096            ExprResult::from_ffi_error(
1097                FFI_ERROR_INVALID_POINTER,
1098                "Invalid or corrupted batch pointer",
1099            )
1100        }
1101    }
1102}
1103
1104/// Free an expression batch and its arena
1105///
1106/// This frees both the batch and its associated arena in one operation.
1107///
1108/// # Safety
1109/// The pointer must have been created by expr_batch_new()
1110#[unsafe(no_mangle)]
1111pub extern "C" fn expr_batch_free(batch: *mut ExprBatch) {
1112    if batch.is_null() {
1113        return;
1114    }
1115
1116    unsafe {
1117        // Check the magic number to detect double-free
1118        let wrapper = batch as *mut BatchWithArena;
1119        let magic = (*wrapper).magic;
1120
1121        if magic == BATCH_FREED {
1122            // Already freed - this is a double-free attempt
1123            // In debug builds, we could panic here. In release, just return safely.
1124            #[cfg(debug_assertions)]
1125            panic!("Double-free detected on ExprBatch at {:p}", batch);
1126
1127            #[cfg(not(debug_assertions))]
1128            return; // Silently ignore in release mode
1129        }
1130
1131        if magic != BATCH_MAGIC {
1132            // Invalid magic - this pointer wasn't created by expr_batch_new
1133            // or memory corruption occurred
1134            #[cfg(debug_assertions)]
1135            panic!(
1136                "Invalid ExprBatch pointer at {:p} (magic: 0x{:x})",
1137                batch, magic
1138            );
1139
1140            #[cfg(not(debug_assertions))]
1141            return; // Silently ignore in release mode
1142        }
1143
1144        // Valid batch - proceed with cleanup
1145        let _ = Box::from_raw(wrapper);
1146    }
1147}
1148
1149/// Clear all expressions, parameters, and results from a batch
1150///
1151/// This allows the batch to be reused without recreating it. The arena memory
1152/// used by previous expressions remains allocated but unused until the arena
1153/// is reset. This is safer than freeing and recreating the batch.
1154///
1155/// # Parameters
1156/// - `batch`: The batch to clear
1157///
1158/// # Returns
1159/// 0 on success, negative error code on failure
1160///
1161/// # Safety
1162/// The pointer must have been created by expr_batch_new()
1163#[unsafe(no_mangle)]
1164pub extern "C" fn expr_batch_clear(batch: *mut ExprBatch) -> i32 {
1165    if batch.is_null() {
1166        return FFI_ERROR_NULL_POINTER;
1167    }
1168
1169    unsafe {
1170        let wrapper = &mut *(batch as *mut BatchWithArena);
1171
1172        // Validate magic number
1173        if wrapper.magic != BATCH_MAGIC {
1174            #[cfg(debug_assertions)]
1175            panic!(
1176                "Invalid or freed ExprBatch pointer at {:p} (magic: 0x{:x})",
1177                batch, wrapper.magic
1178            );
1179
1180            #[cfg(not(debug_assertions))]
1181            return FFI_ERROR_INVALID_POINTER; // Return error in release mode
1182        }
1183
1184        (*wrapper.batch).clear();
1185    }
1186
1187    0
1188}
1189
1190/// Add an expression to the batch
1191///
1192/// # Parameters
1193/// - `batch`: The batch
1194/// - `expr`: Expression string (must be valid UTF-8)
1195///
1196/// # Returns
1197/// ExprResult with index on success, or error details on failure
1198#[unsafe(no_mangle)]
1199pub extern "C" fn expr_batch_add_expression(
1200    batch: *mut ExprBatch,
1201    expr: *const c_char,
1202) -> ExprResult {
1203    if batch.is_null() || expr.is_null() {
1204        return ExprResult::from_ffi_error(
1205            FFI_ERROR_NULL_POINTER,
1206            "Null pointer passed to expr_batch_add_expression",
1207        );
1208    }
1209
1210    let wrapper = unsafe { &*(batch as *const BatchWithArena) };
1211    let builder = unsafe { &mut *wrapper.batch };
1212
1213    let expr_cstr = unsafe { CStr::from_ptr(expr) };
1214    let expr_str = match expr_cstr.to_str() {
1215        Ok(s) => s,
1216        Err(_) => {
1217            return ExprResult::from_ffi_error(
1218                FFI_ERROR_INVALID_UTF8,
1219                "Invalid UTF-8 in expression string",
1220            );
1221        }
1222    };
1223
1224    match builder.add_expression(expr_str) {
1225        Ok(idx) => ExprResult::success_index(idx),
1226        Err(e) => ExprResult::from_expr_error(e),
1227    }
1228}
1229
1230/// Add a variable to the batch
1231///
1232/// # Parameters
1233/// - `batch`: The batch
1234/// - `name`: Variable name (must be valid UTF-8)
1235/// - `value`: Initial value
1236///
1237/// # Returns
1238/// ExprResult with index on success, or error details on failure
1239#[unsafe(no_mangle)]
1240pub extern "C" fn expr_batch_add_variable(
1241    batch: *mut ExprBatch,
1242    name: *const c_char,
1243    value: Real,
1244) -> ExprResult {
1245    if batch.is_null() || name.is_null() {
1246        return ExprResult::from_ffi_error(
1247            FFI_ERROR_NULL_POINTER,
1248            "Null pointer passed to expr_batch_add_variable",
1249        );
1250    }
1251
1252    let wrapper = unsafe { &*(batch as *const BatchWithArena) };
1253    let builder = unsafe { &mut *wrapper.batch };
1254
1255    let name_cstr = unsafe { CStr::from_ptr(name) };
1256    let name_str = match name_cstr.to_str() {
1257        Ok(s) => s,
1258        Err(_) => {
1259            return ExprResult::from_ffi_error(
1260                FFI_ERROR_INVALID_UTF8,
1261                "Invalid UTF-8 in variable name",
1262            );
1263        }
1264    };
1265
1266    match builder.add_parameter(name_str, value) {
1267        Ok(idx) => ExprResult::success_index(idx),
1268        Err(e) => ExprResult::from_expr_error(e),
1269    }
1270}
1271
1272/// Update a variable value by index
1273///
1274/// # Parameters
1275/// - `batch`: The batch
1276/// - `index`: Variable index from expr_batch_add_variable()
1277/// - `value`: New value
1278///
1279/// # Returns
1280/// 0 on success, negative error code on failure
1281#[unsafe(no_mangle)]
1282pub extern "C" fn expr_batch_set_variable(batch: *mut ExprBatch, index: usize, value: Real) -> i32 {
1283    if batch.is_null() {
1284        return -1;
1285    }
1286
1287    let wrapper = unsafe { &*(batch as *const BatchWithArena) };
1288    let builder = unsafe { &mut *wrapper.batch };
1289
1290    match builder.set_param(index, value) {
1291        Ok(_) => 0,
1292        Err(_) => -2, // Invalid index
1293    }
1294}
1295
1296/// Evaluate all expressions in the batch
1297///
1298/// # Parameters
1299/// - `batch`: The batch
1300/// - `ctx`: Optional context with functions (can be NULL)
1301///
1302/// # Returns
1303/// 0 on success, negative error code on failure
1304#[unsafe(no_mangle)]
1305pub extern "C" fn expr_batch_evaluate(batch: *mut ExprBatch, ctx: *mut ExprContext) -> i32 {
1306    if batch.is_null() {
1307        return -1;
1308    }
1309
1310    let wrapper = unsafe { &*(batch as *const BatchWithArena) };
1311    let builder = unsafe { &mut *wrapper.batch };
1312
1313    let eval_ctx = if ctx.is_null() {
1314        alloc::rc::Rc::new(EvalContext::new())
1315    } else {
1316        unsafe {
1317            let ctx_rc = &*(ctx as *const alloc::rc::Rc<EvalContext>);
1318            ctx_rc.clone()
1319        }
1320    };
1321
1322    match builder.eval(&eval_ctx) {
1323        Ok(_) => 0,
1324        Err(_) => -2, // Evaluation error
1325    }
1326}
1327
1328/// Get the result of an expression
1329///
1330/// # Parameters
1331/// - `batch`: The batch
1332/// - `index`: Expression index from expr_batch_add_expression()
1333///
1334/// # Returns
1335/// Result value, or NaN if index is invalid
1336#[unsafe(no_mangle)]
1337pub extern "C" fn expr_batch_get_result(batch: *const ExprBatch, index: usize) -> Real {
1338    if batch.is_null() {
1339        return Real::NAN;
1340    }
1341
1342    let wrapper = unsafe { &*(batch as *const BatchWithArena) };
1343    let builder = unsafe { &*wrapper.batch };
1344    builder.get_result(index).unwrap_or(Real::NAN)
1345}
1346
1347/// Get the high water mark of arena memory usage for a batch
1348///
1349/// # Parameters
1350/// - `batch`: The batch
1351///
1352/// # Returns
1353/// Number of bytes currently allocated in the batch's arena.
1354/// This represents the maximum memory usage of the arena.
1355#[unsafe(no_mangle)]
1356pub extern "C" fn expr_batch_arena_bytes(batch: *const ExprBatch) -> usize {
1357    if batch.is_null() {
1358        return 0;
1359    }
1360
1361    let wrapper = unsafe { &*(batch as *const BatchWithArena) };
1362    let builder = unsafe { &*wrapper.batch };
1363    builder.arena_allocated_bytes()
1364}
1365
1366/// Evaluate all expressions in the batch with detailed error reporting
1367///
1368/// # Parameters
1369/// - `batch`: The batch
1370/// - `ctx`: Optional context with functions (can be NULL)
1371///
1372/// # Returns
1373/// ExprResult with status 0 on success, or error details on failure
1374#[unsafe(no_mangle)]
1375pub extern "C" fn expr_batch_evaluate_ex(
1376    batch: *mut ExprBatch,
1377    ctx: *mut ExprContext,
1378) -> ExprResult {
1379    if batch.is_null() {
1380        return ExprResult::from_ffi_error(FFI_ERROR_NULL_POINTER, "Null batch pointer");
1381    }
1382
1383    let wrapper = unsafe { &*(batch as *const BatchWithArena) };
1384    let builder = unsafe { &mut *wrapper.batch };
1385
1386    let eval_ctx = if ctx.is_null() {
1387        alloc::rc::Rc::new(EvalContext::new())
1388    } else {
1389        unsafe {
1390            let ctx_rc = &*(ctx as *const alloc::rc::Rc<EvalContext>);
1391            ctx_rc.clone()
1392        }
1393    };
1394
1395    match builder.eval(&eval_ctx) {
1396        Ok(_) => ExprResult::success_value(0.0), // No specific value for batch eval
1397        Err(e) => ExprResult::from_expr_error(e),
1398    }
1399}
1400
1401// ============================================================================
1402// Utility Functions
1403// ============================================================================
1404
/// Estimate arena size needed for expressions
///
/// All arithmetic saturates at `usize::MAX`, so absurdly large inputs return
/// a pinned maximum instead of overflowing (which would panic in debug builds
/// and silently wrap to a tiny size in release builds).
///
/// # Parameters
/// - `expression_count`: Number of expressions
/// - `total_expr_length`: Total length of all expression strings
/// - `param_count`: Number of parameters
/// - `estimated_iterations`: Estimated evaluation iterations (currently unused)
///
/// # Returns
/// Recommended arena size in bytes
#[unsafe(no_mangle)]
pub extern "C" fn expr_estimate_arena_size(
    expression_count: usize,
    total_expr_length: usize,
    param_count: usize,
    _estimated_iterations: usize,
) -> usize {
    // Base overhead per expression (AST nodes, etc)
    let expr_overhead = expression_count.saturating_mul(512);

    // String storage
    let string_storage = total_expr_length.saturating_mul(2);

    // Parameter storage
    let param_storage = param_count.saturating_mul(64);

    // Add 50% buffer
    let total = expr_overhead
        .saturating_add(string_storage)
        .saturating_add(param_storage);
    total.saturating_add(total / 2)
}
1435
1436// ============================================================================
1437// Test-only Panic Trigger
1438// ============================================================================
1439
/// Force a panic for testing purposes (only available in debug builds)
///
/// Presumably called from C-side tests to exercise the panic handler path
/// (panic flag + log callback) — confirm against the C test harness.
/// Compiled out of release builds entirely via `cfg(debug_assertions)`.
#[cfg(debug_assertions)]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_test_trigger_panic() {
    panic!("Test panic triggered from C");
}
1446
1447// ============================================================================
1448// Panic Handler Implementation
1449// ============================================================================
1450
/// Panic handler for no_std environments (ARM targets)
///
/// Reports the panic to the host C code as best it can without allocation or
/// formatting: sets the shared panic flag, passes the panicking file path to
/// an optional C log callback, then traps in a loop so execution never
/// continues past the panic.
#[cfg(all(not(test), target_arch = "arm"))]
#[panic_handler]
fn panic(info: &core::panic::PanicInfo) -> ! {
    // Try to set the panic flag to let C code know about the panic
    // SAFETY: writes through a raw static pointer set up by the C side;
    // guarded by a null check. Assumes the C side keeps it valid — TODO confirm.
    unsafe {
        if !EXP_RS_PANIC_FLAG.is_null() {
            *EXP_RS_PANIC_FLAG = 1;
        }

        // Try to log if we have a logging function
        if !EXP_RS_LOG_FUNCTION.is_null() {
            // Cast the raw pointer to a function pointer and call it
            let log_func: LogFunctionType = core::mem::transmute(EXP_RS_LOG_FUNCTION);

            // Try to extract panic information
            // Note: The .message() method was removed in newer Rust versions
            // We'll use location information which is more stable
            if let Some(location) = info.location() {
                // Create a simple message with file and line info
                let file = location.file();
                let _line = location.line(); // We have line number but can't easily format it in no_std

                // Log the file path first
                log_func(file.as_ptr(), file.len());

                // In a no_std environment, we can't easily format strings with line numbers
                // The C side logger can at least see which file panicked
            } else {
                // Fallback to default message
                // NOTE(review): the `- 1` presumably drops a trailing NUL in
                // PANIC_DEFAULT_MSG — verify against its definition.
                log_func(PANIC_DEFAULT_MSG.as_ptr(), PANIC_DEFAULT_MSG.len() - 1);
            }
        }
    }

    // Trigger a fault that the debugger can catch
    #[cfg(target_arch = "arm")]
    loop {
        unsafe {
            // Trigger a HardFault by executing an undefined instruction
            // This allows the debugger to catch the fault and inspect the state
            core::arch::asm!("udf #0");
        }
        // If the fault handler returns, we'll trigger it again
        // This prevents execution from continuing past the panic
    }

    // Fallback for non-ARM architectures
    #[cfg(not(target_arch = "arm"))]
    loop {
        // Busy loop for debugging - debugger can break here
        core::hint::spin_loop();
    }
}
1505
#[cfg(test)]
mod tests {
    use super::*;

    /// Exercises ExprResult::copy_to_error_buffer with a short message, an
    /// exactly-fitting message, and an over-long message, verifying null
    /// termination and truncation behavior.
    #[test]
    fn test_error_buffer_null_termination() {
        use core::ffi::c_char;

        const BUF: usize = crate::types::EXP_RS_ERROR_BUFFER_SIZE;

        // Reinterpret the first `len` bytes of the c_char buffer as a &str.
        fn recover(buffer: &[c_char], len: usize) -> &str {
            unsafe {
                core::str::from_utf8_unchecked(core::slice::from_raw_parts(
                    buffer.as_ptr() as *const u8,
                    len,
                ))
            }
        }

        // Normal message (well within buffer size): round-trips intact.
        let short_msg = "Test error message";
        let buffer = ExprResult::copy_to_error_buffer(short_msg);
        let nul = buffer.iter().position(|&b| b == 0);
        assert!(nul.is_some(), "Error buffer should be null terminated");
        assert_eq!(recover(&buffer, nul.unwrap()), short_msg);

        // Maximum length message (exactly buffer size - 1): fills the buffer
        // with the terminator in the final slot.
        let max_msg = "a".repeat(BUF - 1);
        let buffer = ExprResult::copy_to_error_buffer(&max_msg);
        assert_eq!(buffer[BUF - 1], 0);
        assert_eq!(buffer[BUF - 2], b'a' as c_char);

        // Over-length message: truncated, but still null terminated and valid.
        let long_msg = "a".repeat(BUF + 10);
        let buffer = ExprResult::copy_to_error_buffer(&long_msg);
        assert_eq!(buffer[BUF - 1], 0);
        let recovered = recover(&buffer, BUF - 1);
        assert_eq!(recovered.len(), BUF - 1);
        assert!(recovered.chars().all(|c| c == 'a'));
    }
}