use crate::expression::Expression;
use crate::{EvalContext, Real};
use alloc::boxed::Box;
use alloc::string::ToString;
use alloc::vec::Vec;
use bumpalo::Bump;
use core::ffi::{CStr, c_char, c_void};
use core::ptr;
#[cfg(feature = "alloc_tracking")]
use core::sync::atomic::{AtomicUsize, Ordering};
79
80pub use crate::expression::Expression as ExpressionExport;
82
83const BATCH_MAGIC: usize = 0x7A9F4E82; const BATCH_FREED: usize = 0x9C2E8B7D; struct BatchWithArena {
90 magic: usize, arena: *mut Bump, batch: *mut Expression<'static>, }
94
impl Drop for BatchWithArena {
    /// Tears down the batch and its arena in dependency order:
    /// the `Expression` (which borrows the arena) is dropped first,
    /// then the arena itself. Also stamps the freed sentinel so a
    /// later double-free can be detected via `magic`.
    fn drop(&mut self) {
        // Mark as freed before releasing memory; expr_batch_free reads
        // this sentinel to detect double frees.
        self.magic = BATCH_FREED;

        if !self.batch.is_null() {
            // SAFETY: `batch` was created by Box::into_raw in expr_batch_new
            // and is only freed here.
            unsafe {
                drop(Box::from_raw(self.batch));
            }
            self.batch = ptr::null_mut();
        }
        if !self.arena.is_null() {
            // SAFETY: `arena` was created by Box::into_raw in expr_batch_new;
            // the batch borrowing from it was dropped above.
            unsafe {
                let mut arena_box = Box::from_raw(self.arena);
                // Reset releases the arena's chunks before the Bump itself
                // is dropped (drop alone would also release them).
                arena_box.reset();
                drop(arena_box);
            }
            self.arena = ptr::null_mut();
        }
    }
}
122
// Global allocation counters, maintained by the tracking allocators below.
// NOTE(review): these rely on `core::sync::atomic::AtomicUsize` being in
// scope at file level — confirm the import exists under `alloc_tracking`.

/// Cumulative bytes ever allocated (monotonic; never decremented).
#[cfg(feature = "alloc_tracking")]
static TOTAL_ALLOCATED: AtomicUsize = AtomicUsize::new(0);
/// Cumulative bytes ever freed (monotonic).
#[cfg(feature = "alloc_tracking")]
static TOTAL_FREED: AtomicUsize = AtomicUsize::new(0);
/// Number of successful allocations observed.
#[cfg(feature = "alloc_tracking")]
static ALLOCATION_COUNT: AtomicUsize = AtomicUsize::new(0);
/// Number of deallocations observed.
#[cfg(feature = "alloc_tracking")]
static FREE_COUNT: AtomicUsize = AtomicUsize::new(0);
136
#[cfg(feature = "alloc_tracking")]
mod allocation_tracking {
    //! Per-allocation bookkeeping used to diagnose leaks: every live
    //! allocation is recorded in a fixed-capacity map keyed by pointer.
    use core::cell::RefCell;
    use critical_section::Mutex;
    use heapless::{FnvIndexMap, Vec};

    /// Metadata captured for one live allocation.
    #[derive(Clone, Copy)]
    pub struct AllocationInfo {
        pub size: usize,
        pub line: u32,
        pub file: &'static str,
        pub ptr: usize,
        // Return addresses captured at allocation time (best effort;
        // caller2_addr is currently always 0).
        pub caller_addr: usize, pub caller2_addr: usize, }

    // Best-effort capture of the link register on ARM. Note this reads
    // LR inside this function, so it reflects the immediate caller of
    // this helper rather than a full backtrace.
    #[cfg(target_arch = "arm")]
    unsafe fn get_caller_addresses() -> (usize, usize) {
        let lr: usize; unsafe {
            core::arch::asm!("mov {}, lr", out(reg) lr);
        }

        (lr, 0)
    }

    // Non-ARM targets have no cheap way to read a return address here.
    #[cfg(not(target_arch = "arm"))]
    unsafe fn get_caller_addresses() -> (usize, usize) {
        (0, 0) }

    // Capacity of the tracking table; inserts beyond this are silently
    // dropped (see track_allocation).
    const MAX_TRACKED_ALLOCATIONS: usize = 512;
    type TrackedAllocations = FnvIndexMap<usize, AllocationInfo, MAX_TRACKED_ALLOCATIONS>;

    // Shared table, guarded by a critical section for interrupt safety.
    static TRACKED_ALLOCATIONS: Mutex<RefCell<TrackedAllocations>> =
        Mutex::new(RefCell::new(TrackedAllocations::new()));

    /// Records a new allocation. No-op for null pointers; if the table
    /// is full the entry is silently discarded.
    pub fn track_allocation(ptr: *mut u8, size: usize, location: &'static core::panic::Location) {
        if ptr.is_null() {
            return;
        }

        let (caller_addr, caller2_addr) = unsafe { get_caller_addresses() };

        let info = AllocationInfo {
            size,
            line: location.line(),
            file: location.file(),
            ptr: ptr as usize,
            caller_addr,
            caller2_addr,
        };

        critical_section::with(|cs| {
            let mut tracked = TRACKED_ALLOCATIONS.borrow(cs).borrow_mut();
            // Insert failure (table full) is intentionally ignored.
            let _ = tracked.insert(ptr as usize, info);
        });
    }

    /// Removes an allocation record when the pointer is freed.
    pub fn untrack_allocation(ptr: *mut u8) {
        if ptr.is_null() {
            return;
        }

        critical_section::with(|cs| {
            let mut tracked = TRACKED_ALLOCATIONS.borrow(cs).borrow_mut();
            tracked.remove(&(ptr as usize));
        });
    }

    /// Returns a snapshot of all currently-live tracked allocations.
    pub fn get_remaining_allocations() -> Vec<AllocationInfo, MAX_TRACKED_ALLOCATIONS> {
        critical_section::with(|cs| {
            let tracked = TRACKED_ALLOCATIONS.borrow(cs).borrow();
            let mut result = Vec::new();
            for (_, info) in tracked.iter() {
                let _ = result.push(*info);
            }
            result
        })
    }
}
226
#[cfg(feature = "custom_cbindgen_alloc")]
mod embedded_allocator {
    //! TLSF-based global allocator for embedded builds. The heap region
    //! is supplied at runtime by the C host via `exp_rs_heap_init`.
    use super::*;
    use core::alloc::{GlobalAlloc, Layout};
    use core::sync::atomic::{AtomicUsize, Ordering};
    use embedded_alloc::TlsfHeap;

    use core::sync::atomic::AtomicBool;

    /// TLSF heap plus an init flag; panics on use before initialization
    /// rather than silently returning garbage.
    pub struct TrackingHeap {
        heap: TlsfHeap,
        initialized: AtomicBool,
    }

    impl TrackingHeap {
        pub const fn new() -> Self {
            Self {
                heap: TlsfHeap::empty(),
                initialized: AtomicBool::new(false),
            }
        }

        /// True once `init` has been called.
        pub fn is_initialized(&self) -> bool {
            self.initialized.load(Ordering::Acquire)
        }

        /// # Safety
        /// `start_addr..start_addr+size` must be valid, writable memory
        /// owned exclusively by this heap for the program's lifetime.
        pub unsafe fn init(&self, start_addr: usize, size: usize) {
            unsafe {
                self.heap.init(start_addr, size);
            }
            // Release pairs with the Acquire loads above so the heap
            // contents are visible before the flag.
            self.initialized.store(true, Ordering::Release);
        }

        fn ensure_initialized(&self) {
            if !self.initialized.load(Ordering::Acquire) {
                panic!("Heap not initialized! Call exp_rs_heap_init() before any allocations");
            }
        }
    }

    unsafe impl GlobalAlloc for TrackingHeap {
        #[track_caller]
        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
            self.ensure_initialized();
            let ptr = unsafe { self.heap.alloc(layout) };
            if !ptr.is_null() {
                // Mirror the bookkeeping into the global counters and
                // the per-allocation tracking table.
                #[cfg(feature = "alloc_tracking")]
                {
                    TOTAL_ALLOCATED.fetch_add(layout.size(), Ordering::Relaxed);
                    ALLOCATION_COUNT.fetch_add(1, Ordering::Relaxed);
                    let location = core::panic::Location::caller();
                    allocation_tracking::track_allocation(ptr, layout.size(), location);
                }
            }
            ptr
        }

        #[track_caller]
        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
            self.ensure_initialized();
            unsafe {
                self.heap.dealloc(ptr, layout);
            }
            #[cfg(feature = "alloc_tracking")]
            {
                TOTAL_FREED.fetch_add(layout.size(), Ordering::Relaxed);
                FREE_COUNT.fetch_add(1, Ordering::Relaxed);
            }

            #[cfg(feature = "alloc_tracking")]
            {
                allocation_tracking::untrack_allocation(ptr);
            }
        }
    }

    /// The process-wide allocator for `custom_cbindgen_alloc` builds.
    #[global_allocator]
    pub static HEAP: TrackingHeap = TrackingHeap::new();

    /// Size of the heap region passed to the most recent `init` call.
    pub static CURRENT_HEAP_SIZE: AtomicUsize = AtomicUsize::new(0);
}
319
#[cfg(not(feature = "custom_cbindgen_alloc"))]
mod system_allocator {
    //! Host-build allocator: delegates to the platform `System`
    //! allocator, optionally mirroring the same tracking bookkeeping as
    //! the embedded allocator.
    extern crate std;
    use std::alloc::{GlobalAlloc, Layout, System};

    // Fix: the tracking branches below reference items from the parent
    // module and `Ordering`, but (unlike `embedded_allocator`, which has
    // `use super::*`) this module imported none of them, so it failed to
    // compile with `alloc_tracking` enabled.
    #[cfg(feature = "alloc_tracking")]
    use super::{ALLOCATION_COUNT, FREE_COUNT, TOTAL_ALLOCATED, TOTAL_FREED, allocation_tracking};
    #[cfg(feature = "alloc_tracking")]
    use core::sync::atomic::Ordering;

    /// Zero-sized pass-through wrapper around the system allocator.
    pub struct TrackingSystemHeap;

    unsafe impl GlobalAlloc for TrackingSystemHeap {
        #[track_caller]
        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
            let ptr = unsafe { System.alloc(layout) };
            // Record only successful allocations.
            #[cfg(feature = "alloc_tracking")]
            if !ptr.is_null() {
                TOTAL_ALLOCATED.fetch_add(layout.size(), Ordering::Relaxed);
                ALLOCATION_COUNT.fetch_add(1, Ordering::Relaxed);
                let location = core::panic::Location::caller();
                allocation_tracking::track_allocation(ptr, layout.size(), location);
            }
            ptr
        }

        #[track_caller]
        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
            unsafe {
                System.dealloc(ptr, layout);
            }
            #[cfg(feature = "alloc_tracking")]
            {
                TOTAL_FREED.fetch_add(layout.size(), Ordering::Relaxed);
                FREE_COUNT.fetch_add(1, Ordering::Relaxed);
                allocation_tracking::untrack_allocation(ptr);
            }
        }
    }

    /// The process-wide allocator for host builds.
    #[global_allocator]
    pub static HEAP: TrackingSystemHeap = TrackingSystemHeap;
}
361
/// Initializes the embedded heap from a caller-supplied memory region.
///
/// Returns 0 on success, -1 for a NULL region pointer, -3 for a
/// zero-sized region, and -2 if the heap was already initialized.
#[cfg(feature = "custom_cbindgen_alloc")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_heap_init(heap_ptr: *mut u8, heap_size: usize) -> i32 {
    use embedded_allocator::*;

    // Each failure mode keeps its own distinct C error code.
    if heap_ptr.is_null() {
        return -1;
    } else if heap_size == 0 {
        return -3;
    } else if HEAP.is_initialized() {
        return -2;
    }

    // SAFETY: the caller guarantees `heap_ptr..heap_ptr+heap_size` is
    // valid, exclusively-owned memory for the program's lifetime.
    unsafe {
        HEAP.init(heap_ptr as usize, heap_size);
        CURRENT_HEAP_SIZE.store(heap_size, core::sync::atomic::Ordering::Release);
    }
    0
}
388
/// Returns the heap size recorded by the last successful
/// `exp_rs_heap_init` call (0 if never initialized).
#[cfg(feature = "custom_cbindgen_alloc")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_heap_size() -> usize {
    use core::sync::atomic::Ordering;
    embedded_allocator::CURRENT_HEAP_SIZE.load(Ordering::Acquire)
}
395
/// Total bytes ever allocated (monotonic; does not shrink on free).
#[cfg(feature = "alloc_tracking")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_total_allocated() -> usize {
    TOTAL_ALLOCATED.load(core::sync::atomic::Ordering::Relaxed)
}
402
/// Total bytes ever freed (monotonic).
#[cfg(feature = "alloc_tracking")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_total_freed() -> usize {
    TOTAL_FREED.load(core::sync::atomic::Ordering::Relaxed)
}
408
/// Number of allocations performed since startup.
#[cfg(feature = "alloc_tracking")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_allocation_count() -> usize {
    ALLOCATION_COUNT.load(core::sync::atomic::Ordering::Relaxed)
}
414
/// Number of deallocations performed since startup.
#[cfg(feature = "alloc_tracking")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_free_count() -> usize {
    FREE_COUNT.load(core::sync::atomic::Ordering::Relaxed)
}
420
/// Bytes currently live (allocated minus freed). Saturating so a
/// transient skew between the two relaxed loads cannot underflow.
#[cfg(feature = "alloc_tracking")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_current_allocated() -> usize {
    use core::sync::atomic::Ordering::Relaxed;
    TOTAL_ALLOCATED
        .load(Relaxed)
        .saturating_sub(TOTAL_FREED.load(Relaxed))
}
428
/// C-visible mirror of `allocation_tracking::AllocationInfo`.
#[cfg(feature = "alloc_tracking")]
#[repr(C)]
#[derive(Clone, Copy)]
pub struct CAllocationInfo {
    /// Allocation size in bytes.
    pub size: usize,
    /// Source line of the allocating call site.
    pub line: u32,
    /// Pointer to the source-file path bytes (NOT NUL-terminated;
    /// pair with the known length on the Rust side).
    pub file_ptr: *const c_char,
    /// The allocated address, as an integer.
    pub ptr: usize,
    /// Best-effort return addresses captured at allocation time.
    pub caller_addr: usize,
    pub caller2_addr: usize,
}
441
/// Number of allocations currently tracked as live (i.e. not yet freed).
#[cfg(feature = "alloc_tracking")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_remaining_allocation_count() -> usize {
    allocation_tracking::get_remaining_allocations().len()
}
450
/// Returns one live-allocation record by index, encoded in an
/// `ExprResult`: `value` carries the allocation size, `index` the source
/// line, and `error` the source file path. Status -1 when out of range.
#[cfg(feature = "alloc_tracking")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_remaining_allocation_by_index(allocation_index: usize) -> ExprResult {
    let remaining = allocation_tracking::get_remaining_allocations();

    match remaining.get(allocation_index) {
        None => ExprResult::from_ffi_error(-1, "Allocation index out of bounds"),
        Some(allocation) => ExprResult {
            status: 0,
            value: allocation.size as Real,
            index: allocation.line as i32,
            error: ExprResult::copy_to_error_buffer(allocation.file),
        },
    }
}
480
/// Copies up to `buffer_size` live-allocation records into
/// `output_buffer`. With a NULL buffer, returns the total count so the
/// caller can size the buffer; otherwise returns the number copied.
///
/// NOTE(review): `file_ptr` points at the raw bytes of a Rust `&str`,
/// which is NOT NUL-terminated — C callers must not treat it as a C
/// string. Confirm consumers pair it with an out-of-band length.
#[cfg(feature = "alloc_tracking")]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_get_remaining_allocations(
    output_buffer: *mut CAllocationInfo,
    buffer_size: usize,
) -> usize {
    use allocation_tracking::*;
    let remaining = get_remaining_allocations();

    // NULL buffer = size query.
    if output_buffer.is_null() {
        return remaining.len();
    }

    let copy_count = core::cmp::min(remaining.len(), buffer_size);

    for (i, allocation) in remaining.iter().enumerate().take(copy_count) {
        // SAFETY: i < buffer_size, and the caller guarantees
        // `output_buffer` has room for `buffer_size` records.
        unsafe {
            let c_info = CAllocationInfo {
                size: allocation.size,
                line: allocation.line,
                file_ptr: allocation.file.as_ptr() as *const c_char,
                ptr: allocation.ptr,
                caller_addr: allocation.caller_addr,
                caller2_addr: allocation.caller2_addr,
            };
            output_buffer.add(i).write(c_info);
        }
    }

    copy_count
}
515
/// `critical-section` acquire hook for bare-metal ARM: saves PRIMASK and
/// disables interrupts. The saved value is returned as the restore state.
#[cfg(all(target_arch = "arm", not(test)))]
#[unsafe(no_mangle)]
fn _critical_section_1_0_acquire() -> critical_section::RawRestoreState {
    let primask: u32;
    // SAFETY: reading PRIMASK and executing `cpsid i` are always valid
    // in privileged Cortex-M code.
    unsafe {
        core::arch::asm!("mrs {}, primask", out(reg) primask);
        core::arch::asm!("cpsid i");
    }
    primask
}
528
/// `critical-section` release hook: re-enables interrupts only if they
/// were enabled before acquire (PRIMASK bit 0 clear in the saved state).
#[cfg(all(target_arch = "arm", not(test)))]
#[unsafe(no_mangle)]
unsafe fn _critical_section_1_0_release(restore_state: critical_section::RawRestoreState) {
    if restore_state & 1 == 0 {
        // SAFETY: `cpsie i` is always valid in privileged Cortex-M code.
        unsafe {
            core::arch::asm!("cpsie i");
        }
    }
    }
541
/// Host/test stub for the `critical-section` acquire hook; no interrupt
/// masking is needed, so the restore state is a dummy zero.
#[cfg(not(all(target_arch = "arm", not(test))))]
#[unsafe(no_mangle)]
fn _critical_section_1_0_acquire() -> critical_section::RawRestoreState {
    0
}
550
/// Host/test stub for the `critical-section` release hook; nothing to
/// restore.
#[cfg(not(all(target_arch = "arm", not(test))))]
#[unsafe(no_mangle)]
unsafe fn _critical_section_1_0_release(_restore_state: critical_section::RawRestoreState) {
    }
556
// State for the C-registered panic hook. These are plain `static mut`s
// written from exp_rs_register_panic_handler and read from the panic
// handler; registration is expected to happen once, before evaluation,
// from a single thread.

/// C-owned flag set to 1 when a Rust panic occurs (may be null).
#[allow(dead_code)]
static mut EXP_RS_PANIC_FLAG: *mut i32 = ptr::null_mut();

/// Optional C logging callback invoked from the panic handler (may be null).
#[allow(dead_code)]
static mut EXP_RS_LOG_FUNCTION: *const c_void = ptr::null();

/// Signature of the C logging callback: (message bytes, length).
#[allow(dead_code)]
type LogFunctionType = unsafe extern "C" fn(*const u8, usize);

/// Fallback message (NUL-terminated) when no panic location is available.
#[allow(dead_code)]
static PANIC_DEFAULT_MSG: &[u8] = b"Rust panic occurred\0";
576
/// Registers C-side panic reporting: `flag_ptr` (may be NULL) is set to
/// 1 on panic, and `log_func` (a `LogFunctionType`, may be NULL) is
/// called with the panic location.
///
/// # Safety
/// Both pointers must remain valid for the program's lifetime. Call
/// once, before any evaluation; the underlying statics are not
/// synchronized.
#[cfg(not(test))]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn exp_rs_register_panic_handler(
    flag_ptr: *mut i32,
    log_func: *const c_void,
) {
    unsafe {
        EXP_RS_PANIC_FLAG = flag_ptr;
        EXP_RS_LOG_FUNCTION = log_func;
    }
}
596
/// C-ABI result record returned by the `_ex`-style FFI entry points.
#[repr(C)]
pub struct ExprResult {
    // 0 on success; a negative FFI_ERROR_* or evaluation error code otherwise.
    status: i32,
    // Numeric payload (NAN on error).
    value: Real,
    // Index payload (slot index on success, -1 on error).
    index: i32,
    // NUL-terminated error message (empty on success).
    error: [c_char; crate::types::EXP_RS_ERROR_BUFFER_SIZE],
}
613
614impl ExprResult {
615 fn copy_to_error_buffer(msg: &str) -> [c_char; crate::types::EXP_RS_ERROR_BUFFER_SIZE] {
617 let mut buffer = [0; crate::types::EXP_RS_ERROR_BUFFER_SIZE];
618 let bytes = msg.as_bytes();
619 let copy_len = core::cmp::min(bytes.len(), crate::types::EXP_RS_ERROR_BUFFER_SIZE - 1);
620
621 for i in 0..copy_len {
622 buffer[i] = bytes[i] as c_char;
623 }
624 buffer[copy_len] = 0; buffer
626 }
627 fn success_value(value: Real) -> Self {
629 ExprResult {
630 status: 0,
631 value,
632 index: 0,
633 error: [0; crate::types::EXP_RS_ERROR_BUFFER_SIZE],
634 }
635 }
636
637 fn success_index(index: usize) -> Self {
639 ExprResult {
640 status: 0,
641 value: 0.0,
642 index: index as i32,
643 error: [0; crate::types::EXP_RS_ERROR_BUFFER_SIZE],
644 }
645 }
646
647 fn from_expr_error(err: crate::error::ExprError) -> Self {
649 let error_code = err.error_code();
650 let error_msg = err.to_string(); ExprResult {
653 status: error_code,
654 value: Real::NAN,
655 index: -1,
656 error: Self::copy_to_error_buffer(&error_msg),
657 }
658 }
659
660 fn from_ffi_error(code: i32, msg: &str) -> Self {
662 ExprResult {
663 status: code,
664 value: Real::NAN,
665 index: -1,
666 error: Self::copy_to_error_buffer(msg),
667 }
668 }
669}
670
/// A required pointer argument was NULL.
pub const FFI_ERROR_NULL_POINTER: i32 = -1;
/// A C string argument was not valid UTF-8.
pub const FFI_ERROR_INVALID_UTF8: i32 = -2;
/// No arena was available for the requested operation.
pub const FFI_ERROR_NO_ARENA_AVAILABLE: i32 = -3;
/// The shared context is aliased and cannot be mutated.
pub const FFI_ERROR_CANNOT_GET_MUTABLE_ACCESS: i32 = -4;
/// The pointer does not reference a live object (bad magic / freed).
pub const FFI_ERROR_INVALID_POINTER: i32 = -5;
677
// Opaque handle types exposed to C. Zero-sized on purpose: C code only
// ever holds pointers to them; the Rust side casts back to the real
// representation (`Rc<EvalContext>`, `BatchWithArena`, `Bump`).

/// Opaque handle to an evaluation context (`Rc<EvalContext>`).
#[repr(C)]
pub struct ExprContext {
    _private: [u8; 0],
}

/// Opaque handle to a batch (`BatchWithArena`).
#[repr(C)]
pub struct ExprBatch {
    _private: [u8; 0],
}

/// Opaque handle to a standalone arena (`Bump`).
#[repr(C)]
pub struct ExprArena {
    _private: [u8; 0],
}

/// C-callable native function: receives a pointer to `n_args` Reals.
pub type NativeFunc = extern "C" fn(args: *const Real, n_args: usize) -> Real;
706
707#[unsafe(no_mangle)]
721pub extern "C" fn expr_context_new() -> *mut ExprContext {
722 let ctx = EvalContext::new();
723 let ctx_rc = alloc::rc::Rc::new(ctx);
724 let ctx = Box::new(ctx_rc);
725 Box::into_raw(ctx) as *mut ExprContext
726}
727
728#[unsafe(no_mangle)]
748pub extern "C" fn expr_context_new_empty() -> *mut ExprContext {
749 let ctx = EvalContext::empty();
750 let ctx_rc = alloc::rc::Rc::new(ctx);
751 let ctx = Box::new(ctx_rc);
752 Box::into_raw(ctx) as *mut ExprContext
753}
754
755#[unsafe(no_mangle)]
761pub extern "C" fn expr_context_free(ctx: *mut ExprContext) {
762 if ctx.is_null() {
763 return;
764 }
765 unsafe {
766 let _ = Box::from_raw(ctx as *mut alloc::rc::Rc<EvalContext>);
767 }
768}
769
770#[unsafe(no_mangle)]
772pub extern "C" fn expr_context_native_function_count(ctx: *const ExprContext) -> usize {
773 if ctx.is_null() {
774 return 0;
775 }
776
777 unsafe {
778 let ctx = &*(ctx as *const alloc::rc::Rc<EvalContext>);
779 ctx.list_native_functions().len()
780 }
781}
782
/// Copies the name of the `index`-th native function into `buffer`.
///
/// Returns the full name length in bytes (0 for NULL/invalid inputs).
/// With a NULL buffer this is a pure size query. Note: the copy is
/// truncated to `buffer_size` and is NOT NUL-terminated; the caller
/// must use the returned length.
#[unsafe(no_mangle)]
pub extern "C" fn expr_context_get_native_function_name(
    ctx: *const ExprContext,
    index: usize,
    buffer: *mut u8,
    buffer_size: usize,
) -> usize {
    if ctx.is_null() {
        return 0;
    }

    unsafe {
        let ctx = &*(ctx as *const alloc::rc::Rc<EvalContext>);
        let functions = ctx.list_native_functions();

        if index >= functions.len() {
            return 0;
        }

        let name = &functions[index];
        let name_bytes = name.as_bytes();

        // NULL buffer = size query.
        if buffer.is_null() {
            return name_bytes.len();
        }

        let copy_len = core::cmp::min(name_bytes.len(), buffer_size);
        core::ptr::copy_nonoverlapping(name_bytes.as_ptr(), buffer, copy_len);

        // Always the full length, even when the copy was truncated.
        name_bytes.len()
    }
}
818
/// Registers a C callback as a native function of fixed `arity`.
///
/// Returns 0 on success; -1 for NULL arguments, -2 for a non-UTF-8
/// name, -3 if registration fails, and -4 if the context is currently
/// shared (the Rc has other clones, e.g. during an evaluation).
#[unsafe(no_mangle)]
pub extern "C" fn expr_context_add_function(
    ctx: *mut ExprContext,
    name: *const c_char,
    arity: usize,
    func: NativeFunc,
) -> i32 {
    if ctx.is_null() || name.is_null() {
        return -1;
    }

    // SAFETY: non-null context pointers originate from expr_context_new*.
    let ctx_handle = unsafe { &mut *(ctx as *mut alloc::rc::Rc<EvalContext>) };

    // SAFETY: `name` was null-checked and must be a NUL-terminated C string.
    let name_cstr = unsafe { CStr::from_ptr(name) };
    let name_str = match name_cstr.to_str() {
        Ok(s) => s,
        Err(_) => return -2, };

    // Bridge closure: enforce arity on the Rust side, then forward the
    // argument slice to the C callback as pointer + length.
    let implementation = move |args: &[Real]| -> Real {
        if args.len() != arity {
            return Real::NAN;
        }
        func(args.as_ptr(), args.len())
    };

    // Mutation requires exclusive ownership of the Rc.
    match alloc::rc::Rc::get_mut(ctx_handle) {
        Some(ctx_mut) => {
            match ctx_mut.register_native_function(name_str, arity, implementation) {
                Ok(_) => 0,
                Err(_) => -3, }
        }
        None => -4, }
}
867
868#[unsafe(no_mangle)]
882pub extern "C" fn expr_batch_add_expression_function(
883 batch: *mut ExprBatch,
884 name: *const c_char,
885 params: *const c_char,
886 expression: *const c_char,
887) -> i32 {
888 if batch.is_null() || name.is_null() || params.is_null() || expression.is_null() {
889 return -1;
890 }
891
892 let wrapper = unsafe { &*(batch as *const BatchWithArena) };
893 let builder = unsafe { &mut *wrapper.batch };
894
895 let name_cstr = unsafe { CStr::from_ptr(name) };
897 let name_str = match name_cstr.to_str() {
898 Ok(s) => s,
899 Err(_) => return -2, };
901
902 let params_cstr = unsafe { CStr::from_ptr(params) };
903 let params_str = match params_cstr.to_str() {
904 Ok(s) => s,
905 Err(_) => return -2, };
907
908 let expr_cstr = unsafe { CStr::from_ptr(expression) };
909 let expr_str = match expr_cstr.to_str() {
910 Ok(s) => s,
911 Err(_) => return -2, };
913
914 let param_vec: Vec<&str> = if params_str.is_empty() {
916 Vec::new()
917 } else {
918 params_str.split(',').map(|s| s.trim()).collect()
919 };
920
921 match builder.register_expression_function(name_str, ¶m_vec, expr_str) {
923 Ok(_) => 0,
924 Err(_) => -3, }
926}
927
928#[unsafe(no_mangle)]
939pub extern "C" fn expr_batch_remove_expression_function(
940 batch: *mut ExprBatch,
941 name: *const c_char,
942) -> i32 {
943 if batch.is_null() || name.is_null() {
944 return -1;
945 }
946
947 let wrapper = unsafe { &*(batch as *const BatchWithArena) };
948 let builder = unsafe { &mut *wrapper.batch };
949
950 let name_cstr = unsafe { CStr::from_ptr(name) };
951 let name_str = match name_cstr.to_str() {
952 Ok(s) => s,
953 Err(_) => return -2, };
955
956 match builder.unregister_expression_function(name_str) {
957 Ok(was_removed) => {
958 if was_removed {
959 1
960 } else {
961 0
962 }
963 }
964 Err(_) => -3, }
966}
967
/// Creates a batch with its own bump arena (capacity `size_hint`, or
/// 8192 bytes when 0). Free with `expr_batch_free`.
#[unsafe(no_mangle)]
pub extern "C" fn expr_batch_new(size_hint: usize) -> *mut ExprBatch {
    let arena_size = if size_hint == 0 { 8192 } else { size_hint };

    // Leak the arena to a raw pointer so we can hand the Expression a
    // 'static borrow of it; BatchWithArena::drop reclaims both, batch
    // first (it borrows the arena).
    let arena = Box::new(Bump::with_capacity(arena_size));
    let arena_ptr = Box::into_raw(arena);
    // SAFETY: `arena_ptr` stays valid until the wrapper's Drop runs; the
    // 'static lifetime is a promise upheld by the drop ordering there.
    let arena_ref: &'static Bump = unsafe { &*arena_ptr };

    let batch = Box::new(Expression::new(arena_ref));
    let batch_ptr = Box::into_raw(batch);

    let wrapper = Box::new(BatchWithArena {
        magic: BATCH_MAGIC,
        arena: arena_ptr,
        batch: batch_ptr,
    });

    Box::into_raw(wrapper) as *mut ExprBatch
}
1062
1063#[unsafe(no_mangle)]
1075pub extern "C" fn expr_batch_is_valid(batch: *const ExprBatch) -> ExprResult {
1076 if batch.is_null() {
1077 return ExprResult::from_ffi_error(FFI_ERROR_NULL_POINTER, "Batch pointer is NULL");
1078 }
1079
1080 unsafe {
1081 let wrapper = batch as *const BatchWithArena;
1082 let magic = (*wrapper).magic;
1083
1084 if magic == BATCH_MAGIC {
1085 ExprResult::success_value(1.0)
1087 } else if magic == BATCH_FREED {
1088 ExprResult::from_ffi_error(
1090 FFI_ERROR_INVALID_POINTER,
1091 "Batch has already been freed (double-free detected)",
1092 )
1093 } else {
1094 ExprResult::from_ffi_error(
1097 FFI_ERROR_INVALID_POINTER,
1098 "Invalid or corrupted batch pointer",
1099 )
1100 }
1101 }
1102}
1103
/// Frees a batch created by `expr_batch_new`. NULL is a no-op.
/// Double frees and corrupted pointers panic in debug builds and are
/// silently ignored in release builds.
#[unsafe(no_mangle)]
pub extern "C" fn expr_batch_free(batch: *mut ExprBatch) {
    if batch.is_null() {
        return;
    }

    unsafe {
        let wrapper = batch as *mut BatchWithArena;
        let magic = (*wrapper).magic;

        // Drop already stamped BATCH_FREED: this is a double free.
        if magic == BATCH_FREED {
            #[cfg(debug_assertions)]
            panic!("Double-free detected on ExprBatch at {:p}", batch);

            #[cfg(not(debug_assertions))]
            return; }

        // Unknown magic: not a pointer we handed out (or corrupted).
        if magic != BATCH_MAGIC {
            #[cfg(debug_assertions)]
            panic!(
                "Invalid ExprBatch pointer at {:p} (magic: 0x{:x})",
                batch, magic
            );

            #[cfg(not(debug_assertions))]
            return; }

        // Reclaim the wrapper; its Drop frees the batch, then the arena.
        let _ = Box::from_raw(wrapper);
    }
}
1148
/// Clears the batch's expressions/parameters for reuse, keeping the
/// arena. Returns 0 on success or an FFI_ERROR_* code; an invalid or
/// freed pointer panics in debug builds.
#[unsafe(no_mangle)]
pub extern "C" fn expr_batch_clear(batch: *mut ExprBatch) -> i32 {
    if batch.is_null() {
        return FFI_ERROR_NULL_POINTER;
    }

    unsafe {
        let wrapper = &mut *(batch as *mut BatchWithArena);

        // Validate the magic word before touching the inner Expression.
        if wrapper.magic != BATCH_MAGIC {
            #[cfg(debug_assertions)]
            panic!(
                "Invalid or freed ExprBatch pointer at {:p} (magic: 0x{:x})",
                batch, wrapper.magic
            );

            #[cfg(not(debug_assertions))]
            return FFI_ERROR_INVALID_POINTER; }

        (*wrapper.batch).clear();
    }

    0
}
1189
1190#[unsafe(no_mangle)]
1199pub extern "C" fn expr_batch_add_expression(
1200 batch: *mut ExprBatch,
1201 expr: *const c_char,
1202) -> ExprResult {
1203 if batch.is_null() || expr.is_null() {
1204 return ExprResult::from_ffi_error(
1205 FFI_ERROR_NULL_POINTER,
1206 "Null pointer passed to expr_batch_add_expression",
1207 );
1208 }
1209
1210 let wrapper = unsafe { &*(batch as *const BatchWithArena) };
1211 let builder = unsafe { &mut *wrapper.batch };
1212
1213 let expr_cstr = unsafe { CStr::from_ptr(expr) };
1214 let expr_str = match expr_cstr.to_str() {
1215 Ok(s) => s,
1216 Err(_) => {
1217 return ExprResult::from_ffi_error(
1218 FFI_ERROR_INVALID_UTF8,
1219 "Invalid UTF-8 in expression string",
1220 );
1221 }
1222 };
1223
1224 match builder.add_expression(expr_str) {
1225 Ok(idx) => ExprResult::success_index(idx),
1226 Err(e) => ExprResult::from_expr_error(e),
1227 }
1228}
1229
1230#[unsafe(no_mangle)]
1240pub extern "C" fn expr_batch_add_variable(
1241 batch: *mut ExprBatch,
1242 name: *const c_char,
1243 value: Real,
1244) -> ExprResult {
1245 if batch.is_null() || name.is_null() {
1246 return ExprResult::from_ffi_error(
1247 FFI_ERROR_NULL_POINTER,
1248 "Null pointer passed to expr_batch_add_variable",
1249 );
1250 }
1251
1252 let wrapper = unsafe { &*(batch as *const BatchWithArena) };
1253 let builder = unsafe { &mut *wrapper.batch };
1254
1255 let name_cstr = unsafe { CStr::from_ptr(name) };
1256 let name_str = match name_cstr.to_str() {
1257 Ok(s) => s,
1258 Err(_) => {
1259 return ExprResult::from_ffi_error(
1260 FFI_ERROR_INVALID_UTF8,
1261 "Invalid UTF-8 in variable name",
1262 );
1263 }
1264 };
1265
1266 match builder.add_parameter(name_str, value) {
1267 Ok(idx) => ExprResult::success_index(idx),
1268 Err(e) => ExprResult::from_expr_error(e),
1269 }
1270}
1271
1272#[unsafe(no_mangle)]
1282pub extern "C" fn expr_batch_set_variable(batch: *mut ExprBatch, index: usize, value: Real) -> i32 {
1283 if batch.is_null() {
1284 return -1;
1285 }
1286
1287 let wrapper = unsafe { &*(batch as *const BatchWithArena) };
1288 let builder = unsafe { &mut *wrapper.batch };
1289
1290 match builder.set_param(index, value) {
1291 Ok(_) => 0,
1292 Err(_) => -2, }
1294}
1295
1296#[unsafe(no_mangle)]
1305pub extern "C" fn expr_batch_evaluate(batch: *mut ExprBatch, ctx: *mut ExprContext) -> i32 {
1306 if batch.is_null() {
1307 return -1;
1308 }
1309
1310 let wrapper = unsafe { &*(batch as *const BatchWithArena) };
1311 let builder = unsafe { &mut *wrapper.batch };
1312
1313 let eval_ctx = if ctx.is_null() {
1314 alloc::rc::Rc::new(EvalContext::new())
1315 } else {
1316 unsafe {
1317 let ctx_rc = &*(ctx as *const alloc::rc::Rc<EvalContext>);
1318 ctx_rc.clone()
1319 }
1320 };
1321
1322 match builder.eval(&eval_ctx) {
1323 Ok(_) => 0,
1324 Err(_) => -2, }
1326}
1327
1328#[unsafe(no_mangle)]
1337pub extern "C" fn expr_batch_get_result(batch: *const ExprBatch, index: usize) -> Real {
1338 if batch.is_null() {
1339 return Real::NAN;
1340 }
1341
1342 let wrapper = unsafe { &*(batch as *const BatchWithArena) };
1343 let builder = unsafe { &*wrapper.batch };
1344 builder.get_result(index).unwrap_or(Real::NAN)
1345}
1346
1347#[unsafe(no_mangle)]
1356pub extern "C" fn expr_batch_arena_bytes(batch: *const ExprBatch) -> usize {
1357 if batch.is_null() {
1358 return 0;
1359 }
1360
1361 let wrapper = unsafe { &*(batch as *const BatchWithArena) };
1362 let builder = unsafe { &*wrapper.batch };
1363 builder.arena_allocated_bytes()
1364}
1365
1366#[unsafe(no_mangle)]
1375pub extern "C" fn expr_batch_evaluate_ex(
1376 batch: *mut ExprBatch,
1377 ctx: *mut ExprContext,
1378) -> ExprResult {
1379 if batch.is_null() {
1380 return ExprResult::from_ffi_error(FFI_ERROR_NULL_POINTER, "Null batch pointer");
1381 }
1382
1383 let wrapper = unsafe { &*(batch as *const BatchWithArena) };
1384 let builder = unsafe { &mut *wrapper.batch };
1385
1386 let eval_ctx = if ctx.is_null() {
1387 alloc::rc::Rc::new(EvalContext::new())
1388 } else {
1389 unsafe {
1390 let ctx_rc = &*(ctx as *const alloc::rc::Rc<EvalContext>);
1391 ctx_rc.clone()
1392 }
1393 };
1394
1395 match builder.eval(&eval_ctx) {
1396 Ok(_) => ExprResult::success_value(0.0), Err(e) => ExprResult::from_expr_error(e),
1398 }
1399}
1400
/// Heuristic for sizing the arena passed to `expr_batch_new`:
/// per-expression node overhead (512 B each), doubled string storage,
/// and a fixed per-parameter footprint (64 B each), plus 50% headroom.
/// `_estimated_iterations` is accepted for ABI stability but unused.
#[unsafe(no_mangle)]
pub extern "C" fn expr_estimate_arena_size(
    expression_count: usize,
    total_expr_length: usize,
    param_count: usize,
    _estimated_iterations: usize,
) -> usize {
    let base = expression_count * 512 + total_expr_length * 2 + param_count * 64;
    base + base / 2
}
1435
/// Debug-only helper for C integration tests: deliberately panics so
/// the registered panic handler / flag path can be exercised.
#[cfg(debug_assertions)]
#[unsafe(no_mangle)]
pub extern "C" fn exp_rs_test_trigger_panic() {
    panic!("Test panic triggered from C");
}
1446
/// Bare-metal panic handler: notifies the C host via the registered
/// flag/log callback, then halts.
#[cfg(all(not(test), target_arch = "arm"))]
#[panic_handler]
fn panic(info: &core::panic::PanicInfo) -> ! {
    unsafe {
        // Raise the C-side flag first so the host sees the panic even
        // if logging faults.
        if !EXP_RS_PANIC_FLAG.is_null() {
            *EXP_RS_PANIC_FLAG = 1;
        }

        if !EXP_RS_LOG_FUNCTION.is_null() {
            // SAFETY: registration stored a pointer of LogFunctionType.
            let log_func: LogFunctionType = core::mem::transmute(EXP_RS_LOG_FUNCTION);

            if let Some(location) = info.location() {
                // Only the file path is logged; the line number is
                // currently discarded.
                let file = location.file();
                let _line = location.line(); log_func(file.as_ptr(), file.len());

            } else {
                // Length excludes the trailing NUL of the default message.
                log_func(PANIC_DEFAULT_MSG.as_ptr(), PANIC_DEFAULT_MSG.len() - 1);
            }
        }
    }

    // Halt: undefined instruction traps into the debugger/fault handler.
    #[cfg(target_arch = "arm")]
    loop {
        unsafe {
            core::arch::asm!("udf #0");
        }
    }

    // Unreachable here (fn is arm-gated) but keeps the fn total if the
    // outer cfg ever widens.
    #[cfg(not(target_arch = "arm"))]
    loop {
        core::hint::spin_loop();
    }
}
1505
#[cfg(test)]
mod tests {
    use super::*;

    /// Verifies copy_to_error_buffer's contract: content round-trips,
    /// the buffer is always NUL-terminated, and over-long messages are
    /// truncated to capacity - 1.
    #[test]
    fn test_error_buffer_null_termination() {
        use core::ffi::c_char;

        // Short message: round-trips exactly up to the terminator.
        let short_msg = "Test error message";
        let buffer = ExprResult::copy_to_error_buffer(short_msg);

        let mut found_null = false;
        for (i, &byte) in buffer.iter().enumerate() {
            if byte == 0 {
                found_null = true;
                // SAFETY: bytes 0..i came from a valid UTF-8 &str.
                let recovered_msg = unsafe {
                    core::str::from_utf8_unchecked(core::slice::from_raw_parts(
                        buffer.as_ptr() as *const u8,
                        i,
                    ))
                };
                assert_eq!(recovered_msg, short_msg);
                break;
            }
        }
        assert!(found_null, "Error buffer should be null terminated");

        // Message that exactly fills capacity - 1: terminator in last slot.
        let max_msg = "a".repeat(crate::types::EXP_RS_ERROR_BUFFER_SIZE - 1);
        let buffer = ExprResult::copy_to_error_buffer(&max_msg);

        assert_eq!(buffer[crate::types::EXP_RS_ERROR_BUFFER_SIZE - 1], 0);

        assert_eq!(
            buffer[crate::types::EXP_RS_ERROR_BUFFER_SIZE - 2],
            b'a' as c_char
        );

        // Over-long message: truncated, still NUL-terminated.
        let long_msg = "a".repeat(crate::types::EXP_RS_ERROR_BUFFER_SIZE + 10);
        let buffer = ExprResult::copy_to_error_buffer(&long_msg);

        assert_eq!(buffer[crate::types::EXP_RS_ERROR_BUFFER_SIZE - 1], 0);

        // SAFETY: first capacity - 1 bytes are ASCII 'a's from a &str.
        let recovered_msg = unsafe {
            core::str::from_utf8_unchecked(core::slice::from_raw_parts(
                buffer.as_ptr() as *const u8,
                crate::types::EXP_RS_ERROR_BUFFER_SIZE - 1,
            ))
        };
        assert_eq!(
            recovered_msg.len(),
            crate::types::EXP_RS_ERROR_BUFFER_SIZE - 1
        );
        assert!(recovered_msg.chars().all(|c| c == 'a'));
    }
}