// shape_jit/jit_array.rs
//! Native JIT array with guaranteed C-compatible layout.
//!
//! Replaces `Box<Vec<u64>>` for all JIT array operations, giving us:
//! - **Guaranteed memory layout** — offsets are ABI-stable, no `repr(Rust)` surprises
//! - **Zero-FFI array access** — inline AND + 2 LOADs instead of calling `jit_array_info`
//! - **Typed element tracking** — optional kind + side-buffer for strict numeric/bool fast paths
//! - **GC-ready** — can add `gc_mark` field when needed
//!
//! Memory layout (`#[repr(C)]`, all offsets guaranteed):
//! ```text
//!   offset  0: data               — *mut u64 (boxed element buffer)
//!   offset  8: len                — u64 (number of elements)
//!   offset 16: cap                — u64 (allocated capacity)
//!   offset 24: typed_data         — *mut u64 (raw typed payload mirror, optional)
//!   offset 32: element_kind       — u8  (ArrayElementKind tag)
//!   offset 33: typed_storage_kind — u8  (kind backing the typed allocation)
//!   offset 34: _padding           — [u8; 6] (explicit tail padding; total size 40)
//! ```
use crate::nan_boxing::{TAG_BOOL_FALSE, TAG_BOOL_TRUE, is_number, unbox_number};
use std::alloc::{self, Layout};
use std::slice;
21
/// Byte offset of `JitArray::data` (guaranteed by `#[repr(C)]`).
pub const DATA_OFFSET: i32 = 0;
/// Byte offset of `JitArray::len`.
pub const LEN_OFFSET: i32 = 8;
/// Byte offset of `JitArray::cap`.
pub const CAP_OFFSET: i32 = 16;
/// Byte offset of `JitArray::typed_data`.
pub const TYPED_DATA_OFFSET: i32 = 24;
/// Byte offset of `JitArray::element_kind`.
pub const ELEMENT_KIND_OFFSET: i32 = 32;
/// Byte offset of `JitArray::typed_storage_kind` — previously missing, added
/// for consistency so JIT codegen never hard-codes the raw `33`.
pub const TYPED_STORAGE_KIND_OFFSET: i32 = 33;
27
/// Element-kind tag stored in `JitArray::element_kind`.
///
/// `#[repr(u8)]` pins the discriminants so JIT-emitted code can read the
/// tag byte directly.
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ArrayElementKind {
    Untyped = 0,
    Float64 = 1,
    Int64 = 2,
    Bool = 3,
}

impl ArrayElementKind {
    /// Decode a raw tag byte; any out-of-range value maps to `Untyped`.
    #[inline]
    pub fn from_byte(byte: u8) -> Self {
        if byte == Self::Float64 as u8 {
            Self::Float64
        } else if byte == Self::Int64 as u8 {
            Self::Int64
        } else if byte == Self::Bool as u8 {
            Self::Bool
        } else {
            Self::Untyped
        }
    }

    /// Encode this kind as its raw tag byte.
    #[inline]
    pub const fn as_byte(self) -> u8 {
        self as u8
    }
}
53
/// Native JIT array with guaranteed C-compatible layout.
///
/// `#[repr(C)]` makes every field offset ABI-stable (see the `*_OFFSET`
/// constants) so JIT-compiled code can load fields with plain offset
/// addressing instead of an FFI call.
#[repr(C)]
pub struct JitArray {
    /// offset 0: pointer to the heap-allocated element buffer; null when
    /// nothing has been allocated (`cap == 0`).
    pub data: *mut u64,
    /// offset 8: number of elements currently stored (always `<= cap`).
    pub len: u64,
    /// offset 16: allocated capacity, in u64 elements.
    pub cap: u64,
    /// offset 24: optional typed side-buffer mirroring `data` indices
    /// (bitset for bools, 8-byte lanes for numbers); null when absent.
    pub typed_data: *mut u64,
    /// offset 32: current `ArrayElementKind` as a raw tag byte.
    pub element_kind: u8,
    /// offset 33: kind the `typed_data` allocation was sized for — may lag
    /// behind `element_kind` after a demotion to `Untyped`, since the buffer
    /// is kept allocated for later reuse / correct deallocation.
    pub typed_storage_kind: u8,
    /// offset 34: explicit tail padding; keeps the total size a stable 40 bytes.
    pub _padding: [u8; 6],
}
72
73impl JitArray {
74    /// Create an empty array.
75    pub fn new() -> Self {
76        Self {
77            data: std::ptr::null_mut(),
78            len: 0,
79            cap: 0,
80            typed_data: std::ptr::null_mut(),
81            element_kind: ArrayElementKind::Untyped.as_byte(),
82            typed_storage_kind: ArrayElementKind::Untyped.as_byte(),
83            _padding: [0; 6],
84        }
85    }
86
87    /// Create an array with pre-allocated capacity.
88    pub fn with_capacity(cap: usize) -> Self {
89        if cap == 0 {
90            return Self::new();
91        }
92        let data = Self::alloc_u64_buffer(cap);
93        Self {
94            data,
95            len: 0,
96            cap: cap as u64,
97            typed_data: std::ptr::null_mut(),
98            element_kind: ArrayElementKind::Untyped.as_byte(),
99            typed_storage_kind: ArrayElementKind::Untyped.as_byte(),
100            _padding: [0; 6],
101        }
102    }
103
104    /// Create an array by copying from a slice.
105    pub fn from_slice(elements: &[u64]) -> Self {
106        if elements.is_empty() {
107            return Self::new();
108        }
109
110        let cap = elements.len();
111        let data = Self::alloc_u64_buffer(cap);
112        unsafe {
113            std::ptr::copy_nonoverlapping(elements.as_ptr(), data, elements.len());
114        }
115
116        let mut arr = Self {
117            data,
118            len: elements.len() as u64,
119            cap: cap as u64,
120            typed_data: std::ptr::null_mut(),
121            element_kind: ArrayElementKind::Untyped.as_byte(),
122            typed_storage_kind: ArrayElementKind::Untyped.as_byte(),
123            _padding: [0; 6],
124        };
125        arr.initialize_typed_from_boxed(elements);
126        arr
127    }
128
129    /// Create an array from an owned `Vec<u64>` (takes ownership of the data).
130    pub fn from_vec(vec: Vec<u64>) -> Self {
131        if vec.is_empty() {
132            return Self::new();
133        }
134
135        let mut boxed = vec.into_boxed_slice();
136        let len = boxed.len();
137        let cap = len;
138        let data = boxed.as_mut_ptr();
139        std::mem::forget(boxed);
140
141        let mut arr = Self {
142            data,
143            len: len as u64,
144            cap: cap as u64,
145            typed_data: std::ptr::null_mut(),
146            element_kind: ArrayElementKind::Untyped.as_byte(),
147            typed_storage_kind: ArrayElementKind::Untyped.as_byte(),
148            _padding: [0; 6],
149        };
150
151        let elements = unsafe { slice::from_raw_parts(data, len) };
152        arr.initialize_typed_from_boxed(elements);
153        arr
154    }
155
156    #[inline]
157    fn alloc_u64_buffer(cap: usize) -> *mut u64 {
158        let layout = Layout::array::<u64>(cap).unwrap();
159        let data = unsafe { alloc::alloc(layout) as *mut u64 };
160        if data.is_null() {
161            alloc::handle_alloc_error(layout);
162        }
163        data
164    }
165
166    #[inline]
167    fn realloc_u64_buffer(ptr: *mut u64, old_cap: usize, new_cap: usize) -> *mut u64 {
168        let old_layout = Layout::array::<u64>(old_cap).unwrap();
169        let new_layout = Layout::array::<u64>(new_cap).unwrap();
170        let data =
171            unsafe { alloc::realloc(ptr as *mut u8, old_layout, new_layout.size()) as *mut u64 };
172        if data.is_null() {
173            alloc::handle_alloc_error(new_layout);
174        }
175        data
176    }
177
178    #[inline]
179    fn dealloc_u64_buffer(ptr: *mut u64, cap: usize) {
180        let layout = Layout::array::<u64>(cap).unwrap();
181        unsafe {
182            alloc::dealloc(ptr as *mut u8, layout);
183        }
184    }
185
186    #[inline]
187    fn typed_layout(kind: ArrayElementKind, cap: usize) -> Option<Layout> {
188        if cap == 0 {
189            return None;
190        }
191        match kind {
192            ArrayElementKind::Untyped => None,
193            ArrayElementKind::Bool => Layout::array::<u8>(cap.div_ceil(8)).ok(),
194            ArrayElementKind::Float64 | ArrayElementKind::Int64 => Layout::array::<u64>(cap).ok(),
195        }
196    }
197
198    #[inline]
199    fn alloc_typed_buffer(kind: ArrayElementKind, cap: usize) -> *mut u64 {
200        let Some(layout) = Self::typed_layout(kind, cap) else {
201            return std::ptr::null_mut();
202        };
203        let data = unsafe { alloc::alloc(layout) } as *mut u64;
204        if data.is_null() {
205            alloc::handle_alloc_error(layout);
206        }
207        data
208    }
209
210    #[inline]
211    fn realloc_typed_buffer(
212        ptr: *mut u64,
213        kind: ArrayElementKind,
214        old_cap: usize,
215        new_cap: usize,
216    ) -> *mut u64 {
217        let old_layout = Self::typed_layout(kind, old_cap)
218            .expect("typed_layout must exist for old typed allocation");
219        let new_layout = Self::typed_layout(kind, new_cap)
220            .expect("typed_layout must exist for new typed allocation");
221        let data =
222            unsafe { alloc::realloc(ptr as *mut u8, old_layout, new_layout.size()) } as *mut u64;
223        if data.is_null() {
224            alloc::handle_alloc_error(new_layout);
225        }
226        data
227    }
228
229    #[inline]
230    fn dealloc_typed_buffer(ptr: *mut u64, kind: ArrayElementKind, cap: usize) {
231        if ptr.is_null() {
232            return;
233        }
234        if let Some(layout) = Self::typed_layout(kind, cap) {
235            unsafe {
236                alloc::dealloc(ptr as *mut u8, layout);
237            }
238        }
239    }
240
241    #[inline]
242    fn kind(&self) -> ArrayElementKind {
243        ArrayElementKind::from_byte(self.element_kind)
244    }
245
246    #[inline]
247    fn set_kind(&mut self, kind: ArrayElementKind) {
248        self.element_kind = kind.as_byte();
249    }
250
251    #[inline]
252    fn typed_storage_kind(&self) -> ArrayElementKind {
253        ArrayElementKind::from_byte(self.typed_storage_kind)
254    }
255
256    #[inline]
257    pub fn element_kind(&self) -> ArrayElementKind {
258        self.kind()
259    }
260
261    #[inline]
262    pub fn typed_data_ptr(&self) -> *const u64 {
263        self.typed_data
264    }
265
266    #[inline]
267    fn try_number_to_i64(bits: u64) -> Option<i64> {
268        if !is_number(bits) {
269            return None;
270        }
271        let n = unbox_number(bits);
272        if !n.is_finite() || n < i64::MIN as f64 || n > i64::MAX as f64 {
273            return None;
274        }
275        let i = n as i64;
276        if (i as f64) == n { Some(i) } else { None }
277    }
278
279    fn infer_kind(elements: &[u64]) -> ArrayElementKind {
280        if elements.is_empty() {
281            return ArrayElementKind::Untyped;
282        }
283
284        if elements
285            .iter()
286            .all(|&v| v == TAG_BOOL_TRUE || v == TAG_BOOL_FALSE)
287        {
288            return ArrayElementKind::Bool;
289        }
290
291        let all_numbers = elements.iter().all(|&v| is_number(v));
292        if !all_numbers {
293            return ArrayElementKind::Untyped;
294        }
295
296        if elements
297            .iter()
298            .all(|&v| Self::try_number_to_i64(v).is_some())
299        {
300            ArrayElementKind::Int64
301        } else {
302            ArrayElementKind::Float64
303        }
304    }
305
306    fn bootstrap_kind_from_first_value(value: u64) -> ArrayElementKind {
307        if value == TAG_BOOL_TRUE || value == TAG_BOOL_FALSE {
308            ArrayElementKind::Bool
309        } else if is_number(value) {
310            // Prefer Float64 for push-built numeric arrays to avoid
311            // accidental integer pinning in float-heavy kernels.
312            ArrayElementKind::Float64
313        } else {
314            ArrayElementKind::Untyped
315        }
316    }
317
318    fn ensure_typed_buffer(&mut self, kind: ArrayElementKind) {
319        if self.cap == 0 || kind == ArrayElementKind::Untyped {
320            return;
321        }
322        if self.typed_data.is_null() {
323            self.typed_data = Self::alloc_typed_buffer(kind, self.cap as usize);
324            self.typed_storage_kind = kind.as_byte();
325            return;
326        }
327        let current = self.typed_storage_kind();
328        if current != kind {
329            Self::dealloc_typed_buffer(self.typed_data, current, self.cap as usize);
330            self.typed_data = Self::alloc_typed_buffer(kind, self.cap as usize);
331            self.typed_storage_kind = kind.as_byte();
332        }
333    }
334
335    fn write_typed_slot(&mut self, index: usize, boxed_value: u64) -> bool {
336        if self.typed_data.is_null() || index >= self.cap as usize {
337            return false;
338        }
339
340        let kind = self.kind();
341        let raw = match kind {
342            ArrayElementKind::Untyped => return false,
343            ArrayElementKind::Float64 => {
344                if !is_number(boxed_value) {
345                    return false;
346                }
347                boxed_value
348            }
349            ArrayElementKind::Int64 => match Self::try_number_to_i64(boxed_value) {
350                Some(v) => v as u64,
351                None => return false,
352            },
353            ArrayElementKind::Bool => {
354                if boxed_value == TAG_BOOL_TRUE {
355                    1
356                } else if boxed_value == TAG_BOOL_FALSE {
357                    0
358                } else {
359                    return false;
360                }
361            }
362        };
363
364        match kind {
365            ArrayElementKind::Bool => {
366                let byte_idx = index >> 3;
367                let bit_idx = (index & 7) as u8;
368                let mask = 1u8 << bit_idx;
369                let byte_ptr = self.typed_data as *mut u8;
370                unsafe {
371                    let prev = *byte_ptr.add(byte_idx);
372                    let next = if raw == 0 { prev & !mask } else { prev | mask };
373                    *byte_ptr.add(byte_idx) = next;
374                }
375                true
376            }
377            _ => {
378                unsafe {
379                    *self.typed_data.add(index) = raw;
380                }
381                true
382            }
383        }
384    }
385
386    fn initialize_typed_from_boxed(&mut self, elements: &[u64]) {
387        let kind = Self::infer_kind(elements);
388        if kind == ArrayElementKind::Untyped {
389            self.set_kind(ArrayElementKind::Untyped);
390            return;
391        }
392
393        self.ensure_typed_buffer(kind);
394        if self.typed_data.is_null() {
395            self.set_kind(ArrayElementKind::Untyped);
396            return;
397        }
398
399        self.set_kind(kind);
400        for (idx, &value) in elements.iter().enumerate() {
401            if !self.write_typed_slot(idx, value) {
402                self.set_kind(ArrayElementKind::Untyped);
403                return;
404            }
405        }
406    }
407
408    fn update_typed_on_write(&mut self, index: usize, boxed_value: u64) {
409        let kind = self.kind();
410
411        if kind == ArrayElementKind::Untyped {
412            if self.len == 0 && index == 0 {
413                let bootstrap = Self::bootstrap_kind_from_first_value(boxed_value);
414                if bootstrap != ArrayElementKind::Untyped {
415                    self.ensure_typed_buffer(bootstrap);
416                    if !self.typed_data.is_null() {
417                        self.set_kind(bootstrap);
418                        if !self.write_typed_slot(index, boxed_value) {
419                            self.set_kind(ArrayElementKind::Untyped);
420                        }
421                    }
422                }
423            }
424            return;
425        }
426
427        if !self.write_typed_slot(index, boxed_value) {
428            // Keep buffer allocated; dropping kind gates correctness.
429            self.set_kind(ArrayElementKind::Untyped);
430        }
431    }
432
433    /// Number of elements.
434    #[inline]
435    pub fn len(&self) -> usize {
436        self.len as usize
437    }
438
439    /// Check if empty.
440    #[inline]
441    pub fn is_empty(&self) -> bool {
442        self.len == 0
443    }
444
445    /// View elements as a slice.
446    #[inline]
447    pub fn as_slice(&self) -> &[u64] {
448        if self.data.is_null() || self.len == 0 {
449            return &[];
450        }
451        unsafe { slice::from_raw_parts(self.data, self.len as usize) }
452    }
453
454    /// View elements as a mutable slice.
455    #[inline]
456    pub fn as_mut_slice(&mut self) -> &mut [u64] {
457        if self.data.is_null() || self.len == 0 {
458            return &mut [];
459        }
460        unsafe { slice::from_raw_parts_mut(self.data, self.len as usize) }
461    }
462
463    /// Get element by index (bounds-checked).
464    #[inline]
465    pub fn get(&self, index: usize) -> Option<&u64> {
466        if index < self.len as usize {
467            unsafe { Some(&*self.data.add(index)) }
468        } else {
469            None
470        }
471    }
472
473    /// Set an element by index (bounds-checked).
474    /// Returns true when the write succeeded.
475    pub fn set_boxed(&mut self, index: usize, value: u64) -> bool {
476        if index >= self.len as usize {
477            return false;
478        }
479        unsafe {
480            *self.data.add(index) = value;
481        }
482        self.update_typed_on_write(index, value);
483        true
484    }
485
486    /// Push an element (amortized O(1) with doubling growth).
487    pub fn push(&mut self, value: u64) {
488        if self.len == self.cap {
489            self.grow();
490        }
491        let index = self.len as usize;
492        unsafe {
493            *self.data.add(index) = value;
494        }
495        self.update_typed_on_write(index, value);
496        self.len += 1;
497    }
498
499    /// Ensure capacity is at least `min_capacity` elements.
500    pub fn reserve(&mut self, min_capacity: usize) {
501        if min_capacity <= self.cap as usize {
502            return;
503        }
504        let mut new_cap = if self.cap == 0 {
505            4usize
506        } else {
507            self.cap as usize
508        };
509        while new_cap < min_capacity {
510            new_cap = new_cap.saturating_mul(2);
511        }
512        self.grow_to(new_cap);
513    }
514
515    /// Pop the last element.
516    pub fn pop(&mut self) -> Option<u64> {
517        if self.len == 0 {
518            return None;
519        }
520        self.len -= 1;
521        unsafe { Some(*self.data.add(self.len as usize)) }
522    }
523
524    /// Iterate over elements.
525    #[inline]
526    pub fn iter(&self) -> slice::Iter<'_, u64> {
527        self.as_slice().iter()
528    }
529
530    /// Get first element.
531    #[inline]
532    pub fn first(&self) -> Option<&u64> {
533        if self.len > 0 {
534            unsafe { Some(&*self.data) }
535        } else {
536            None
537        }
538    }
539
540    /// Get last element.
541    #[inline]
542    pub fn last(&self) -> Option<&u64> {
543        if self.len > 0 {
544            unsafe { Some(&*self.data.add(self.len as usize - 1)) }
545        } else {
546            None
547        }
548    }
549
550    /// Deep copy of element buffer.
551    pub fn clone_data(&self) -> Self {
552        Self::from_slice(self.as_slice())
553    }
554
555    /// Convert to Vec<u64> for interop with remaining Rust code paths.
556    pub fn into_vec(self) -> Vec<u64> {
557        let vec = self.as_slice().to_vec();
558        // Don't drop self normally — we've copied the data.
559        // The original buffer will be freed by Drop.
560        vec
561    }
562
563    /// Raw pointer to data buffer (for JIT inline access).
564    #[inline]
565    pub fn as_ptr(&self) -> *const u64 {
566        self.data
567    }
568
569    /// Grow the buffer using amortized doubling.
570    fn grow(&mut self) {
571        let new_cap = if self.cap == 0 { 4 } else { self.cap * 2 };
572        self.grow_to(new_cap as usize);
573    }
574
575    /// Reallocate element storage to `new_cap` entries.
576    fn grow_to(&mut self, new_cap: usize) {
577        let old_cap = self.cap as usize;
578
579        self.data = if self.data.is_null() {
580            Self::alloc_u64_buffer(new_cap)
581        } else {
582            Self::realloc_u64_buffer(self.data, old_cap, new_cap)
583        };
584
585        if !self.typed_data.is_null() {
586            let typed_kind = self.typed_storage_kind();
587            self.typed_data = if old_cap == 0 {
588                Self::alloc_typed_buffer(typed_kind, new_cap)
589            } else {
590                Self::realloc_typed_buffer(self.typed_data, typed_kind, old_cap, new_cap)
591            };
592        }
593
594        self.cap = new_cap as u64;
595    }
596}
597
598impl Drop for JitArray {
599    fn drop(&mut self) {
600        if !self.data.is_null() && self.cap > 0 {
601            Self::dealloc_u64_buffer(self.data, self.cap as usize);
602        }
603        if !self.typed_data.is_null() && self.cap > 0 {
604            let typed_kind = self.typed_storage_kind();
605            Self::dealloc_typed_buffer(self.typed_data, typed_kind, self.cap as usize);
606        }
607    }
608}
609
610// Index access.
611impl std::ops::Index<usize> for JitArray {
612    type Output = u64;
613
614    #[inline]
615    fn index(&self, index: usize) -> &u64 {
616        assert!(index < self.len as usize, "JitArray index out of bounds");
617        unsafe { &*self.data.add(index) }
618    }
619}
620
621impl std::ops::IndexMut<usize> for JitArray {
622    #[inline]
623    fn index_mut(&mut self, index: usize) -> &mut u64 {
624        assert!(index < self.len as usize, "JitArray index out of bounds");
625        unsafe { &mut *self.data.add(index) }
626    }
627}
628
#[cfg(test)]
mod tests {
    use super::*;
    use crate::nan_boxing::box_number;

    // Pins the `#[repr(C)]` field offsets and 40-byte size that JIT-emitted
    // inline loads depend on.
    #[test]
    fn test_repr_c_layout() {
        assert_eq!(std::mem::offset_of!(JitArray, data), DATA_OFFSET as usize);
        assert_eq!(std::mem::offset_of!(JitArray, len), LEN_OFFSET as usize);
        assert_eq!(std::mem::offset_of!(JitArray, cap), CAP_OFFSET as usize);
        assert_eq!(
            std::mem::offset_of!(JitArray, typed_data),
            TYPED_DATA_OFFSET as usize
        );
        assert_eq!(
            std::mem::offset_of!(JitArray, element_kind),
            ELEMENT_KIND_OFFSET as usize
        );
        assert_eq!(std::mem::size_of::<JitArray>(), 40);
    }

    #[test]
    fn test_new_empty() {
        let arr = JitArray::new();
        assert_eq!(arr.len(), 0);
        assert!(arr.is_empty());
        let empty: &[u64] = &[];
        assert_eq!(arr.as_slice(), empty);
        assert_eq!(arr.element_kind(), ArrayElementKind::Untyped);
    }

    #[test]
    fn test_from_slice() {
        let arr = JitArray::from_slice(&[1u64, 2, 3]);
        assert_eq!(arr.len(), 3);
        assert_eq!(arr.as_slice(), &[1u64, 2, 3]);
    }

    #[test]
    fn test_from_vec() {
        let arr = JitArray::from_vec(vec![10, 20, 30]);
        assert_eq!(arr.len(), 3);
        assert_eq!(arr.as_slice(), &[10, 20, 30]);
    }

    #[test]
    fn test_push_pop() {
        let mut arr = JitArray::new();
        arr.push(1);
        arr.push(2);
        arr.push(3);
        assert_eq!(arr.len(), 3);
        assert_eq!(arr.as_slice(), &[1, 2, 3]);

        assert_eq!(arr.pop(), Some(3));
        assert_eq!(arr.pop(), Some(2));
        assert_eq!(arr.len(), 1);
        assert_eq!(arr.pop(), Some(1));
        assert_eq!(arr.pop(), None);
    }

    #[test]
    fn test_get() {
        let arr = JitArray::from_slice(&[10, 20, 30]);
        assert_eq!(arr.get(0), Some(&10));
        assert_eq!(arr.get(2), Some(&30));
        assert_eq!(arr.get(3), None);
    }

    #[test]
    fn test_first_last() {
        let arr = JitArray::from_slice(&[10, 20, 30]);
        assert_eq!(arr.first(), Some(&10));
        assert_eq!(arr.last(), Some(&30));

        let empty = JitArray::new();
        assert_eq!(empty.first(), None);
        assert_eq!(empty.last(), None);
    }

    #[test]
    fn test_clone_data() {
        let arr = JitArray::from_slice(&[1, 2, 3]);
        let cloned = arr.clone_data();
        assert_eq!(cloned.as_slice(), arr.as_slice());
        // Ensure different buffers.
        assert_ne!(arr.data, cloned.data);
    }

    #[test]
    fn test_into_vec() {
        let arr = JitArray::from_slice(&[5, 10, 15]);
        let vec = arr.into_vec();
        assert_eq!(vec, vec![5, 10, 15]);
    }

    // Pushing past several capacity doublings must preserve all elements.
    #[test]
    fn test_growth() {
        let mut arr = JitArray::new();
        for i in 0..100 {
            arr.push(i);
        }
        assert_eq!(arr.len(), 100);
        for i in 0..100 {
            assert_eq!(arr[i], i as u64);
        }
    }

    #[test]
    fn test_index_access() {
        let mut arr = JitArray::from_slice(&[10, 20, 30]);
        assert_eq!(arr[0], 10);
        assert_eq!(arr[1], 20);
        arr[1] = 99;
        assert_eq!(arr[1], 99);
    }

    #[test]
    fn test_set_boxed_updates_value() {
        let mut arr = JitArray::from_slice(&[10, 20, 30]);
        assert!(arr.set_boxed(1, 99));
        assert_eq!(arr[1], 99);
        // Out-of-bounds writes are rejected, not grown.
        assert!(!arr.set_boxed(4, 123));
    }

    #[test]
    fn test_with_capacity() {
        let mut arr = JitArray::with_capacity(10);
        assert_eq!(arr.len(), 0);
        assert!(arr.is_empty());
        arr.push(42);
        assert_eq!(arr.len(), 1);
        assert_eq!(arr[0], 42);
    }

    #[test]
    fn test_reserve_preserves_existing_elements() {
        let mut arr = JitArray::from_slice(&[1, 2, 3]);
        let old_cap = arr.cap;
        arr.reserve(64);
        assert!(arr.cap >= 64);
        assert!(arr.cap >= old_cap);
        assert_eq!(arr.as_slice(), &[1, 2, 3]);
    }

    #[test]
    fn test_iter() {
        let arr = JitArray::from_slice(&[1, 2, 3]);
        let sum: u64 = arr.iter().sum();
        assert_eq!(sum, 6);
    }

    // First pushed number speculatively adopts Float64 (see
    // bootstrap_kind_from_first_value) and allocates the typed mirror.
    #[test]
    fn test_bootstrap_float_kind_on_first_push() {
        let mut arr = JitArray::new();
        arr.push(box_number(1.5));
        assert_eq!(arr.element_kind(), ArrayElementKind::Float64);
        assert!(!arr.typed_data_ptr().is_null());
    }

    #[test]
    fn test_bootstrap_bool_kind_on_first_push() {
        let mut arr = JitArray::new();
        arr.push(TAG_BOOL_TRUE);
        assert_eq!(arr.element_kind(), ArrayElementKind::Bool);
        assert!(!arr.typed_data_ptr().is_null());
    }

    // A write that no longer fits the speculated kind demotes to Untyped.
    #[test]
    fn test_invalidate_bool_kind_on_non_bool_write() {
        let mut arr = JitArray::new();
        arr.push(TAG_BOOL_TRUE);
        arr.push(TAG_BOOL_FALSE);
        assert_eq!(arr.element_kind(), ArrayElementKind::Bool);
        arr.push(box_number(2.0));
        assert_eq!(arr.element_kind(), ArrayElementKind::Untyped);
    }
}