// shape_jit/jit_array.rs
1//! Native JIT array with guaranteed C-compatible layout.
2//!
3//! Replaces `Box<Vec<u64>>` for all JIT array operations, giving us:
4//! - **Guaranteed memory layout** — offsets are ABI-stable, no `repr(Rust)` surprises
5//! - **Zero-FFI array access** — inline AND + 2 LOADs instead of calling `jit_array_info`
6//! - **Typed element tracking** — optional kind + side-buffer for strict numeric/bool fast paths
7//! - **GC-ready** — can add `gc_mark` field when needed
8//!
9//! Memory layout (`#[repr(C)]`, all offsets guaranteed):
10//! ```text
11//!   offset  0: data              — *mut u64 (boxed element buffer)
12//!   offset  8: len               — u64 (number of elements)
13//!   offset 16: cap               — u64 (allocated capacity)
14//!   offset 24: typed_data        — *mut u64 (raw typed payload mirror, optional)
15//!   offset 32: element_kind      — u8  (ArrayElementKind tag)
//!   offset 33: typed_storage_kind — u8
//!   offset 34: _padding          — [u8; 6] (explicit padding to 8-byte pointer alignment)
//!   offset 40: slice_parent_arc  — *const () (leaked Arc<MatrixData> for FloatArraySlice round-trip)
18//!   offset 48: slice_offset      — u32 (row offset into parent matrix data)
19//!   offset 52: slice_len         — u32 (number of elements in the slice)
20//! ```
21
22use crate::nan_boxing::{TAG_BOOL_FALSE, TAG_BOOL_TRUE, is_number, unbox_number};
23use std::alloc::{self, Layout};
24use std::slice;
25
/// Byte offset of `JitArray::data`; baked into JIT-emitted field loads and
/// verified against `offset_of!` in `test_repr_c_layout`.
pub const DATA_OFFSET: i32 = 0;
/// Byte offset of `JitArray::len`.
pub const LEN_OFFSET: i32 = 8;
/// Byte offset of `JitArray::cap`.
pub const CAP_OFFSET: i32 = 16;
/// Byte offset of `JitArray::typed_data`.
pub const TYPED_DATA_OFFSET: i32 = 24;
/// Byte offset of `JitArray::element_kind`.
pub const ELEMENT_KIND_OFFSET: i32 = 32;
31
/// Element-kind tag stored in `JitArray::element_kind` (one byte, `#[repr(u8)]`).
///
/// Discriminant values are ABI-stable: JIT-emitted code compares the raw
/// byte, so never renumber existing variants.
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ArrayElementKind {
    Untyped = 0,
    Float64 = 1,
    Int64 = 2,
    Bool = 3,
    I8 = 4,
    I16 = 5,
    I32 = 6,
    U8 = 7,
    U16 = 8,
    U32 = 9,
    U64 = 10,
    F32 = 11,
}

impl ArrayElementKind {
    /// Decode a tag byte; any unknown byte falls back to `Untyped`.
    #[inline]
    pub fn from_byte(byte: u8) -> Self {
        // Table indexed by discriminant; keep in sync with the enum above.
        const KINDS: [ArrayElementKind; 12] = [
            ArrayElementKind::Untyped,
            ArrayElementKind::Float64,
            ArrayElementKind::Int64,
            ArrayElementKind::Bool,
            ArrayElementKind::I8,
            ArrayElementKind::I16,
            ArrayElementKind::I32,
            ArrayElementKind::U8,
            ArrayElementKind::U16,
            ArrayElementKind::U32,
            ArrayElementKind::U64,
            ArrayElementKind::F32,
        ];
        KINDS
            .get(byte as usize)
            .copied()
            .unwrap_or(ArrayElementKind::Untyped)
    }

    /// Encode this kind as its stable tag byte.
    #[inline]
    pub const fn as_byte(self) -> u8 {
        self as u8
    }
}
73
/// Native JIT array with guaranteed C-compatible layout.
///
/// `#[repr(C)]` plus explicit padding makes every field offset ABI-stable;
/// the `*_OFFSET` constants mirror this layout and `test_repr_c_layout`
/// pins both the offsets and the total size (56 bytes).
#[repr(C)]
pub struct JitArray {
    /// Pointer to boxed element buffer (heap-allocated; null until first allocation)
    pub data: *mut u64,
    /// Number of elements currently stored
    pub len: u64,
    /// Allocated capacity (number of u64 elements)
    pub cap: u64,
    /// Optional raw typed payload buffer (mirrors `data` indices); null when untyped
    pub typed_data: *mut u64,
    /// `ArrayElementKind` as byte
    pub element_kind: u8,
    /// Allocation layout backing `typed_data` (tracks bool bitset vs 8-byte lanes).
    /// May differ from `element_kind` after a demotion to `Untyped`.
    pub typed_storage_kind: u8,
    /// Keep struct alignment stable and explicit (bytes 34–39).
    pub _padding: [u8; 6],
    /// For FloatArraySlice round-trip: leaked `Arc<MatrixData>` pointer.
    /// Null for non-slice arrays. On JIT exit, this is reconstituted into
    /// an Arc to rebuild the FloatArraySlice with correct parent linkage.
    pub slice_parent_arc: *const (),
    /// Row offset into the parent matrix's data buffer (for FloatArraySlice).
    pub slice_offset: u32,
    /// Element count of the slice (for FloatArraySlice).
    pub slice_len: u32,
}
100
101impl JitArray {
    /// Create an empty array.
    ///
    /// No heap allocation happens here; `data`/`typed_data` stay null and
    /// the first `push`/`reserve` performs the initial allocation.
    pub fn new() -> Self {
        Self {
            data: std::ptr::null_mut(),
            len: 0,
            cap: 0,
            typed_data: std::ptr::null_mut(),
            element_kind: ArrayElementKind::Untyped.as_byte(),
            typed_storage_kind: ArrayElementKind::Untyped.as_byte(),
            _padding: [0; 6],
            slice_parent_arc: std::ptr::null(),
            slice_offset: 0,
            slice_len: 0,
        }
    }
117
    /// Create an array with pre-allocated capacity.
    ///
    /// The buffer is uninitialized; `len` starts at 0 so no uninitialized
    /// slot is ever observable through `as_slice`/`get`.
    pub fn with_capacity(cap: usize) -> Self {
        if cap == 0 {
            // Avoid a zero-sized allocation; lazily allocate on first push.
            return Self::new();
        }
        let data = Self::alloc_u64_buffer(cap);
        Self {
            data,
            len: 0,
            cap: cap as u64,
            typed_data: std::ptr::null_mut(),
            element_kind: ArrayElementKind::Untyped.as_byte(),
            typed_storage_kind: ArrayElementKind::Untyped.as_byte(),
            _padding: [0; 6],
            slice_parent_arc: std::ptr::null(),
            slice_offset: 0,
            slice_len: 0,
        }
    }
137
    /// Create an array by copying from a slice.
    ///
    /// Also infers an element kind from the contents and, when the slice is
    /// homogeneous, materializes the typed side-buffer.
    pub fn from_slice(elements: &[u64]) -> Self {
        if elements.is_empty() {
            return Self::new();
        }

        let cap = elements.len();
        let data = Self::alloc_u64_buffer(cap);
        // SAFETY: `data` was just allocated with room for `cap == elements.len()`
        // u64 slots, and source/destination cannot overlap.
        unsafe {
            std::ptr::copy_nonoverlapping(elements.as_ptr(), data, elements.len());
        }

        let mut arr = Self {
            data,
            len: elements.len() as u64,
            cap: cap as u64,
            typed_data: std::ptr::null_mut(),
            element_kind: ArrayElementKind::Untyped.as_byte(),
            typed_storage_kind: ArrayElementKind::Untyped.as_byte(),
            _padding: [0; 6],
            slice_parent_arc: std::ptr::null(),
            slice_offset: 0,
            slice_len: 0,
        };
        arr.initialize_typed_from_boxed(elements);
        arr
    }

    /// Create an array from an owned `Vec<u64>` (takes ownership of the data).
    ///
    /// `into_boxed_slice` shrinks the allocation to exactly `len`, so the
    /// `Layout::array::<u64>(cap)` used at dealloc time matches the allocation.
    pub fn from_vec(vec: Vec<u64>) -> Self {
        if vec.is_empty() {
            return Self::new();
        }

        let mut boxed = vec.into_boxed_slice();
        let len = boxed.len();
        let cap = len;
        let data = boxed.as_mut_ptr();
        // Ownership of the buffer transfers to the JitArray; Drop frees it.
        std::mem::forget(boxed);

        let mut arr = Self {
            data,
            len: len as u64,
            cap: cap as u64,
            typed_data: std::ptr::null_mut(),
            element_kind: ArrayElementKind::Untyped.as_byte(),
            typed_storage_kind: ArrayElementKind::Untyped.as_byte(),
            _padding: [0; 6],
            slice_parent_arc: std::ptr::null(),
            slice_offset: 0,
            slice_len: 0,
        };

        // SAFETY: `data` points at `len` initialized u64s we just took over.
        let elements = unsafe { slice::from_raw_parts(data, len) };
        arr.initialize_typed_from_boxed(elements);
        arr
    }
195
    /// Allocate an uninitialized buffer of `cap` u64 slots.
    ///
    /// Callers must pass `cap > 0`: `std::alloc::alloc` with a zero-sized
    /// layout is undefined behavior (every call site guards on this).
    /// Aborts via `handle_alloc_error` on allocation failure.
    #[inline]
    fn alloc_u64_buffer(cap: usize) -> *mut u64 {
        let layout = Layout::array::<u64>(cap).unwrap();
        // SAFETY: layout is non-zero-sized because cap > 0 (caller contract).
        let data = unsafe { alloc::alloc(layout) as *mut u64 };
        if data.is_null() {
            alloc::handle_alloc_error(layout);
        }
        data
    }

    /// Reallocate a u64 buffer from `old_cap` to `new_cap` slots,
    /// preserving the first `min(old_cap, new_cap)` elements.
    #[inline]
    fn realloc_u64_buffer(ptr: *mut u64, old_cap: usize, new_cap: usize) -> *mut u64 {
        let old_layout = Layout::array::<u64>(old_cap).unwrap();
        let new_layout = Layout::array::<u64>(new_cap).unwrap();
        // SAFETY: `ptr` was allocated with `old_layout` by alloc/realloc above.
        let data =
            unsafe { alloc::realloc(ptr as *mut u8, old_layout, new_layout.size()) as *mut u64 };
        if data.is_null() {
            alloc::handle_alloc_error(new_layout);
        }
        data
    }

    /// Free a u64 buffer previously allocated for `cap` slots.
    #[inline]
    fn dealloc_u64_buffer(ptr: *mut u64, cap: usize) {
        let layout = Layout::array::<u64>(cap).unwrap();
        // SAFETY: `ptr` was allocated with this exact layout.
        unsafe {
            alloc::dealloc(ptr as *mut u8, layout);
        }
    }
225
226    #[inline]
227    fn typed_layout(kind: ArrayElementKind, cap: usize) -> Option<Layout> {
228        if cap == 0 {
229            return None;
230        }
231        match kind {
232            ArrayElementKind::Untyped => None,
233            ArrayElementKind::Bool | ArrayElementKind::I8 | ArrayElementKind::U8 => {
234                Layout::array::<u8>(cap.div_ceil(if kind == ArrayElementKind::Bool { 8 } else { 1 })).ok()
235            }
236            ArrayElementKind::I16 | ArrayElementKind::U16 => Layout::array::<u16>(cap).ok(),
237            ArrayElementKind::I32 | ArrayElementKind::U32 | ArrayElementKind::F32 => {
238                Layout::array::<u32>(cap).ok()
239            }
240            ArrayElementKind::Float64 | ArrayElementKind::Int64 | ArrayElementKind::U64 => {
241                Layout::array::<u64>(cap).ok()
242            }
243        }
244    }
245
    /// Allocate a typed side-buffer for `kind`/`cap`, or return null when
    /// no layout applies (`Untyped` or `cap == 0`).
    #[inline]
    fn alloc_typed_buffer(kind: ArrayElementKind, cap: usize) -> *mut u64 {
        let Some(layout) = Self::typed_layout(kind, cap) else {
            return std::ptr::null_mut();
        };
        // SAFETY: `typed_layout` only returns Some for cap > 0, so the
        // layout is never zero-sized.
        let data = unsafe { alloc::alloc(layout) } as *mut u64;
        if data.is_null() {
            alloc::handle_alloc_error(layout);
        }
        data
    }

    /// Reallocate a typed side-buffer, preserving existing bytes.
    ///
    /// Panics (via `expect`) if either capacity has no typed layout — both
    /// sides must describe real allocations of the same `kind`.
    #[inline]
    fn realloc_typed_buffer(
        ptr: *mut u64,
        kind: ArrayElementKind,
        old_cap: usize,
        new_cap: usize,
    ) -> *mut u64 {
        let old_layout = Self::typed_layout(kind, old_cap)
            .expect("typed_layout must exist for old typed allocation");
        let new_layout = Self::typed_layout(kind, new_cap)
            .expect("typed_layout must exist for new typed allocation");
        // SAFETY: `ptr` was allocated with `old_layout` for this `kind`.
        let data =
            unsafe { alloc::realloc(ptr as *mut u8, old_layout, new_layout.size()) } as *mut u64;
        if data.is_null() {
            alloc::handle_alloc_error(new_layout);
        }
        data
    }

    /// Free a typed side-buffer; no-op for null pointers or layout-less kinds.
    #[inline]
    fn dealloc_typed_buffer(ptr: *mut u64, kind: ArrayElementKind, cap: usize) {
        if ptr.is_null() {
            return;
        }
        if let Some(layout) = Self::typed_layout(kind, cap) {
            // SAFETY: `ptr` was allocated with this same layout.
            unsafe {
                alloc::dealloc(ptr as *mut u8, layout);
            }
        }
    }
288
    /// Current element kind, decoded from the tag byte.
    #[inline]
    fn kind(&self) -> ArrayElementKind {
        ArrayElementKind::from_byte(self.element_kind)
    }

    /// Overwrite the element-kind tag byte.
    #[inline]
    fn set_kind(&mut self, kind: ArrayElementKind) {
        self.element_kind = kind.as_byte();
    }

    /// Kind the `typed_data` allocation was sized for. May lag `element_kind`
    /// after a demotion to `Untyped` (the buffer is kept allocated; only the
    /// kind byte gates its use — see `update_typed_on_write`).
    #[inline]
    fn typed_storage_kind(&self) -> ArrayElementKind {
        ArrayElementKind::from_byte(self.typed_storage_kind)
    }

    /// Public accessor for the current element kind.
    #[inline]
    pub fn element_kind(&self) -> ArrayElementKind {
        self.kind()
    }

    /// Raw pointer to the typed side-buffer (null when no typed mirror exists).
    #[inline]
    pub fn typed_data_ptr(&self) -> *const u64 {
        self.typed_data
    }
313
    /// Convert a NaN-boxed number to `i64` only when the conversion is exact.
    ///
    /// Rejects non-numbers, non-finite values, out-of-range magnitudes, and
    /// values with a fractional part (the `(i as f64) == n` round-trip check
    /// also catches boundary values like 2^63 that pass the range test but
    /// saturate in the cast).
    #[inline]
    fn try_number_to_i64(bits: u64) -> Option<i64> {
        if !is_number(bits) {
            return None;
        }
        let n = unbox_number(bits);
        if !n.is_finite() || n < i64::MIN as f64 || n > i64::MAX as f64 {
            return None;
        }
        let i = n as i64;
        if (i as f64) == n { Some(i) } else { None }
    }
326
327    fn infer_kind(elements: &[u64]) -> ArrayElementKind {
328        if elements.is_empty() {
329            return ArrayElementKind::Untyped;
330        }
331
332        if elements
333            .iter()
334            .all(|&v| v == TAG_BOOL_TRUE || v == TAG_BOOL_FALSE)
335        {
336            return ArrayElementKind::Bool;
337        }
338
339        let all_numbers = elements.iter().all(|&v| is_number(v));
340        if !all_numbers {
341            return ArrayElementKind::Untyped;
342        }
343
344        if elements
345            .iter()
346            .all(|&v| Self::try_number_to_i64(v).is_some())
347        {
348            ArrayElementKind::Int64
349        } else {
350            ArrayElementKind::Float64
351        }
352    }
353
354    fn bootstrap_kind_from_first_value(value: u64) -> ArrayElementKind {
355        if value == TAG_BOOL_TRUE || value == TAG_BOOL_FALSE {
356            ArrayElementKind::Bool
357        } else if is_number(value) {
358            // Prefer Float64 for push-built numeric arrays to avoid
359            // accidental integer pinning in float-heavy kernels.
360            ArrayElementKind::Float64
361        } else {
362            ArrayElementKind::Untyped
363        }
364    }
365
    /// Ensure `typed_data` points at an allocation sized for `kind`.
    ///
    /// No-op when `cap == 0` (nothing to mirror yet) or `kind` is `Untyped`.
    /// If a buffer for a *different* storage kind already exists, it is freed
    /// and replaced — existing typed contents are NOT migrated; callers are
    /// expected to repopulate the slots afterwards.
    fn ensure_typed_buffer(&mut self, kind: ArrayElementKind) {
        if self.cap == 0 || kind == ArrayElementKind::Untyped {
            return;
        }
        if self.typed_data.is_null() {
            self.typed_data = Self::alloc_typed_buffer(kind, self.cap as usize);
            self.typed_storage_kind = kind.as_byte();
            return;
        }
        let current = self.typed_storage_kind();
        if current != kind {
            Self::dealloc_typed_buffer(self.typed_data, current, self.cap as usize);
            self.typed_data = Self::alloc_typed_buffer(kind, self.cap as usize);
            self.typed_storage_kind = kind.as_byte();
        }
    }
382
383    fn write_typed_slot(&mut self, index: usize, boxed_value: u64) -> bool {
384        if self.typed_data.is_null() || index >= self.cap as usize {
385            return false;
386        }
387
388        let kind = self.kind();
389        let raw = match kind {
390            ArrayElementKind::Untyped => return false,
391            ArrayElementKind::Float64 => {
392                if !is_number(boxed_value) {
393                    return false;
394                }
395                boxed_value
396            }
397            ArrayElementKind::Int64 => match Self::try_number_to_i64(boxed_value) {
398                Some(v) => v as u64,
399                None => return false,
400            },
401            ArrayElementKind::Bool => {
402                if boxed_value == TAG_BOOL_TRUE {
403                    1
404                } else if boxed_value == TAG_BOOL_FALSE {
405                    0
406                } else {
407                    return false;
408                }
409            }
410            // Width-specific types: extract f64, cast to target type.
411            ArrayElementKind::I8 | ArrayElementKind::I16 | ArrayElementKind::I32
412            | ArrayElementKind::U8 | ArrayElementKind::U16 | ArrayElementKind::U32
413            | ArrayElementKind::U64 | ArrayElementKind::F32 => {
414                if !is_number(boxed_value) {
415                    return false;
416                }
417                let f = unbox_number(boxed_value);
418                // Store the truncated integer or f32 bits in the low bytes.
419                // The write below uses typed_data stride = 8 bytes per slot,
420                // which is correct for all types (overallocated for < 8-byte types).
421                f as i64 as u64
422            }
423        };
424
425        match kind {
426            ArrayElementKind::Bool => {
427                let byte_idx = index >> 3;
428                let bit_idx = (index & 7) as u8;
429                let mask = 1u8 << bit_idx;
430                let byte_ptr = self.typed_data as *mut u8;
431                unsafe {
432                    let prev = *byte_ptr.add(byte_idx);
433                    let next = if raw == 0 { prev & !mask } else { prev | mask };
434                    *byte_ptr.add(byte_idx) = next;
435                }
436                true
437            }
438            _ => {
439                unsafe {
440                    *self.typed_data.add(index) = raw;
441                }
442                true
443            }
444        }
445    }
446
    /// Infer a kind for freshly-copied contents and populate the typed mirror.
    ///
    /// Falls back to `Untyped` (kind byte only — any allocated buffer is
    /// kept) when inference fails, allocation yields no buffer, or any
    /// element refuses to encode.
    fn initialize_typed_from_boxed(&mut self, elements: &[u64]) {
        let kind = Self::infer_kind(elements);
        if kind == ArrayElementKind::Untyped {
            self.set_kind(ArrayElementKind::Untyped);
            return;
        }

        self.ensure_typed_buffer(kind);
        if self.typed_data.is_null() {
            self.set_kind(ArrayElementKind::Untyped);
            return;
        }

        self.set_kind(kind);
        for (idx, &value) in elements.iter().enumerate() {
            // infer_kind already accepted every element, so a failure here
            // is defensive; demote rather than hold a half-true kind.
            if !self.write_typed_slot(idx, value) {
                self.set_kind(ArrayElementKind::Untyped);
                return;
            }
        }
    }

    /// Keep the typed mirror in sync after a boxed write at `index`.
    ///
    /// An `Untyped` array bootstraps a kind on the very first element
    /// (`len == 0 && index == 0`, i.e. the first `push`); otherwise a value
    /// that doesn't fit the current kind demotes the array to `Untyped`.
    fn update_typed_on_write(&mut self, index: usize, boxed_value: u64) {
        let kind = self.kind();

        if kind == ArrayElementKind::Untyped {
            if self.len == 0 && index == 0 {
                let bootstrap = Self::bootstrap_kind_from_first_value(boxed_value);
                if bootstrap != ArrayElementKind::Untyped {
                    // `push` grows before calling us, so cap > 0 here and
                    // ensure_typed_buffer can actually allocate.
                    self.ensure_typed_buffer(bootstrap);
                    if !self.typed_data.is_null() {
                        self.set_kind(bootstrap);
                        if !self.write_typed_slot(index, boxed_value) {
                            self.set_kind(ArrayElementKind::Untyped);
                        }
                    }
                }
            }
            return;
        }

        if !self.write_typed_slot(index, boxed_value) {
            // Keep buffer allocated; dropping kind gates correctness.
            self.set_kind(ArrayElementKind::Untyped);
        }
    }
493
    /// Number of elements.
    #[inline]
    pub fn len(&self) -> usize {
        self.len as usize
    }

    /// Check if empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// View elements as a slice.
    #[inline]
    pub fn as_slice(&self) -> &[u64] {
        if self.data.is_null() || self.len == 0 {
            return &[];
        }
        // SAFETY: `data` is non-null and the first `len` slots are initialized.
        unsafe { slice::from_raw_parts(self.data, self.len as usize) }
    }

    /// View elements as a mutable slice.
    ///
    /// NOTE(review): writes through this slice bypass `update_typed_on_write`,
    /// so the typed mirror can go stale — confirm callers only use it on
    /// untyped arrays or re-sync the mirror afterwards.
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [u64] {
        if self.data.is_null() || self.len == 0 {
            return &mut [];
        }
        // SAFETY: same invariants as `as_slice`, plus exclusivity via &mut self.
        unsafe { slice::from_raw_parts_mut(self.data, self.len as usize) }
    }

    /// Get element by index (bounds-checked).
    #[inline]
    pub fn get(&self, index: usize) -> Option<&u64> {
        if index < self.len as usize {
            // SAFETY: `index < len`, so the slot is allocated and initialized.
            unsafe { Some(&*self.data.add(index)) }
        } else {
            None
        }
    }
533
534    /// Set an element by index (bounds-checked).
535    /// Returns true when the write succeeded.
536    pub fn set_boxed(&mut self, index: usize, value: u64) -> bool {
537        if index >= self.len as usize {
538            return false;
539        }
540        unsafe {
541            *self.data.add(index) = value;
542        }
543        self.update_typed_on_write(index, value);
544        true
545    }
546
547    /// Push an element (amortized O(1) with doubling growth).
548    pub fn push(&mut self, value: u64) {
549        if self.len == self.cap {
550            self.grow();
551        }
552        let index = self.len as usize;
553        unsafe {
554            *self.data.add(index) = value;
555        }
556        self.update_typed_on_write(index, value);
557        self.len += 1;
558    }
559
560    /// Ensure capacity is at least `min_capacity` elements.
561    pub fn reserve(&mut self, min_capacity: usize) {
562        if min_capacity <= self.cap as usize {
563            return;
564        }
565        let mut new_cap = if self.cap == 0 {
566            4usize
567        } else {
568            self.cap as usize
569        };
570        while new_cap < min_capacity {
571            new_cap = new_cap.saturating_mul(2);
572        }
573        self.grow_to(new_cap);
574    }
575
576    /// Pop the last element.
577    pub fn pop(&mut self) -> Option<u64> {
578        if self.len == 0 {
579            return None;
580        }
581        self.len -= 1;
582        unsafe { Some(*self.data.add(self.len as usize)) }
583    }
584
585    /// Iterate over elements.
586    #[inline]
587    pub fn iter(&self) -> slice::Iter<'_, u64> {
588        self.as_slice().iter()
589    }
590
591    /// Get first element.
592    #[inline]
593    pub fn first(&self) -> Option<&u64> {
594        if self.len > 0 {
595            unsafe { Some(&*self.data) }
596        } else {
597            None
598        }
599    }
600
601    /// Get last element.
602    #[inline]
603    pub fn last(&self) -> Option<&u64> {
604        if self.len > 0 {
605            unsafe { Some(&*self.data.add(self.len as usize - 1)) }
606        } else {
607            None
608        }
609    }
610
    /// Deep copy of the element buffer; the clone's kind and typed mirror
    /// are re-inferred from the contents by `from_slice`.
    pub fn clone_data(&self) -> Self {
        Self::from_slice(self.as_slice())
    }

    /// Convert to Vec<u64> for interop with remaining Rust code paths.
    ///
    /// Copies the elements out; `self` is then dropped normally, which frees
    /// the original buffers (and releases any slice-parent Arc).
    pub fn into_vec(self) -> Vec<u64> {
        let vec = self.as_slice().to_vec();
        vec
    }

    /// Raw pointer to data buffer (for JIT inline access).
    #[inline]
    pub fn as_ptr(&self) -> *const u64 {
        self.data
    }
629
    /// Grow the buffer using amortized doubling (first allocation is 4 slots).
    fn grow(&mut self) {
        let new_cap = if self.cap == 0 { 4 } else { self.cap * 2 };
        self.grow_to(new_cap as usize);
    }

    /// Reallocate element storage (and the typed mirror, if any) to
    /// `new_cap` entries. Existing elements are preserved by `realloc`;
    /// callers only ever pass `new_cap` >= the current capacity.
    fn grow_to(&mut self, new_cap: usize) {
        let old_cap = self.cap as usize;

        self.data = if self.data.is_null() {
            Self::alloc_u64_buffer(new_cap)
        } else {
            Self::realloc_u64_buffer(self.data, old_cap, new_cap)
        };

        if !self.typed_data.is_null() {
            let typed_kind = self.typed_storage_kind();
            // typed_data is only allocated when cap > 0, so the old_cap == 0
            // arm is a defensive fallback rather than a reachable path.
            self.typed_data = if old_cap == 0 {
                Self::alloc_typed_buffer(typed_kind, new_cap)
            } else {
                Self::realloc_typed_buffer(self.typed_data, typed_kind, old_cap, new_cap)
            };
        }

        self.cap = new_cap as u64;
    }
657}
658
impl Drop for JitArray {
    fn drop(&mut self) {
        // Free the boxed element buffer (allocated for exactly `cap` u64s).
        if !self.data.is_null() && self.cap > 0 {
            Self::dealloc_u64_buffer(self.data, self.cap as usize);
        }
        // Free the typed mirror using the kind it was actually allocated for
        // (`element_kind` may have been demoted to Untyped since allocation).
        if !self.typed_data.is_null() && self.cap > 0 {
            let typed_kind = self.typed_storage_kind();
            Self::dealloc_typed_buffer(self.typed_data, typed_kind, self.cap as usize);
        }
        // Drop the leaked Arc<MatrixData> if this was a FloatArraySlice.
        if !self.slice_parent_arc.is_null() {
            // SAFETY: per the field contract, this pointer came from a leaked
            // Arc<MatrixData>; reconstituting it releases that strong count
            // exactly once.
            unsafe {
                let _ = std::sync::Arc::from_raw(
                    self.slice_parent_arc as *const shape_value::heap_value::MatrixData,
                );
            }
        }
    }
}
678
// Index access.
impl std::ops::Index<usize> for JitArray {
    type Output = u64;

    /// Panics with "JitArray index out of bounds" when `index >= len`.
    #[inline]
    fn index(&self, index: usize) -> &u64 {
        assert!(index < self.len as usize, "JitArray index out of bounds");
        // SAFETY: asserted `index < len` above.
        unsafe { &*self.data.add(index) }
    }
}

impl std::ops::IndexMut<usize> for JitArray {
    /// Mutable indexing; like `as_mut_slice`, writes through this reference
    /// bypass the typed-mirror sync in `update_typed_on_write`.
    #[inline]
    fn index_mut(&mut self, index: usize) -> &mut u64 {
        assert!(index < self.len as usize, "JitArray index out of bounds");
        // SAFETY: asserted `index < len` above.
        unsafe { &mut *self.data.add(index) }
    }
}
697
#[cfg(test)]
mod tests {
    // Unit tests cover the ABI layout contract, basic container behavior,
    // and the element-kind bootstrap/invalidate transitions.
    use super::*;
    use crate::nan_boxing::box_number;

    /// Pin the `#[repr(C)]` layout that JIT-emitted code hard-codes.
    #[test]
    fn test_repr_c_layout() {
        assert_eq!(std::mem::offset_of!(JitArray, data), DATA_OFFSET as usize);
        assert_eq!(std::mem::offset_of!(JitArray, len), LEN_OFFSET as usize);
        assert_eq!(std::mem::offset_of!(JitArray, cap), CAP_OFFSET as usize);
        assert_eq!(
            std::mem::offset_of!(JitArray, typed_data),
            TYPED_DATA_OFFSET as usize
        );
        assert_eq!(
            std::mem::offset_of!(JitArray, element_kind),
            ELEMENT_KIND_OFFSET as usize
        );
        // 40 base + 8 (slice_parent_arc ptr) + 4 (slice_offset) + 4 (slice_len) = 56
        assert_eq!(std::mem::size_of::<JitArray>(), 56);
    }

    #[test]
    fn test_new_empty() {
        let arr = JitArray::new();
        assert_eq!(arr.len(), 0);
        assert!(arr.is_empty());
        let empty: &[u64] = &[];
        assert_eq!(arr.as_slice(), empty);
        assert_eq!(arr.element_kind(), ArrayElementKind::Untyped);
    }

    #[test]
    fn test_from_slice() {
        let arr = JitArray::from_slice(&[1u64, 2, 3]);
        assert_eq!(arr.len(), 3);
        assert_eq!(arr.as_slice(), &[1u64, 2, 3]);
    }

    #[test]
    fn test_from_vec() {
        let arr = JitArray::from_vec(vec![10, 20, 30]);
        assert_eq!(arr.len(), 3);
        assert_eq!(arr.as_slice(), &[10, 20, 30]);
    }

    #[test]
    fn test_push_pop() {
        let mut arr = JitArray::new();
        arr.push(1);
        arr.push(2);
        arr.push(3);
        assert_eq!(arr.len(), 3);
        assert_eq!(arr.as_slice(), &[1, 2, 3]);

        assert_eq!(arr.pop(), Some(3));
        assert_eq!(arr.pop(), Some(2));
        assert_eq!(arr.len(), 1);
        assert_eq!(arr.pop(), Some(1));
        assert_eq!(arr.pop(), None);
    }

    #[test]
    fn test_get() {
        let arr = JitArray::from_slice(&[10, 20, 30]);
        assert_eq!(arr.get(0), Some(&10));
        assert_eq!(arr.get(2), Some(&30));
        assert_eq!(arr.get(3), None);
    }

    #[test]
    fn test_first_last() {
        let arr = JitArray::from_slice(&[10, 20, 30]);
        assert_eq!(arr.first(), Some(&10));
        assert_eq!(arr.last(), Some(&30));

        let empty = JitArray::new();
        assert_eq!(empty.first(), None);
        assert_eq!(empty.last(), None);
    }

    #[test]
    fn test_clone_data() {
        let arr = JitArray::from_slice(&[1, 2, 3]);
        let cloned = arr.clone_data();
        assert_eq!(cloned.as_slice(), arr.as_slice());
        // Ensure different buffers.
        assert_ne!(arr.data, cloned.data);
    }

    #[test]
    fn test_into_vec() {
        let arr = JitArray::from_slice(&[5, 10, 15]);
        let vec = arr.into_vec();
        assert_eq!(vec, vec![5, 10, 15]);
    }

    /// Exercises the doubling growth path across several reallocations.
    #[test]
    fn test_growth() {
        let mut arr = JitArray::new();
        for i in 0..100 {
            arr.push(i);
        }
        assert_eq!(arr.len(), 100);
        for i in 0..100 {
            assert_eq!(arr[i], i as u64);
        }
    }

    #[test]
    fn test_index_access() {
        let mut arr = JitArray::from_slice(&[10, 20, 30]);
        assert_eq!(arr[0], 10);
        assert_eq!(arr[1], 20);
        arr[1] = 99;
        assert_eq!(arr[1], 99);
    }

    #[test]
    fn test_set_boxed_updates_value() {
        let mut arr = JitArray::from_slice(&[10, 20, 30]);
        assert!(arr.set_boxed(1, 99));
        assert_eq!(arr[1], 99);
        assert!(!arr.set_boxed(4, 123));
    }

    #[test]
    fn test_with_capacity() {
        let mut arr = JitArray::with_capacity(10);
        assert_eq!(arr.len(), 0);
        assert!(arr.is_empty());
        arr.push(42);
        assert_eq!(arr.len(), 1);
        assert_eq!(arr[0], 42);
    }

    #[test]
    fn test_reserve_preserves_existing_elements() {
        let mut arr = JitArray::from_slice(&[1, 2, 3]);
        let old_cap = arr.cap;
        arr.reserve(64);
        assert!(arr.cap >= 64);
        assert!(arr.cap >= old_cap);
        assert_eq!(arr.as_slice(), &[1, 2, 3]);
    }

    #[test]
    fn test_iter() {
        let arr = JitArray::from_slice(&[1, 2, 3]);
        let sum: u64 = arr.iter().sum();
        assert_eq!(sum, 6);
    }

    /// First pushed number bootstraps Float64 (never Int64) by design.
    #[test]
    fn test_bootstrap_float_kind_on_first_push() {
        let mut arr = JitArray::new();
        arr.push(box_number(1.5));
        assert_eq!(arr.element_kind(), ArrayElementKind::Float64);
        assert!(!arr.typed_data_ptr().is_null());
    }

    #[test]
    fn test_bootstrap_bool_kind_on_first_push() {
        let mut arr = JitArray::new();
        arr.push(TAG_BOOL_TRUE);
        assert_eq!(arr.element_kind(), ArrayElementKind::Bool);
        assert!(!arr.typed_data_ptr().is_null());
    }

    /// A non-bool write demotes a Bool-kinded array back to Untyped.
    #[test]
    fn test_invalidate_bool_kind_on_non_bool_write() {
        let mut arr = JitArray::new();
        arr.push(TAG_BOOL_TRUE);
        arr.push(TAG_BOOL_FALSE);
        assert_eq!(arr.element_kind(), ArrayElementKind::Bool);
        arr.push(box_number(2.0));
        assert_eq!(arr.element_kind(), ArrayElementKind::Untyped);
    }
}