aprender/native/
mod.rs

1//! SIMD-Native Model Format (spec §5)
2//!
3//! Provides types optimized for zero-copy SIMD inference with Trueno.
4//! Designed for maximum performance on CPU-based inference:
5//!
6//! - **64-byte alignment**: Compatible with AVX-512
7//! - **Contiguous storage**: No pointer chasing
8//! - **Row-major ordering**: Matches Trueno convention
9//! - **Cache-line optimization**: Efficient prefetch
10//!
11//! # Performance Targets
12//! - Linear (100 features, 1K samples): < 10 μs
13//! - K-Means (10 clusters, 100d, 1K samples): < 50 μs
14//! - Random Forest (100 trees, 1K samples): < 1 ms
15//!
16//! # Reference
17//! [Intel Intrinsics Guide], [Fog 2023] "Optimizing Software in C++"
18
19use std::mem::{align_of, size_of};
20
21use crate::format::ModelType;
22
/// Model format optimized for Trueno SIMD operations (spec §5.2)
///
/// Memory layout guarantees:
/// - 64-byte alignment (AVX-512 compatible)
/// - Contiguous storage (no pointer chasing)
/// - Row-major ordering (matches Trueno convention)
/// - Padding to SIMD width boundaries
///
/// Buffers are `None` until attached via the `with_*` builder methods.
///
/// # Example
/// ```
/// use aprender::native::{TruenoNativeModel, AlignedVec, ModelExtra};
/// use aprender::format::ModelType;
///
/// let params = AlignedVec::from_slice(&[0.5, -0.3, 0.8, 0.2]);
/// let bias = AlignedVec::from_slice(&[1.0]);
///
/// let model = TruenoNativeModel::new(
///     ModelType::LinearRegression,
///     4,   // n_params
///     4,   // n_features
///     1,   // n_outputs
/// )
/// .with_params(params)
/// .with_bias(bias);
///
/// assert_eq!(model.n_params, 4);
/// assert!(model.is_aligned());
/// ```
#[derive(Debug, Clone)]
pub struct TruenoNativeModel {
    /// Model type identifier
    pub model_type: ModelType,

    /// Number of parameters (must equal `params.len()` when set; see `validate`)
    pub n_params: u32,

    /// Number of features expected in input
    pub n_features: u32,

    /// Number of outputs (classes for classification, 1 for regression)
    pub n_outputs: u32,

    /// Model parameters (64-byte aligned); `None` until set via `with_params`
    pub params: Option<AlignedVec<f32>>,

    /// Bias terms (64-byte aligned); `None` until set via `with_bias`
    pub bias: Option<AlignedVec<f32>>,

    /// Additional model-specific data (trees, layers, centroids, metadata)
    pub extra: Option<ModelExtra>,
}
74
75impl TruenoNativeModel {
76    /// Create a new native model skeleton
77    #[must_use]
78    pub const fn new(
79        model_type: ModelType,
80        n_params: u32,
81        n_features: u32,
82        n_outputs: u32,
83    ) -> Self {
84        Self {
85            model_type,
86            n_params,
87            n_features,
88            n_outputs,
89            params: None,
90            bias: None,
91            extra: None,
92        }
93    }
94
95    /// Set model parameters
96    #[must_use]
97    pub fn with_params(mut self, params: AlignedVec<f32>) -> Self {
98        self.params = Some(params);
99        self
100    }
101
102    /// Set bias terms
103    #[must_use]
104    pub fn with_bias(mut self, bias: AlignedVec<f32>) -> Self {
105        self.bias = Some(bias);
106        self
107    }
108
109    /// Set extra model data
110    #[must_use]
111    pub fn with_extra(mut self, extra: ModelExtra) -> Self {
112        self.extra = Some(extra);
113        self
114    }
115
116    /// Check if all buffers are properly aligned
117    #[must_use]
118    pub fn is_aligned(&self) -> bool {
119        let params_aligned = self.params.as_ref().map_or(true, AlignedVec::is_aligned);
120        let bias_aligned = self.bias.as_ref().map_or(true, AlignedVec::is_aligned);
121        params_aligned && bias_aligned
122    }
123
124    /// Total size in bytes (including alignment padding)
125    #[must_use]
126    pub fn size_bytes(&self) -> usize {
127        let params_size = self.params.as_ref().map_or(0, AlignedVec::size_bytes);
128        let bias_size = self.bias.as_ref().map_or(0, AlignedVec::size_bytes);
129        let extra_size = self.extra.as_ref().map_or(0, ModelExtra::size_bytes);
130        params_size + bias_size + extra_size
131    }
132
133    /// Validate model structure
134    pub fn validate(&self) -> Result<(), NativeModelError> {
135        // Check params match declared count
136        if let Some(ref params) = self.params {
137            if params.len() != self.n_params as usize {
138                return Err(NativeModelError::ParamCountMismatch {
139                    declared: self.n_params as usize,
140                    actual: params.len(),
141                });
142            }
143        }
144
145        // Check for NaN/Inf in params
146        if let Some(ref params) = self.params {
147            for (i, &val) in params.as_slice().iter().enumerate() {
148                if !val.is_finite() {
149                    return Err(NativeModelError::InvalidParameter {
150                        index: i,
151                        value: val,
152                    });
153                }
154            }
155        }
156
157        // Check for NaN/Inf in bias
158        if let Some(ref bias) = self.bias {
159            for (i, &val) in bias.as_slice().iter().enumerate() {
160                if !val.is_finite() {
161                    return Err(NativeModelError::InvalidBias {
162                        index: i,
163                        value: val,
164                    });
165                }
166            }
167        }
168
169        Ok(())
170    }
171
172    /// Get raw pointer to parameters for SIMD operations
173    ///
174    /// # Safety
175    /// Caller must ensure the returned pointer is not used after the model is dropped.
176    #[must_use]
177    pub fn params_ptr(&self) -> Option<*const f32> {
178        self.params.as_ref().map(AlignedVec::as_ptr)
179    }
180
181    /// Get raw pointer to bias for SIMD operations
182    ///
183    /// # Safety
184    /// Caller must ensure the returned pointer is not used after the model is dropped.
185    #[must_use]
186    pub fn bias_ptr(&self) -> Option<*const f32> {
187        self.bias.as_ref().map(AlignedVec::as_ptr)
188    }
189
190    /// Predict for a single sample (linear models only)
191    ///
192    /// Uses naive implementation for validation; production code should use
193    /// Trueno SIMD operations.
194    pub fn predict_linear(&self, features: &[f32]) -> Result<f32, NativeModelError> {
195        if features.len() != self.n_features as usize {
196            return Err(NativeModelError::FeatureMismatch {
197                expected: self.n_features as usize,
198                got: features.len(),
199            });
200        }
201
202        let params = self
203            .params
204            .as_ref()
205            .ok_or(NativeModelError::MissingParams)?;
206
207        let dot: f32 = params
208            .as_slice()
209            .iter()
210            .zip(features.iter())
211            .map(|(p, x)| p * x)
212            .sum();
213
214        let bias = self
215            .bias
216            .as_ref()
217            .and_then(|b| b.as_slice().first().copied())
218            .unwrap_or(0.0);
219
220        Ok(dot + bias)
221    }
222}
223
224impl Default for TruenoNativeModel {
225    fn default() -> Self {
226        Self::new(ModelType::LinearRegression, 0, 0, 1)
227    }
228}
229
/// 64-byte aligned vector for SIMD operations (spec §5.2)
///
/// Provides memory-aligned storage for efficient SIMD access.
/// Capacity is rounded up toward 64-byte boundaries for AVX-512 compatibility.
///
/// # Memory Layout
/// - Data is stored in a Vec with additional alignment tracking
/// - Capacity is rounded up to 64-byte boundaries
/// - Provides raw pointers for FFI/SIMD operations
///
/// Note: a standard `Vec` allocation does not *guarantee* 64-byte alignment;
/// see [`AlignedVec::is_aligned`] for what is actually checked.
///
/// # Example
/// ```
/// use aprender::native::AlignedVec;
///
/// let vec = AlignedVec::from_slice(&[1.0_f32, 2.0, 3.0, 4.0]);
/// assert!(vec.is_aligned());
/// assert_eq!(vec.len(), 4);
///
/// // Access as slice
/// assert_eq!(vec.as_slice(), &[1.0, 2.0, 3.0, 4.0]);
/// ```
#[derive(Debug, Clone)]
pub struct AlignedVec<T: Copy + Default> {
    /// The underlying data; its length equals `capacity` (includes padding)
    data: Vec<T>,
    /// Logical length (may be less than capacity)
    len: usize,
    /// Aligned capacity
    capacity: usize,
}

impl<T: Copy + Default> AlignedVec<T> {
    /// Create with capacity rounded up to 64-byte boundary
    #[must_use]
    pub fn with_capacity(capacity: usize) -> Self {
        let size_of_t = size_of::<T>();
        let aligned_cap = if size_of_t > 0 {
            // Round the byte count up to a 64-byte multiple, then convert
            // back to an element count.
            (capacity * size_of_t + 63) / 64 * 64 / size_of_t
        } else {
            capacity
        };
        // Guard against rounding below the requested element count when
        // size_of_t does not divide 64 evenly.
        let aligned_cap = aligned_cap.max(capacity);
        let data = vec![T::default(); aligned_cap];
        Self {
            data,
            len: 0,
            capacity: aligned_cap,
        }
    }

    /// Create from a slice, copying data into aligned storage
    #[must_use]
    pub fn from_slice(slice: &[T]) -> Self {
        let mut vec = Self::with_capacity(slice.len());
        vec.data[..slice.len()].copy_from_slice(slice);
        vec.len = slice.len();
        vec
    }

    /// Create filled with zeros (well, `T::default()` values)
    #[must_use]
    pub fn zeros(len: usize) -> Self {
        let mut vec = Self::with_capacity(len);
        vec.len = len;
        vec
    }

    /// Logical length
    #[must_use]
    pub const fn len(&self) -> usize {
        self.len
    }

    /// Check if empty
    #[must_use]
    pub const fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Aligned capacity (in elements)
    #[must_use]
    pub const fn capacity(&self) -> usize {
        self.capacity
    }

    /// Get raw pointer for SIMD/FFI use
    #[must_use]
    pub fn as_ptr(&self) -> *const T {
        self.data.as_ptr()
    }

    /// Get mutable raw pointer
    #[must_use]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self.data.as_mut_ptr()
    }

    /// Get as slice (logical contents only, no padding)
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        &self.data[..self.len]
    }

    /// Get as mutable slice (logical contents only, no padding)
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        &mut self.data[..self.len]
    }

    /// Check alignment (for debugging)
    ///
    /// Note: Standard Rust Vec does not guarantee 64-byte alignment.
    /// This function checks if the data pointer happens to be aligned.
    /// For true SIMD-aligned allocations, use a specialized allocator.
    #[must_use]
    pub fn is_aligned(&self) -> bool {
        // For production SIMD code, alignment would need specialized allocator
        // For now, we return true for empty or zero-sized types, and check
        // natural alignment for the type otherwise
        if self.data.is_empty() || size_of::<T>() == 0 {
            return true;
        }
        // Check at least type alignment (natural alignment)
        self.data.as_ptr() as usize % align_of::<T>() == 0
    }

    /// Size in bytes (actual data, not capacity)
    #[must_use]
    pub fn size_bytes(&self) -> usize {
        self.len * size_of::<T>()
    }

    /// Push a value (may reallocate if at capacity)
    pub fn push(&mut self, value: T) {
        if self.len >= self.data.len() {
            // Grow by doubling, re-rounding through `with_capacity` so the
            // 64-byte capacity invariant survives reallocation (the previous
            // code grew to a raw doubled count without rounding).
            let grown = Self::with_capacity((self.capacity * 2).max(16));
            let mut new_data = grown.data;
            new_data[..self.len].copy_from_slice(&self.data[..self.len]);
            self.capacity = new_data.len();
            self.data = new_data;
        }
        self.data[self.len] = value;
        self.len += 1;
    }

    /// Clear the vector (keeps capacity)
    pub fn clear(&mut self) {
        self.len = 0;
    }

    /// Get element by index, `None` when `index >= len()`
    #[must_use]
    pub fn get(&self, index: usize) -> Option<&T> {
        if index < self.len {
            Some(&self.data[index])
        } else {
            None
        }
    }

    /// Get mutable element by index, `None` when `index >= len()`
    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
        if index < self.len {
            Some(&mut self.data[index])
        } else {
            None
        }
    }

    /// Set element by index; returns `false` when `index >= len()`
    pub fn set(&mut self, index: usize, value: T) -> bool {
        if index < self.len {
            self.data[index] = value;
            true
        } else {
            false
        }
    }
}

impl<T: Copy + Default> Default for AlignedVec<T> {
    fn default() -> Self {
        Self::with_capacity(0)
    }
}

impl<T: Copy + Default> std::ops::Index<usize> for AlignedVec<T> {
    type Output = T;

    /// Index into the logical contents; panics when `index >= len()`.
    ///
    /// Bounds-checking against the logical slice (not the padded backing
    /// `Vec`) keeps `[]` consistent with `get`/`set`; previously an index
    /// into the padding region silently returned a default-initialized value.
    fn index(&self, index: usize) -> &Self::Output {
        &self.as_slice()[index]
    }
}

impl<T: Copy + Default> std::ops::IndexMut<usize> for AlignedVec<T> {
    /// Mutable index into the logical contents; panics when `index >= len()`.
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        &mut self.as_mut_slice()[index]
    }
}

impl<T: Copy + Default> FromIterator<T> for AlignedVec<T> {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let vec: Vec<T> = iter.into_iter().collect();
        Self::from_slice(&vec)
    }
}

impl<T: Copy + Default + PartialEq> PartialEq for AlignedVec<T> {
    /// Equality compares logical contents only; padding is ignored.
    fn eq(&self, other: &Self) -> bool {
        self.as_slice() == other.as_slice()
    }
}
442
/// Additional model-specific data
///
/// Each field is optional; only the components relevant to the model type
/// are populated.
#[derive(Debug, Clone, Default)]
pub struct ModelExtra {
    /// Tree structure for decision trees
    pub tree_data: Option<TreeData>,

    /// Layer information for neural networks
    pub layer_data: Option<Vec<LayerData>>,

    /// Cluster centroids for K-Means
    pub centroids: Option<AlignedVec<f32>>,

    /// Custom metadata as opaque byte blobs keyed by name
    pub metadata: std::collections::HashMap<String, Vec<u8>>,
}
458
459impl ModelExtra {
460    /// Create empty extra data
461    #[must_use]
462    pub fn new() -> Self {
463        Self::default()
464    }
465
466    /// Set tree data
467    #[must_use]
468    pub fn with_tree(mut self, tree: TreeData) -> Self {
469        self.tree_data = Some(tree);
470        self
471    }
472
473    /// Set layer data
474    #[must_use]
475    pub fn with_layers(mut self, layers: Vec<LayerData>) -> Self {
476        self.layer_data = Some(layers);
477        self
478    }
479
480    /// Set centroids
481    #[must_use]
482    pub fn with_centroids(mut self, centroids: AlignedVec<f32>) -> Self {
483        self.centroids = Some(centroids);
484        self
485    }
486
487    /// Add custom metadata
488    #[must_use]
489    pub fn with_metadata(mut self, key: impl Into<String>, value: Vec<u8>) -> Self {
490        self.metadata.insert(key.into(), value);
491        self
492    }
493
494    /// Size in bytes
495    #[must_use]
496    pub fn size_bytes(&self) -> usize {
497        let tree_size = self.tree_data.as_ref().map_or(0, TreeData::size_bytes);
498        let layer_size: usize = self
499            .layer_data
500            .as_ref()
501            .map_or(0, |layers| layers.iter().map(LayerData::size_bytes).sum());
502        let centroid_size = self.centroids.as_ref().map_or(0, AlignedVec::size_bytes);
503        let metadata_size: usize = self.metadata.values().map(Vec::len).sum();
504        tree_size + layer_size + centroid_size + metadata_size
505    }
506}
507
/// Decision tree structure data
///
/// Parallel arrays indexed by node id; child indices use `-1` as the
/// leaf sentinel.
#[derive(Debug, Clone)]
pub struct TreeData {
    /// Feature indices for each node
    pub feature_indices: Vec<u16>,
    /// Thresholds for each node
    pub thresholds: Vec<f32>,
    /// Left child indices (-1 for leaf)
    pub left_children: Vec<i32>,
    /// Right child indices (-1 for leaf)
    pub right_children: Vec<i32>,
    /// Leaf values (predictions)
    pub leaf_values: Vec<f32>,
}

impl TreeData {
    /// Create empty tree
    #[must_use]
    pub fn new() -> Self {
        Self {
            feature_indices: Vec::new(),
            thresholds: Vec::new(),
            left_children: Vec::new(),
            right_children: Vec::new(),
            leaf_values: Vec::new(),
        }
    }

    /// Number of nodes (length of the `thresholds` array)
    #[must_use]
    pub fn n_nodes(&self) -> usize {
        self.thresholds.len()
    }

    /// Size in bytes of all node arrays
    ///
    /// Uses `size_of` per element type instead of hard-coded widths so the
    /// accounting stays correct if a field's element type ever changes.
    #[must_use]
    pub fn size_bytes(&self) -> usize {
        self.feature_indices.len() * size_of::<u16>()
            + self.thresholds.len() * size_of::<f32>()
            + self.left_children.len() * size_of::<i32>()
            + self.right_children.len() * size_of::<i32>()
            + self.leaf_values.len() * size_of::<f32>()
    }
}

impl Default for TreeData {
    fn default() -> Self {
        Self::new()
    }
}
558
/// Neural network layer data
///
/// Weight/bias buffers are optional: activation-only layers carry none.
#[derive(Debug, Clone)]
pub struct LayerData {
    /// Layer type
    pub layer_type: LayerType,
    /// Input dimension
    pub input_dim: u32,
    /// Output dimension
    pub output_dim: u32,
    /// Weights (row-major)
    pub weights: Option<AlignedVec<f32>>,
    /// Biases
    pub biases: Option<AlignedVec<f32>>,
}
573
574impl LayerData {
575    /// Create a dense layer
576    #[must_use]
577    pub fn dense(input_dim: u32, output_dim: u32) -> Self {
578        Self {
579            layer_type: LayerType::Dense,
580            input_dim,
581            output_dim,
582            weights: None,
583            biases: None,
584        }
585    }
586
587    /// Set weights
588    #[must_use]
589    pub fn with_weights(mut self, weights: AlignedVec<f32>) -> Self {
590        self.weights = Some(weights);
591        self
592    }
593
594    /// Set biases
595    #[must_use]
596    pub fn with_biases(mut self, biases: AlignedVec<f32>) -> Self {
597        self.biases = Some(biases);
598        self
599    }
600
601    /// Size in bytes
602    #[must_use]
603    pub fn size_bytes(&self) -> usize {
604        let weights_size = self.weights.as_ref().map_or(0, AlignedVec::size_bytes);
605        let biases_size = self.biases.as_ref().map_or(0, AlignedVec::size_bytes);
606        weights_size + biases_size + 12 // type + input + output
607    }
608}
609
/// Neural network layer types
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LayerType {
    /// Fully connected layer
    Dense,
    /// `ReLU` activation
    ReLU,
    /// Sigmoid activation
    Sigmoid,
    /// Tanh activation
    Tanh,
    /// Softmax activation
    Softmax,
    /// Dropout (inference mode = identity, no-op)
    Dropout,
    /// Batch normalization
    BatchNorm,
}
628
/// Errors for native model operations
#[derive(Debug, Clone)]
pub enum NativeModelError {
    /// Parameter count mismatch
    ParamCountMismatch { declared: usize, actual: usize },
    /// Invalid parameter value (NaN/Inf)
    InvalidParameter { index: usize, value: f32 },
    /// Invalid bias value (NaN/Inf)
    InvalidBias { index: usize, value: f32 },
    /// Feature count mismatch
    FeatureMismatch { expected: usize, got: usize },
    /// Missing required parameters
    MissingParams,
    /// Alignment error
    AlignmentError { ptr: usize, required: usize },
}

impl std::fmt::Display for NativeModelError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Each arm renders a stable, user-facing message.
        match self {
            Self::ParamCountMismatch { declared, actual } => {
                write!(f, "Parameter count mismatch: declared {declared}, actual {actual}")
            }
            Self::InvalidParameter { index, value } => {
                write!(f, "Invalid parameter at index {index}: {value}")
            }
            Self::InvalidBias { index, value } => {
                write!(f, "Invalid bias at index {index}: {value}")
            }
            Self::FeatureMismatch { expected, got } => {
                write!(f, "Feature mismatch: expected {expected}, got {got}")
            }
            Self::MissingParams => f.write_str("Missing model parameters"),
            Self::AlignmentError { ptr, required } => {
                write!(f, "Alignment error: ptr 0x{ptr:x} not aligned to {required}")
            }
        }
    }
}

impl std::error::Error for NativeModelError {}
676
// Unit tests exercising AlignedVec, TruenoNativeModel, and the helper types.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_aligned_vec_creation() {
        let vec = AlignedVec::<f32>::with_capacity(10);
        assert_eq!(vec.len(), 0);
        // Capacity is rounded up toward a 64-byte boundary, so it may exceed 10.
        assert!(vec.capacity() >= 10);
    }

    #[test]
    fn test_aligned_vec_from_slice() {
        let vec = AlignedVec::from_slice(&[1.0_f32, 2.0, 3.0, 4.0]);
        assert_eq!(vec.len(), 4);
        assert_eq!(vec.as_slice(), &[1.0, 2.0, 3.0, 4.0]);
    }

    #[test]
    fn test_aligned_vec_zeros() {
        let vec = AlignedVec::<f32>::zeros(100);
        assert_eq!(vec.len(), 100);
        assert!(vec.as_slice().iter().all(|&x| x == 0.0));
    }

    #[test]
    fn test_aligned_vec_push() {
        let mut vec = AlignedVec::<f32>::with_capacity(2);
        vec.push(1.0);
        vec.push(2.0);
        vec.push(3.0); // capacity was rounded up to 16 f32s, so no reallocation yet

        assert_eq!(vec.len(), 3);
        assert_eq!(vec.as_slice(), &[1.0, 2.0, 3.0]);
    }

    #[test]
    fn test_aligned_vec_index() {
        let vec = AlignedVec::from_slice(&[10.0_f32, 20.0, 30.0]);
        assert_eq!(vec[0], 10.0);
        assert_eq!(vec[1], 20.0);
        assert_eq!(vec[2], 30.0);
    }

    #[test]
    fn test_aligned_vec_get() {
        let vec = AlignedVec::from_slice(&[1.0_f32, 2.0]);
        assert_eq!(vec.get(0), Some(&1.0));
        assert_eq!(vec.get(1), Some(&2.0));
        // `get` bounds against the logical length, not the padded capacity.
        assert_eq!(vec.get(2), None);
    }

    #[test]
    fn test_aligned_vec_set() {
        let mut vec = AlignedVec::from_slice(&[1.0_f32, 2.0]);
        assert!(vec.set(0, 10.0));
        assert_eq!(vec[0], 10.0);
        assert!(!vec.set(5, 50.0)); // out of bounds
    }

    #[test]
    fn test_aligned_vec_clear() {
        let mut vec = AlignedVec::from_slice(&[1.0_f32, 2.0, 3.0]);
        vec.clear();
        // Clearing resets the logical length but keeps the allocation.
        assert!(vec.is_empty());
        assert!(vec.capacity() >= 3);
    }

    #[test]
    fn test_aligned_vec_from_iterator() {
        let vec: AlignedVec<f32> = (0..5).map(|i| i as f32).collect();
        assert_eq!(vec.len(), 5);
        assert_eq!(vec.as_slice(), &[0.0, 1.0, 2.0, 3.0, 4.0]);
    }

    #[test]
    fn test_aligned_vec_eq() {
        let a = AlignedVec::from_slice(&[1.0_f32, 2.0, 3.0]);
        let b = AlignedVec::from_slice(&[1.0, 2.0, 3.0]);
        let c = AlignedVec::from_slice(&[1.0, 2.0, 4.0]);

        assert_eq!(a, b);
        assert_ne!(a, c);
    }

    #[test]
    fn test_trueno_native_model_creation() {
        let model = TruenoNativeModel::new(ModelType::LinearRegression, 10, 10, 1);

        assert_eq!(model.n_params, 10);
        assert_eq!(model.n_features, 10);
        assert_eq!(model.n_outputs, 1);
    }

    #[test]
    fn test_trueno_native_model_with_params() {
        let params = AlignedVec::from_slice(&[0.5_f32, -0.3, 0.8]);
        let bias = AlignedVec::from_slice(&[1.0_f32]);

        let model = TruenoNativeModel::new(ModelType::LinearRegression, 3, 3, 1)
            .with_params(params)
            .with_bias(bias);

        assert!(model.params.is_some());
        assert!(model.bias.is_some());
        assert!(model.is_aligned());
    }

    #[test]
    fn test_trueno_native_model_validate() {
        let params = AlignedVec::from_slice(&[0.5_f32, -0.3, 0.8]);
        let model =
            TruenoNativeModel::new(ModelType::LinearRegression, 3, 3, 1).with_params(params);

        assert!(model.validate().is_ok());
    }

    #[test]
    fn test_trueno_native_model_validate_param_mismatch() {
        let params = AlignedVec::from_slice(&[0.5_f32, -0.3]); // only 2
        let model = TruenoNativeModel::new(ModelType::LinearRegression, 3, 3, 1) // declared 3
            .with_params(params);

        assert!(matches!(
            model.validate(),
            Err(NativeModelError::ParamCountMismatch { .. })
        ));
    }

    #[test]
    fn test_trueno_native_model_validate_nan() {
        let params = AlignedVec::from_slice(&[0.5_f32, f32::NAN, 0.8]);
        let model =
            TruenoNativeModel::new(ModelType::LinearRegression, 3, 3, 1).with_params(params);

        // The error reports the index of the first non-finite value.
        assert!(matches!(
            model.validate(),
            Err(NativeModelError::InvalidParameter { index: 1, .. })
        ));
    }

    #[test]
    fn test_trueno_native_model_predict_linear() {
        let params = AlignedVec::from_slice(&[1.0_f32, 2.0, 3.0]);
        let bias = AlignedVec::from_slice(&[1.0_f32]);

        let model = TruenoNativeModel::new(ModelType::LinearRegression, 3, 3, 1)
            .with_params(params)
            .with_bias(bias);

        // 1*1 + 2*2 + 3*3 + 1 = 1 + 4 + 9 + 1 = 15
        let pred = model.predict_linear(&[1.0, 2.0, 3.0]).unwrap();
        assert!((pred - 15.0).abs() < f32::EPSILON);
    }

    #[test]
    fn test_trueno_native_model_predict_linear_feature_mismatch() {
        let params = AlignedVec::from_slice(&[1.0_f32, 2.0, 3.0]);
        let model =
            TruenoNativeModel::new(ModelType::LinearRegression, 3, 3, 1).with_params(params);

        let result = model.predict_linear(&[1.0, 2.0]); // only 2 features
        assert!(matches!(
            result,
            Err(NativeModelError::FeatureMismatch {
                expected: 3,
                got: 2
            })
        ));
    }

    #[test]
    fn test_trueno_native_model_predict_linear_missing_params() {
        let model = TruenoNativeModel::new(ModelType::LinearRegression, 3, 3, 1);

        let result = model.predict_linear(&[1.0, 2.0, 3.0]);
        assert!(matches!(result, Err(NativeModelError::MissingParams)));
    }

    #[test]
    fn test_model_extra() {
        let extra = ModelExtra::new()
            .with_centroids(AlignedVec::from_slice(&[1.0_f32, 2.0, 3.0]))
            .with_metadata("key", vec![1, 2, 3]);

        assert!(extra.centroids.is_some());
        assert_eq!(extra.metadata.get("key"), Some(&vec![1, 2, 3]));
        assert!(extra.size_bytes() > 0);
    }

    #[test]
    fn test_tree_data() {
        let tree = TreeData {
            feature_indices: vec![0, 1],
            thresholds: vec![0.5, 0.3],
            left_children: vec![1, -1],
            right_children: vec![2, -1],
            leaf_values: vec![0.0, 1.0, 0.5],
        };

        // n_nodes is derived from the thresholds array.
        assert_eq!(tree.n_nodes(), 2);
        assert!(tree.size_bytes() > 0);
    }

    #[test]
    fn test_layer_data() {
        let layer = LayerData::dense(100, 50)
            .with_weights(AlignedVec::zeros(5000))
            .with_biases(AlignedVec::zeros(50));

        assert_eq!(layer.input_dim, 100);
        assert_eq!(layer.output_dim, 50);
        assert!(layer.size_bytes() > 0);
    }

    #[test]
    fn test_native_model_error_display() {
        let err = NativeModelError::ParamCountMismatch {
            declared: 10,
            actual: 5,
        };
        let msg = format!("{err}");
        assert!(msg.contains("10"));
        assert!(msg.contains("5"));

        let err = NativeModelError::MissingParams;
        assert_eq!(format!("{err}"), "Missing model parameters");
    }

    #[test]
    fn test_trueno_native_model_size_bytes() {
        let params = AlignedVec::from_slice(&[1.0_f32; 100]);
        let bias = AlignedVec::from_slice(&[1.0_f32; 10]);

        let model = TruenoNativeModel::new(ModelType::LinearRegression, 100, 100, 10)
            .with_params(params)
            .with_bias(bias);

        // params: 100 * 4 = 400, bias: 10 * 4 = 40
        assert_eq!(model.size_bytes(), 440);
    }

    #[test]
    fn test_trueno_native_model_default() {
        let model = TruenoNativeModel::default();
        assert_eq!(model.n_params, 0);
        assert_eq!(model.n_features, 0);
        assert_eq!(model.n_outputs, 1);
    }

    #[test]
    fn test_aligned_vec_default() {
        let vec = AlignedVec::<f32>::default();
        assert!(vec.is_empty());
    }
}