1use serde::{Serialize, Deserialize};
3
4pub mod elementwise;
5pub mod reduction;
6pub mod matmul;
7pub mod linalg;
8pub mod shape;
9pub mod stats;
10pub mod random;
11pub mod model;
12
13pub use elementwise::*;
14pub use reduction::*;
15pub use matmul::*;
16pub use linalg::*;
17pub use shape::*;
18pub use stats::*;
19pub use random::*;
20pub use model::*;
21
/// A low-level-operation (LLO) program: an ordered sequence of [`LloOp`]s.
///
/// Serializable via serde; ops are stored (and presumably executed) in
/// insertion order — see [`LLOProgram::add_op`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LLOProgram {
    /// The program's operations, in order.
    pub ops: Vec<LloOp>,
}
27
28impl LLOProgram {
29 pub fn new() -> Self { Self { ops: vec![] } }
30 pub fn add_op(&mut self, op: LloOp) { self.ops.push(op); }
31}
32
/// A single low-level operation in an [`LLOProgram`].
///
/// NOTE(review): each variant's `inputs` is a `Vec<usize>` — presumably
/// indices into some value/tensor table managed by the executor (not
/// visible in this file) — TODO confirm. `output_shape` is the shape of
/// the op's result.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum LloOp {
    /// Pointwise op selected by `kind`, lowered according to `strategy`.
    Elementwise { kind: ElementwiseKind, inputs: Vec<usize>, output_shape: Vec<usize>, strategy: ElementwiseStrategy },
    /// Reduction along `axis`; `None` presumably means reduce over all axes — verify against the executor.
    Reduction { axis: Option<usize>, inputs: Vec<usize>, output_shape: Vec<usize> },
    /// Matrix multiply of operands `a` and `b` (same index scheme as `inputs` elsewhere).
    MatMul { a: usize, b: usize, output_shape: Vec<usize> },
    /// Shape-manipulation op (reshape/transpose/etc. per `ShapeKind`).
    Shape { kind: ShapeKind, inputs: Vec<usize>, output_shape: Vec<usize> },
    /// Statistical op selected by `StatsKind`.
    Stats { kind: StatsKind, inputs: Vec<usize>, output_shape: Vec<usize> },
    /// Random-number-generating op selected by `RandomKind`.
    Random { kind: RandomKind, inputs: Vec<usize>, output_shape: Vec<usize> },
    /// Model-level op selected by `ModelKind`.
    Model { kind: ModelKind, inputs: Vec<usize>, output_shape: Vec<usize> },
    /// Training-related op selected by `TrainingKind`.
    Training { kind: TrainingKind, inputs: Vec<usize>, output_shape: Vec<usize> },
}
50
51pub fn array_to_onnx_tensor(name: &str, array: &crate::array::Array) -> anyhow::Result<OnnxTensor> {
53 let mut data_bytes = Vec::with_capacity(array.data.len() * 4);
55 for &val in &array.data {
56 data_bytes.extend_from_slice(&val.to_le_bytes());
57 }
58
59 Ok(OnnxTensor {
60 name: name.to_string(),
61 dtype: 1, shape: array.shape.clone(),
63 data: data_bytes,
64 })
65}