//! Crate root: declares the crate's modules and defines its public
//! re-export surface so downstream users can import everything from
//! the crate top level.

pub mod array;
pub mod array_view;
pub mod autograd;
pub mod backend;
pub mod codegen;
pub mod ir;
pub mod llo;
pub mod ops;
pub mod ops_inplace;
pub mod startup;

pub use array::{cast_array, promoted_dtype, Array, DType, DTypeValue};
pub use array_view::ArrayView;
// Autograd surface: grad toggles, tensors, optimizers, NN layers, losses,
// and the training harness — merged into one grouped import.
pub use autograd::{
    is_grad_enabled, set_grad_enabled, AdaGrad, Adam, BatchNorm1d, Conv1d, CrossEntropyLoss,
    Dataset, Dropout, Flatten, Linear, MSELoss, Module, NoGrad, Optimizer, RMSprop, ReLU,
    Sequential, Sigmoid, Tensor, Trainer, TrainerBuilder, SGD,
};
pub use backend::dispatch::{get_backend_override, set_backend_override};
// WebGPU entry points only exist when targeting wasm32.
#[cfg(target_arch = "wasm32")]
pub use backend::webgpu::{init_webgpu_wasm, set_webgpu_available_wasm};
pub use llo::reduction::ReductionKind;
pub use llo::ElementwiseKind;
pub use ops::{
    abs, acos, add, asin, atan, cos, div, exp, log, mul, pow, relu, sigmoid, sin, softmax, sqrt,
    sub, sum, tan, tanh,
};
pub use startup::print_startup_log;
#[cfg(test)]
mod tests {
    use crate::array::Array;
    use crate::ops::{add, div, mul, sub, sum};

    /// Smoke test for the elementwise binary ops (`add`, `mul`, `sub`, `div`)
    /// and the full reduction (`sum`) on small 1-D arrays.
    #[test]
    fn test_add_mul_sum() {
        let a = Array::new(vec![3], vec![1.0, 2.0, 3.0]);
        let b = Array::new(vec![3], vec![2.0, 2.0, 2.0]);

        let c = add(&a, &b).expect("add failed");
        assert_eq!(c.data, vec![3.0, 4.0, 5.0]);

        let d = mul(&a, &b).expect("mul failed");
        assert_eq!(d.data, vec![2.0, 4.0, 6.0]);

        // A `None` axis reduces over every element: 1 + 2 + 3 == 6.
        let s = sum(&a, None).expect("sum failed");
        assert_eq!(s.data, vec![6.0]);

        let a2 = Array::new(vec![2], vec![1.0, 0.0]);
        let b2 = Array::new(vec![2], vec![2.0, 1.0]);

        let sub_res = sub(&b2, &a2).expect("sub failed");
        assert_eq!(sub_res.data, vec![1.0, 1.0]);

        // x / x is 1 elementwise; b2 contains no zeros, so no division by zero.
        let div_res = div(&b2, &b2).expect("div failed");
        assert_eq!(div_res.data, vec![1.0, 1.0]);
    }
}