Expand description
§NumRs Operations API
Este módulo proporciona operaciones de array con dispatch automático al mejor backend.
§Uso directo (RECOMENDADO)
use numrs::ops;
let result = ops::add(&a, &b)?; // Dispatch automático (SIMD/BLAS/GPU)
let sum = ops::sum(&arr, None)?; // Zero-overhead kernel call
let c = ops::matmul(&a, &b)?; // Usa MKL/OpenBLAS/WebGPU según disponibilidad
§Características
- ✅ Zero overhead (inline + function pointers)
- ✅ Validación funcional de backends
- ✅ Selección automática del mejor kernel
- ✅ Perfecto para wrappers JS/Python
§Organización modular
Las operaciones están organizadas por categoría:
- ops::elementwise::binary — Operaciones binarias elemento por elemento (add, mul, etc.)
- ops::elementwise::unary — Operaciones unarias (sin, cos, sqrt, relu, etc.)
- ops::reduction — Reducciones (sum, mean, etc.)
- ops::linalg — Álgebra lineal (matmul, dot, etc.)
Todas las operaciones usan el sistema de dispatch y son zero-cost.
Re-exports§
pub use elementwise::binary::add::add;
pub use elementwise::binary::mul::mul;
pub use elementwise::binary::div::div;
pub use elementwise::binary::sub::sub;
pub use elementwise::binary::pow::pow;
pub use elementwise::unary::sqrt;
pub use elementwise::unary::sin;
pub use elementwise::unary::cos;
pub use elementwise::unary::tan;
pub use elementwise::unary::abs;
pub use elementwise::unary::exp;
pub use elementwise::unary::log;
pub use elementwise::unary::asin;
pub use elementwise::unary::acos;
pub use elementwise::unary::atan;
pub use elementwise::unary::relu;
pub use elementwise::unary::leaky_relu;
pub use elementwise::unary::sigmoid;
pub use elementwise::unary::tanh;
pub use elementwise::unary::softplus;
pub use elementwise::unary::neg;
pub use reduction::sum;
pub use reduction::max;
pub use reduction::min;
pub use reduction::mean;
pub use reduction::variance;
pub use reduction::argmax;
pub use linalg::matmul;
pub use linalg::dot;
pub use shape::reshape;
pub use shape::transpose;
pub use shape::concat;
pub use shape::broadcast_to;
pub use shape::flatten;
pub use stats::norm;
pub use stats::softmax;
pub use stats::cross_entropy;
pub use model::save_onnx;
pub use model::load_onnx;
pub use model::save_checkpoint;
pub use model::load_checkpoint;
pub use model::create_mlp;
pub use model::create_linear_node;
pub use model::create_relu_node;
pub use model::create_softmax_node;
pub use model::create_matmul_node;
pub use model::create_add_node;
pub use model::array_to_onnx_tensor;
pub use model::infer;