#![allow(missing_docs)]
use super::{Backend, ComputeOp};
use crate::error::TruenoError;
/// Dot-product operation over two equal-length `f32` vectors.
#[derive(Debug, Clone)]
pub struct DotOp {
    // Declared vector length. NOTE(review): `execute` validates the inputs
    // against each other, not against this field — confirm that is intended.
    pub len: usize,
}

impl DotOp {
    /// Creates a dot-product op for vectors of length `len`.
    pub fn new(len: usize) -> Self {
        DotOp { len }
    }
}
impl ComputeOp for DotOp {
    type Input = (Vec<f32>, Vec<f32>);
    type Output = f32;

    /// Stable operation name used for dispatch/reporting.
    fn name(&self) -> &'static str {
        "dot"
    }

    /// Computes the dot product of the two input vectors on the CPU.
    ///
    /// # Errors
    /// Returns [`TruenoError::SizeMismatch`] when the vectors differ in
    /// length (`expected` = left length, `actual` = right length).
    fn execute(&self, input: Self::Input, _backend: Backend) -> Result<Self::Output, TruenoError> {
        let (lhs, rhs) = input;
        if lhs.len() != rhs.len() {
            return Err(TruenoError::SizeMismatch { expected: lhs.len(), actual: rhs.len() });
        }
        // Left-to-right accumulation; identical evaluation order to a
        // zipped `sum()` over the pairwise products.
        let mut acc = 0.0f32;
        for (x, y) in lhs.iter().zip(rhs.iter()) {
            acc += x * y;
        }
        Ok(acc)
    }

    /// Work estimate: one token per element of the left operand.
    fn tokens(&self, input: &Self::Input) -> usize {
        input.0.len()
    }
}
/// Element-wise addition over two equal-length `f32` vectors.
#[derive(Debug, Clone)]
pub struct AddOp {
    // Declared vector length. NOTE(review): `execute` validates the inputs
    // against each other, not against this field — confirm that is intended.
    pub len: usize,
}

impl AddOp {
    /// Creates an element-wise add op for vectors of length `len`.
    pub fn new(len: usize) -> Self {
        AddOp { len }
    }
}
impl ComputeOp for AddOp {
    type Input = (Vec<f32>, Vec<f32>);
    type Output = Vec<f32>;

    /// Stable operation name used for dispatch/reporting.
    fn name(&self) -> &'static str {
        "add"
    }

    /// Returns the element-wise sum of the two input vectors.
    ///
    /// # Errors
    /// Returns [`TruenoError::SizeMismatch`] when the vectors differ in
    /// length (`expected` = left length, `actual` = right length).
    fn execute(&self, input: Self::Input, _backend: Backend) -> Result<Self::Output, TruenoError> {
        let (lhs, rhs) = input;
        if lhs.len() != rhs.len() {
            return Err(TruenoError::SizeMismatch { expected: lhs.len(), actual: rhs.len() });
        }
        // Single allocation up front, then pairwise sums in input order.
        let mut out = Vec::with_capacity(lhs.len());
        for (x, y) in lhs.iter().zip(rhs.iter()) {
            out.push(x + y);
        }
        Ok(out)
    }

    /// Work estimate: one token per element of the left operand.
    fn tokens(&self, input: &Self::Input) -> usize {
        input.0.len()
    }
}
/// Matrix multiplication of an `m x k` matrix by a `k x n` matrix,
/// both supplied as flattened `f32` buffers.
#[derive(Debug, Clone)]
pub struct MatmulOp {
    /// Rows of the left matrix (and of the result).
    pub m: usize,
    /// Shared inner dimension: columns of the left, rows of the right.
    pub k: usize,
    /// Columns of the right matrix (and of the result).
    pub n: usize,
}

impl MatmulOp {
    /// Creates a matmul op with dimensions `(m x k) * (k x n)`.
    pub fn new(m: usize, k: usize, n: usize) -> Self {
        MatmulOp { m, k, n }
    }
}
impl ComputeOp for MatmulOp {
    type Input = (Vec<f32>, Vec<f32>);
    type Output = Vec<f32>;

    /// Stable operation name used for dispatch/reporting.
    fn name(&self) -> &'static str {
        "matmul"
    }

    /// Multiplies the two flattened matrices and returns the flattened
    /// `m x n` result.
    ///
    /// # Errors
    /// Returns [`TruenoError::SizeMismatch`] when either buffer's length
    /// does not equal its declared element count (`m*k` for the left,
    /// `k*n` for the right), and propagates any error from
    /// `Matrix::matmul`.
    fn execute(&self, input: Self::Input, _backend: Backend) -> Result<Self::Output, TruenoError> {
        let (a, b) = input;
        // Validate flattened buffer sizes before building matrices.
        let want_a = self.m * self.k;
        if a.len() != want_a {
            return Err(TruenoError::SizeMismatch { expected: want_a, actual: a.len() });
        }
        let want_b = self.k * self.n;
        if b.len() != want_b {
            return Err(TruenoError::SizeMismatch { expected: want_b, actual: b.len() });
        }
        // NOTE(review): the `_backend` argument is ignored here and the
        // best SIMD backend is auto-selected instead — confirm intentional.
        let simd_backend = crate::Backend::select_best();
        let lhs = crate::Matrix::from_vec_with_backend(self.m, self.k, a, simd_backend);
        let rhs = crate::Matrix::from_vec_with_backend(self.k, self.n, b, simd_backend);
        Ok(lhs.matmul(&rhs)?.data)
    }

    /// Work estimate: one token per element of the `m x n` output.
    fn tokens(&self, _input: &Self::Input) -> usize {
        self.m * self.n
    }
}
/// Softmax over a single `f32` vector.
#[derive(Debug, Clone)]
pub struct SoftmaxOp {
    // Declared vector length. NOTE(review): `execute` sizes itself from the
    // actual input, not this field — confirm that is intended.
    pub len: usize,
}

impl SoftmaxOp {
    /// Creates a softmax op for a vector of length `len`.
    pub fn new(len: usize) -> Self {
        SoftmaxOp { len }
    }
}
impl ComputeOp for SoftmaxOp {
    type Input = Vec<f32>;
    type Output = Vec<f32>;

    /// Stable operation name used for dispatch/reporting.
    fn name(&self) -> &'static str {
        "softmax"
    }

    /// Computes softmax over the input, delegating to the crate's
    /// allocating 1-D softmax kernel. An empty input short-circuits to
    /// an empty output; this op itself never errors.
    fn execute(&self, input: Self::Input, _backend: Backend) -> Result<Self::Output, TruenoError> {
        if input.is_empty() {
            Ok(Vec::new())
        } else {
            Ok(crate::blis::softmax::softmax_1d_alloc(&input))
        }
    }

    /// Work estimate: one token per input element.
    fn tokens(&self, input: &Self::Input) -> usize {
        input.len()
    }
}
impl SoftmaxOp {
    /// Reports whether `backend` is treated as SIMD-capable.
    ///
    /// `Auto` is included in the SIMD set alongside the explicit
    /// AVX2/AVX-512/SSE2/NEON variants. NOTE(review): confirm `Auto`
    /// belongs here — it presumably resolves to a SIMD backend at runtime.
    #[inline]
    pub fn is_simd_backend(backend: Backend) -> bool {
        match backend {
            Backend::Avx2 | Backend::Avx512 | Backend::Sse2 | Backend::Neon | Backend::Auto => true,
            _ => false,
        }
    }
}
#[cfg(test)]
mod tests;