tract-onnx-opl 0.19.2

Tiny, no-nonsense, self-contained TensorFlow and ONNX inference. This crate is the ONNX operator library (OPL) for tract: a tract-nnef registry of ONNX-specific operators that can be enabled on the NNEF framework.
#![allow(clippy::len_zero)]
#[macro_use]
extern crate educe;
#[macro_use]
extern crate log;

use tract_nnef::internal::*;

pub mod einsum;
pub mod is_inf;
pub mod is_nan;
pub mod lrn;
pub mod ml;
pub mod non_max_suppression;
pub mod multinomial;
pub mod random;

/// Extension trait adding the ONNX operator library to tract-nnef's NNEF framework.
pub trait WithOnnx {
    /// Consume the framework and return it with the ONNX registry enabled.
    fn with_onnx(self) -> Self;
    /// Enable the ONNX registry (and the tract-core registry) in place.
    fn enable_onnx(&mut self);
}

impl WithOnnx for tract_nnef::framework::Nnef {
    fn enable_onnx(&mut self) {
        self.enable_tract_core();
        self.registries.push(onnx_opl_registry());
    }
    fn with_onnx(mut self) -> Self {
        self.enable_onnx();
        self
    }
}
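
Below is a minimal downstream sketch (not part of this crate's source) of how WithOnnx might be used. It assumes tract-nnef's nnef() constructor and the Framework::model_for_path loader from its prelude; the model path and input shape are placeholders.

use tract_nnef::prelude::*;
use tract_onnx_opl::WithOnnx;

fn main() -> TractResult<()> {
    // Enable the ONNX registry before loading an NNEF package that may
    // reference tract_onnx_* operators.
    let model = tract_nnef::nnef()
        .with_onnx()
        .model_for_path("model.nnef.tar")? // placeholder NNEF package path
        .into_optimized()?
        .into_runnable()?;
    // Placeholder all-zero input; the shape depends on the actual model.
    let input = Tensor::zero::<f32>(&[1, 3, 224, 224])?;
    let outputs = model.run(tvec!(input.into()))?;
    println!("{:?}", outputs[0]);
    Ok(())
}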

/// Build the "tract_onnx" registry exposing the ONNX-specific operators.
fn onnx_opl_registry() -> Registry {
    let mut registry: Registry = Registry::new("tract_onnx");
    ml::register(&mut registry);
    non_max_suppression::register(&mut registry);
    multinomial::register(&mut registry);
    random::register(&mut registry);
    registry.register_element_wise(
        "tract_onnx_isinf",
        TypeId::of::<is_inf::IsInf>(),
        is_inf::dump,
        is_inf::parameters(),
        is_inf::load,
    );
    registry.register_unit_element_wise("tract_onnx_is_nan", &is_nan::IsNan {});
    registry.register_dumper(TypeId::of::<lrn::Lrn>(), lrn::dump);
    registry.register_primitive(
        "tract_onnx_lrn",
        &lrn::parameters(),
        &[("output", TypeName::Scalar.tensor())],
        lrn::load,
    );
    registry.register_dumper(TypeId::of::<einsum::EinSum>(), einsum::dump);
    registry.register_primitive(
        "tract_onnx_einsum",
        &einsum::parameters(),
        &[("output", TypeName::Scalar.tensor())],
        einsum::load,
    );
    registry
}
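
And a sketch of the opposite direction, relying on the dumpers registered above: serializing a TypedModel that contains ONNX OPL operators back to an NNEF archive. This assumes tract-nnef's Nnef::write_to_tar method; without the "tract_onnx" registry the framework would have no dumper for operators such as lrn::Lrn or einsum::EinSum. The output path is a placeholder.

use tract_nnef::prelude::*;
use tract_onnx_opl::WithOnnx;

fn save_as_nnef(model: &TypedModel) -> TractResult<()> {
    // The ONNX registry must be enabled so the dumpers for tract_onnx_*
    // operators are available during serialization.
    let nnef = tract_nnef::nnef().with_onnx();
    let file = std::fs::File::create("model.nnef.tar")?; // placeholder path
    nnef.write_to_tar(model, file)?;
    Ok(())
}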