Struct Model

pub struct Model {
    pub model: NonNull<ANeuralNetworksModel>,
}

Fields

model: NonNull<ANeuralNetworksModel>

Implementations

impl Model

pub fn new() -> Result<Self>
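
A minimal sketch, not taken from the crate's repository examples: new creates an empty model that is then populated with operands and operations (see add_operand and add_operation below) before finish is called. The import is assumed to come from the crate root.

use nnapi::Model;

fn empty_model() -> nnapi::Result<Model> {
    // Start with an empty model; operands and operations are added afterwards.
    let model = Model::new()?;
    Ok(model)
}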

pub fn from_operands(operands: impl IntoIterator<Item = Operand>) -> Result<Self>

Examples found in repository
examples/add_arrays.rs (lines 7-12)

 4  fn main() -> nnapi::Result<()> {
 5      let tensor9x_type = Operand::tensor(OperandCode::ANEURALNETWORKS_TENSOR_FLOAT32, vec![9], 0., 0);
 6
 7      let mut model = Model::from_operands([
 8          tensor9x_type.clone(),
 9          tensor9x_type.clone(),
10          Operand::activation(),
11          tensor9x_type,
12      ])?;
13
14      model.set_activation_operand_value(2)?;
15      model.add_operation(OperationCode::ANEURALNETWORKS_ADD, &[0, 1, 2], &[3])?;
16      model.identify_inputs_and_outputs(&[0, 1], &[3])?;
17
18      model.finish()?;
19
20      let mut compilation = model.compile()?;
21      compilation.finish()?;
22      let mut execution = compilation.create_execution()?;
23
24      // mind datatype: by default, it's f64, but we need f32
25      execution.set_input(0, &[1f32; 9])?;
26      execution.set_input(1, &[2f32; 9])?;
27
28      let mut output = [0f32; 9];
29      execution.set_output(0, &mut output)?;
30
31      let mut end_event = execution.compute()?;
32      end_event.wait()?;
33
34      assert_eq!(output, [3f32; 9]);
35
36      Ok(())
37  }

pub fn add_operand(&mut self, operand: &Operand) -> Result<()>
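
A hedged sketch, not taken from the repository examples: it rebuilds the four-operand model from the listing above one operand at a time, using new plus add_operand instead of from_operands. The imports are assumed to come from the crate root.

use nnapi::{Model, Operand, OperandCode};

fn add_model_operands() -> nnapi::Result<Model> {
    let tensor9x_type = Operand::tensor(OperandCode::ANEURALNETWORKS_TENSOR_FLOAT32, vec![9], 0., 0);

    let mut model = Model::new()?;
    // NNAPI assigns operand indices in insertion order: 0, 1, 2, 3.
    model.add_operand(&tensor9x_type)?;         // index 0: input A
    model.add_operand(&tensor9x_type)?;         // index 1: input B
    model.add_operand(&Operand::activation())?; // index 2: fused activation scalar
    model.add_operand(&tensor9x_type)?;         // index 3: output
    Ok(model)
}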

pub fn set_activation_operand_value(&mut self, activation_idx: i32) -> Result<()>

Examples found in repository
examples/add_arrays.rs (line 14). The full listing is shown under from_operands above.

pub fn add_operation(&mut self, operation: OperationCode, inputs: &[u32], outputs: &[u32]) -> Result<()>

Examples found in repository
examples/add_arrays.rs (line 15). The full listing is shown under from_operands above.

pub fn identify_inputs_and_outputs(&mut self, inputs: &[u32], outputs: &[u32]) -> Result<()>

Examples found in repository
examples/add_arrays.rs (line 16). The full listing is shown under from_operands above.

pub fn finish(&mut self) -> Result<()>

Examples found in repository
examples/add_arrays.rs (line 18). The full listing is shown under from_operands above.

pub fn compile(&mut self) -> Result<Compilation>

Examples found in repository
examples/add_arrays.rs (line 20). The full listing is shown under from_operands above.

Trait Implementations

impl Deref for Model

type Target = ANeuralNetworksModel

The resulting type after dereferencing.

fn deref(&self) -> &Self::Target

Dereferences the value.

impl DerefMut for Model

fn deref_mut(&mut self) -> &mut Self::Target

Mutably dereferences the value.
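
Because Deref and DerefMut target the raw ANeuralNetworksModel, the underlying NNAPI handle can be borrowed for unsafe C calls that this crate does not wrap. A minimal sketch under that assumption; the helper name is hypothetical and the handle type is only referred to through the Deref target:

use nnapi::Model;
use std::ops::Deref;

fn raw_model_ptr(model: &mut Model) -> *mut <Model as Deref>::Target {
    // DerefMut yields &mut ANeuralNetworksModel; a plain cast turns it into a
    // raw pointer suitable for FFI.
    (&mut **model) as *mut _
}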

impl Drop for Model

fn drop(&mut self)

Executes the destructor for this type.

Auto Trait Implementations

impl Freeze for Model

impl RefUnwindSafe for Model

impl !Send for Model

impl !Sync for Model

impl Unpin for Model

impl UnwindSafe for Model

Blanket Implementations

impl<T> Any for T
where T: 'static + ?Sized,

fn type_id(&self) -> TypeId

Gets the TypeId of self.

impl<T> Borrow<T> for T
where T: ?Sized,

fn borrow(&self) -> &T

Immutably borrows from an owned value.

impl<T> BorrowMut<T> for T
where T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.

impl<T> From<T> for T

fn from(t: T) -> T

Returns the argument unchanged.

impl<T, U> Into<U> for T
where U: From<T>,

fn into(self) -> U

Calls U::from(self). That is, this conversion is whatever the implementation of From<T> for U chooses to do.

impl<P, T> Receiver for P
where P: Deref<Target = T> + ?Sized, T: ?Sized,

type Target = T

This is a nightly-only experimental API (arbitrary_self_types). The target type on which the method may be called.

impl<T, U> TryFrom<U> for T
where U: Into<T>,

type Error = Infallible

The type returned in the event of a conversion error.

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.