pub trait NDArrayReduce: NDArrayRead + NDArrayTransform + Debug
where
    Array<Self::DType>: From<Self> + From<Self::Transpose>,
{
    // Provided methods
    fn max(
        self,
        axes: Vec<usize>,
        keepdims: bool
    ) -> Result<ArrayOp<ArrayReduceAxes<Self::DType, Array<Self::DType>>>, Error> { ... }
    fn min(
        self,
        axes: Vec<usize>,
        keepdims: bool
    ) -> Result<ArrayOp<ArrayReduceAxes<Self::DType, Array<Self::DType>>>, Error> { ... }
    fn product(
        self,
        axes: Vec<usize>,
        keepdims: bool
    ) -> Result<ArrayOp<ArrayReduceAxes<Self::DType, Array<Self::DType>>>, Error> { ... }
    fn sum(
        self,
        axes: Vec<usize>,
        keepdims: bool
    ) -> Result<ArrayOp<ArrayReduceAxes<Self::DType, Array<Self::DType>>>, Error> { ... }
}

Axis-wise array reduce operations. Each method consumes the array and returns a deferred ArrayOp describing the reduction; no work is done until the op is read. If keepdims is true, each reduced axis is retained with length one in the output shape (the usual NumPy-style convention).

Provided Methods§

fn max(self, axes: Vec<usize>, keepdims: bool) -> Result<ArrayOp<ArrayReduceAxes<Self::DType, Array<Self::DType>>>, Error>

Construct a max-reduce operation over the given axes.
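
A minimal usage sketch (mine, not from the crate's docs; the setup follows the pattern of examples/benchmarks.rs shown under sum below):

let context = Context::default()?;
let queue = Queue::new(context.clone(), 6)?;

// a 2x3 array; reducing axis 1 leaves shape [2]
let x = ArrayBase::<Vec<_>>::with_context(context, vec![2, 3], vec![1, 5, 3, 4, 2, 6])?;
let max = x.max(vec![1], false)?; // deferred: no computation happens here
let output = max.read(&queue)?;   // executes the reduction; should yield [5, 6]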

fn min(self, axes: Vec<usize>, keepdims: bool) -> Result<ArrayOp<ArrayReduceAxes<Self::DType, Array<Self::DType>>>, Error>

Construct a min-reduce operation over the given axes.
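
A hedged sketch of the keepdims flag (the expected shape assumes keepdims = true retains the reduced axis with length one):

let context = Context::default()?;
let queue = Queue::new(context.clone(), 6)?;

let x = ArrayBase::<Vec<_>>::with_context(context, vec![2, 3], vec![1, 5, 3, 4, 2, 6])?;
let min = x.min(vec![1], true)?; // keepdims = true: result shape should be [2, 1] rather than [2]
let output = min.read(&queue)?;  // should read back as [1, 2]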

fn product(self, axes: Vec<usize>, keepdims: bool) -> Result<ArrayOp<ArrayReduceAxes<Self::DType, Array<Self::DType>>>, Error>

Construct a product-reduce operation over the given axes.
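
Since axes is a Vec, several axes can be reduced in one call. Another sketch of mine, under the same assumptions as above:

let context = Context::default()?;
let queue = Queue::new(context.clone(), 8)?;

let x = ArrayBase::<Vec<_>>::with_context(context, vec![2, 2, 2], vec![1, 2, 3, 4, 5, 6, 7, 8])?;
let product = x.product(vec![0, 2], false)?; // reduce axes 0 and 2; result shape should be [2]
let output = product.read(&queue)?;          // should yield [1*2*5*6, 3*4*7*8] = [60, 672]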

fn sum(self, axes: Vec<usize>, keepdims: bool) -> Result<ArrayOp<ArrayReduceAxes<Self::DType, Array<Self::DType>>>, Error>

Construct a sum-reduce operation over the given axes.
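
A condensed sketch of the pattern in the repository examples below. Note that the constructed op can be read more than once, which the benchmark's timing loop relies on:

let context = Context::default()?;
let queue = Queue::new(context.clone(), 6)?;

let x = ArrayBase::<Vec<_>>::with_context(context, vec![2, 3], vec![1; 6])?;
let summed = x.sum(vec![0], false)?; // deferred; output shape should be [3]
let first = summed.read(&queue)?;    // should yield [2, 2, 2]
let again = summed.read(&queue)?;    // re-evaluates the same op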

Examples found in repository
examples/benchmarks.rs (line 87)
fn reduce_sum_axis(context: &Context) -> Result<(), Error> {
    let shape = vec![10, 20, 30, 40, 50];
    let size = shape.iter().product();
    let queue = Queue::new(context.clone(), size)?;
    let x = ArrayBase::<Vec<_>>::with_context(context.clone(), shape, vec![1; size])?;

    println!("reduce axis {} of {:?} (size {})", 2, x, x.size());

    let reduced = x.sum(vec![2], false)?; // deferred sum-reduce over axis 2; output shape is [10, 20, 40, 50]

    // reading the op executes the reduction; time each evaluation
    for _ in 0..ITERATIONS {
        let start = Instant::now();
        let _output = reduced.read(&queue)?;
        let duration = start.elapsed();
        println!("{:?} ms", duration.as_millis());
    }

    Ok(())
}
More examples
examples/backprop.rs (line 41)
fn main() -> Result<(), Error> {
    let context = Context::default()?;
    let weights = RandomNormal::with_context(context.clone(), 2)?;
    let weights = ArrayOp::new(vec![2, 1], weights) - 0.5;
    let mut weights = ArrayBase::<Arc<RwLock<Buffer<f32>>>>::copy(&weights)?;

    let inputs = RandomUniform::with_context(context, vec![NUM_EXAMPLES, 2])?;
    let inputs = ArrayOp::new(vec![NUM_EXAMPLES, 2], inputs) * 2.;
    let inputs = ArrayBase::<Arc<Buffer<f32>>>::copy(&inputs)?;

    let inputs_bool = inputs.clone().lt_scalar(1.0)?;

    let inputs_left = inputs_bool
        .clone()
        .slice(vec![(0..NUM_EXAMPLES).into(), 0.into()])?;

    let inputs_right = inputs_bool.slice(vec![(0..NUM_EXAMPLES).into(), 1.into()])?;

    let labels = inputs_left
        .and(inputs_right)?
        .expand_dims(vec![1])?
        .cast()?;

    let labels = ArrayBase::<Buffer<f32>>::copy(&labels)?;

    let output = inputs.matmul(weights.clone())?;
    let error = labels.sub(output)?;
    let loss = error.clone().pow_scalar(2.)?;

    let d_loss = error * 2.;
    let weights_t = weights.clone().transpose(None)?;
    let gradient = d_loss.matmul(weights_t)?;
    // sum the per-example gradients over axis 0, then reshape from [2] to [2, 1]
    let deltas = gradient.sum(vec![0], false)?.expand_dims(vec![1])?;
    let new_weights = weights.clone().add(deltas * LEARNING_RATE)?;

    let mut i = 0;
    loop {
        let loss = ArrayBase::<Buffer<f32>>::copy(&loss)?;

        if loss.clone().lt_scalar(1.0)?.all()? {
            return Ok(());
        }

        if i % 100 == 0 {
            println!(
                "loss: {} (max {})",
                loss.clone().sum_all()?,
                loss.clone().max_all()?
            );
        }

        assert!(!loss.clone().is_inf()?.any()?, "divergence at iteration {i}");
        assert!(!loss.is_nan()?.any()?, "unstable by iteration {i}");

        weights.write(&new_weights)?;

        i += 1;
    }
}

Object Safety§

This trait is not object safe.

Implementors§