pub trait NDArrayReduceAll: NDArrayRead {
    // Provided methods
    fn max_all(&self) -> Result<Self::DType, Error> { ... }
    fn min_all(&self) -> Result<Self::DType, Error> { ... }
    fn product_all(&self) -> Result<Self::DType, Error> { ... }
    fn sum_all(&self) -> Result<Self::DType, Error> { ... }
}

Reduce operations that aggregate all the elements of an array into a single scalar value.
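In contrast to an axis-wise reduce like the sum(vec![0], false) call in the backprop example below, each of these methods reduces the whole array to one value of the array's DType. A minimal sketch of all four methods; the ha_ndarray import paths and the i32 element type are assumptions here, and the constructor follows the pattern used in the repository examples on this page:

use std::sync::Arc;

use ha_ndarray::{ArrayBase, Context, Error, NDArrayReduceAll};

fn main() -> Result<(), Error> {
    let context = Context::default()?;

    // a 2x3 array: [[1, 2, 3], [4, 5, 6]]
    let x = ArrayBase::<Arc<Vec<_>>>::with_context(
        context,
        vec![2, 3],
        Arc::new(vec![1, 2, 3, 4, 5, 6]),
    )?;

    assert_eq!(x.max_all()?, 6);
    assert_eq!(x.min_all()?, 1);
    assert_eq!(x.sum_all()?, 21);
    assert_eq!(x.product_all()?, 720);

    Ok(())
}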

Provided Methods§

fn max_all(&self) -> Result<Self::DType, Error>

Return the maximum element in this array.

Examples found in repository
examples/backprop.rs (line 56)
fn main() -> Result<(), Error> {
    let context = Context::default()?;

    // initialize the weights of a single-layer model with random values
    let weights = RandomNormal::with_context(context.clone(), 2)?;
    let weights = ArrayOp::new(vec![2, 1], weights) - 0.5;
    let mut weights = ArrayBase::<Arc<RwLock<Buffer<f32>>>>::copy(&weights)?;

    // generate random training inputs, scaled by 2
    let inputs = RandomUniform::with_context(context, vec![NUM_EXAMPLES, 2])?;
    let inputs = ArrayOp::new(vec![NUM_EXAMPLES, 2], inputs) * 2.;
    let inputs = ArrayBase::<Arc<Buffer<f32>>>::copy(&inputs)?;

    // label each example 1 if both of its features are < 1, otherwise 0
    let inputs_bool = inputs.clone().lt_scalar(1.0)?;

    let inputs_left = inputs_bool
        .clone()
        .slice(vec![(0..NUM_EXAMPLES).into(), 0.into()])?;

    let inputs_right = inputs_bool.slice(vec![(0..NUM_EXAMPLES).into(), 1.into()])?;

    let labels = inputs_left
        .and(inputs_right)?
        .expand_dims(vec![1])?
        .cast()?;

    let labels = ArrayBase::<Buffer<f32>>::copy(&labels)?;

    // construct the forward pass and squared-error loss as lazy expressions
    let output = inputs.matmul(weights.clone())?;
    let error = labels.sub(output)?;
    let loss = error.clone().pow_scalar(2.)?;

    // construct the weight-update step, also as a lazy expression
    let d_loss = error * 2.;
    let weights_t = weights.clone().transpose(None)?;
    let gradient = d_loss.matmul(weights_t)?;
    let deltas = gradient.sum(vec![0], false)?.expand_dims(vec![1])?;
    let new_weights = weights.clone().add(deltas * LEARNING_RATE)?;

    let mut i = 0;
    loop {
        // evaluate the loss with the current weights
        let loss = ArrayBase::<Buffer<f32>>::copy(&loss)?;

        if loss.clone().lt_scalar(1.0)?.all()? {
            return Ok(());
        }

        if i % 100 == 0 {
            println!(
                "loss: {} (max {})",
                loss.clone().sum_all()?,
                loss.clone().max_all()?
            );
        }

        assert!(!loss.clone().is_inf()?.any()?, "divergence at iteration {i}");
        assert!(!loss.is_nan()?.any()?, "unstable by iteration {i}");

        // write the updated weights back, so that the next iteration
        // re-evaluates the lazy loss expression with the new weights
        weights.write(&new_weights)?;

        i += 1;
    }
}

fn min_all(&self) -> Result<Self::DType, Error>

Return the minimum element in this array.
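A short usage sketch; the import paths are assumptions, and the constructor borrows the pattern from the repository examples on this page:

use std::sync::Arc;

use ha_ndarray::{ArrayBase, Context, Error, NDArrayReduceAll};

fn smallest(context: Context) -> Result<f32, Error> {
    let x = ArrayBase::<Arc<Vec<_>>>::with_context(
        context,
        vec![3],
        Arc::new(vec![2.5f32, -1.0, 0.5]),
    )?;

    // returns the smallest element, here -1.0
    x.min_all()
}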

fn product_all(&self) -> Result<Self::DType, Error>

Return the product of all elements in this array.
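A short sketch under the same assumptions as above, additionally assuming u64 is a supported element type. Note that, as with any integer product, the result can overflow the element type on large arrays:

use std::sync::Arc;

use ha_ndarray::{ArrayBase, Context, Error, NDArrayReduceAll};

fn factorial_via_product(context: Context) -> Result<u64, Error> {
    // the product of [1, 2, 3, 4, 5] is 5! = 120
    let x = ArrayBase::<Arc<Vec<_>>>::with_context(
        context,
        vec![5],
        Arc::new(vec![1u64, 2, 3, 4, 5]),
    )?;

    x.product_all()
}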

fn sum_all(&self) -> Result<Self::DType, Error>

Return the sum of all elements in this array.

Examples found in repository
examples/benchmarks.rs (line 113)
fn reduce_sum_all(context: &Context) -> Result<(), Error> {
    // benchmark sum_all over arrays of increasing dimensionality
    for m in 2..8 {
        // e.g. for m = 4, the shape is [10, 20, 30]
        let shape = (1..m).map(|dim| dim * 10).collect::<Vec<usize>>();
        let size = shape.iter().product();

        // an array of ones, so the expected sum equals the array size
        let x = ArrayBase::<Arc<Vec<_>>>::with_context(
            context.clone(),
            shape,
            Arc::new(vec![1; size]),
        )?;

        println!("reduce {:?} (size {})...", x, x.size());

        // time each reduction separately
        for _ in 0..ITERATIONS {
            let start = Instant::now();
            let _x = x.clone().sum_all()?;
            let duration = start.elapsed();
            println!("{:?} us", duration.as_micros());
        }
    }

    Ok(())
}
examples/backprop.rs (line 55): the full listing appears above under max_all; the same training loop calls sum_all to report the total loss at each logging interval.

Object Safety§

This trait is not object safe.

Implementors§