// burn_tensor/tensor/api/take.rs

1use crate::{AsIndex, BasicOps, Int, Tensor, backend::Backend, check, check::TensorCheck};
2use alloc::vec::Vec;
3
4impl<B, const D: usize, K> Tensor<B, D, K>
5where
6    B: Backend,
7    K: BasicOps<B>,
8{
9    /// Takes elements from the tensor along the given dimension using indices of any dimensionality.
10    ///
11    /// This behaves like numpy's take function. When indices is multi-dimensional,
12    /// the output shape will be: input.shape\[:dim\] + indices.shape + input.shape\[dim+1:\]
13    ///
14    /// # Arguments
15    ///
16    /// * `dim` - The dimension along which to select elements. Supports negative indexing.
17    /// * `indices` - The indices of elements to select. Can be any dimensionality.
18    ///   Must be valid indices in the range [0, dim_size).
19    ///
20    /// # Example
21    ///
22    /// ```rust
23    /// use burn_tensor::backend::Backend;
24    /// use burn_tensor::{Tensor, Int};
25    ///
26    /// fn example<B: Backend>() {
27    ///   let device = B::Device::default();
28    ///
29    ///   // Example with 1D indices
30    ///   let tensor = Tensor::<B, 2>::from_data([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]], &device);
31    ///   let indices = Tensor::<B, 1, Int>::from_data([2, 0, 1], &device);
32    ///   let result: Tensor<B, 2> = tensor.clone().take::<1, 2>(-1, indices);  // -1 refers to last dimension
33    ///   println!("{result}");
34    ///   // [[3.0, 1.0, 2.0], [6.0, 4.0, 5.0]]
35    ///
36    ///   // Example with 2D indices - output will have +1 dimension (2D -> 3D)
37    ///   let indices_2d = Tensor::<B, 2, Int>::from_data([[0, 2], [1, 0]], &device);
38    ///   let result: Tensor<B, 3> = tensor.take::<2, 3>(1, indices_2d);
39    ///   println!("{result}");
40    ///   // [[[1.0, 3.0], [2.0, 1.0]], [[4.0, 6.0], [5.0, 4.0]]]
41    /// }
42    /// ```
43    pub fn take<const DI: usize, const DO: usize>(
44        self,
45        dim: impl AsIndex,
46        indices: Tensor<B, DI, Int>,
47    ) -> Tensor<B, DO, K> {
48        let dim = dim.expect_dim_index(D);
49        check!(TensorCheck::take::<D, DI, DO>(dim));
50
51        // Store the indices shape for reshaping later
52        let indices_shape = indices.shape();
53        let indices_dims = indices_shape.clone();
54
55        // Flatten indices to 1D for processing
56        let indices_flat = indices.reshape([indices_shape.num_elements()]);
57
58        // Perform the selection with the flattened indices
59        let selected = self.select(dim, indices_flat);
60
61        // Build the output shape
62        // Output shape = input.shape[:dim] + indices.shape + input.shape[dim+1:]
63        let selected_shape = selected.shape();
64        let mut new_shape = Vec::with_capacity(DO);
65
66        // Add dimensions before the selected dimension
67        for i in 0..dim {
68            new_shape.push(selected_shape[i]);
69        }
70
71        // Add all indices dimensions
72        for idx_dim in indices_dims {
73            new_shape.push(idx_dim);
74        }
75
76        // Add dimensions after the selected dimension
77        for i in (dim + 1)..D {
78            new_shape.push(selected_shape[i]);
79        }
80
81        // Verify we have the correct number of dimensions
82        assert_eq!(
83            new_shape.len(),
84            DO,
85            "Internal error: shape calculation resulted in {} dims but expected {}",
86            new_shape.len(),
87            DO
88        );
89
90        // Convert to fixed-size array for reshape
91        let mut shape_array = [0; DO];
92        for (i, &s) in new_shape.iter().enumerate() {
93            shape_array[i] = s;
94        }
95
96        selected.reshape(shape_array)
97    }
98}