// tract_core/ops/array/gather.rs
use crate::internal::*;
use crate::ops::einsum::block_quant_aware_input_shape;
use crate::ops::matmul::pack::OptSimpleMatMulPack;
use ndarray::*;
use tract_linalg::block_quant::BlockQuantValue;
use tract_linalg::mmm::MMMInputValue;

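/// Gathers slices of its first input along `axis`, driven by an i64 indices tensor.
///
/// `output_type` is only required when the data input is opaque (block-quantized or
/// pre-packed matmul payloads): the output element type can not be read off such an
/// input, so it must be given explicitly.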
#[derive(Debug, Clone, Hash, PartialEq)]
pub struct Gather {
    pub axis: usize,
    pub output_type: Option<DatumType>,
}

impl Op for Gather {
    fn name(&self) -> StaticName {
        "Gather".into()
    }

    op_as_typed_op!();
    impl_op_same_as!();
}

impl Gather {
    pub fn new(axis: usize) -> Gather {
        Gather { axis, output_type: None }
    }

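    /// Output shape is `input_shape[..axis] ++ indices_shape ++ input_shape[axis + 1..]`.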
    pub fn compute_output_shape<D: DimLike>(
        &self,
        input_shape: &[D],
        indices_shape: &[D],
    ) -> TractResult<TVec<D>> {
        ensure!(input_shape.len() > self.axis);
        let mut output_shape: TVec<D> = input_shape[..self.axis].into();
        output_shape.extend(indices_shape.iter().cloned());
        output_shape.extend(input_shape[self.axis + 1..].iter().cloned());
        Ok(output_shape)
    }

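    /// Gather on a plain tensor. Uses a contiguous block copy when the leading
    /// dimensions are trivial, and falls back to per-element indexing otherwise.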
    fn eval_t<T: Datum>(&self, data: TValue, indices: &TValue) -> TractResult<Tensor> {
        let data_view = unsafe { data.to_array_view_unchecked::<T>() };
        let indices = indices.to_array_view::<i64>()?;
        let output_shape = &*self.compute_output_shape(data.shape(), indices.shape())?;
        let mut output = unsafe { Tensor::uninitialized::<T>(output_shape)? };
        let mut output_view = output.to_array_view_mut::<T>()?;

        let data_shape = data.shape();
        let data_axis = self.axis;

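        // Number of elements in one slice below the gather axis (a "block").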
        let block_len = data_shape[data_axis + 1..].iter().product::<usize>();

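        // Fast path: when every dimension before the axis is 1 and both views are
        // contiguous, each index maps to one contiguous block copy.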
        let can_block_copy = data_shape[..data_axis].iter().all(|&d| d == 1)
            && output_shape[..data_axis].iter().all(|&d| d == 1)
            && data_view.is_standard_layout()
            && output_view.is_standard_layout();

        if can_block_copy {
            let mut out_offset = 0;
            let input_slice = data_view.as_slice().unwrap();
            let output_slice = output_view.as_slice_mut().unwrap();
            for idx_coords in indices.indexed_iter() {
                let index = *idx_coords.1;
                let axis_len = data_shape[data_axis] as i64;
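                // Negative indices count back from the end of the gather axis.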
                let resolved_index = if index < 0 { index + axis_len } else { index };
                let resolved_index = resolved_index as usize;

                let input_offset = resolved_index * block_len;

                output_slice[out_offset..out_offset + block_len]
                    .clone_from_slice(&input_slice[input_offset..input_offset + block_len]);
                out_offset += block_len;
            }
        } else {
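            // General path: visit every output coordinate, resolve the gathered
            // index and copy elements one by one. `icoords` has the rank of `data`.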
            let ic_len = self.axis + 1 + output_shape.len() - (self.axis + indices.ndim());
            let mut icoords = vec![0; ic_len];
            let axis = self.axis;
            for coords in tract_ndarray::indices(output_shape) {
                let ocoords = coords.as_array_view();
                let ocoords = ocoords.as_slice().unwrap();

                let kcoords = &ocoords[self.axis..][..indices.ndim()];
                let k = indices[kcoords];
                let k = if k < 0 { k + data_view.shape()[self.axis] as i64 } else { k } as usize;
                icoords[0..axis].copy_from_slice(&ocoords[..self.axis]);
                icoords[self.axis] = k;
                icoords[self.axis + 1..].clone_from_slice(&ocoords[self.axis + indices.ndim()..]);
                output_view[ocoords] =
                    data_view.get(&*icoords).cloned().context("Invalid gather")?;
            }
        }
        // Restore the full datum type on both paths: dispatch can run with a
        // representative type (e.g. u8 for quantized u8 data).
        unsafe { output.set_datum_type(data.datum_type()) };
        Ok(output)
    }

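    /// Gather rows (axis 0) of a 2D block-quantized value, dequantizing the selected
    /// rows block by block into an f16 or f32 output.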
    fn eval_bq<F: Datum>(&self, data: &BlockQuantValue, indices: &TValue) -> TractResult<Tensor> {
        ensure!(self.axis == 0);
        ensure!(data.fact.shape().len() == 2);
        let data_shape = &data.fact.shape();
        let output_shape = &*self.compute_output_shape(data_shape, indices.shape())?;
        let mut output = unsafe { Tensor::uninitialized::<F>(output_shape)? };
        let indices_slice = indices.as_slice::<i64>()?;
        let vector_len = data_shape[1];

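        // Size of one quantization block, in values and in encoded bytes.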
        let block_len = data.fact.format.block_len();
        let block_bytes = data.fact.format.block_bytes();
        if F::datum_type() == f16::datum_type() {
            let output_slice = output.as_slice_mut::<f16>()?;
            for (pos, ix) in indices_slice.iter().enumerate() {
                let slice = &mut output_slice[pos * vector_len..][..vector_len];
                for i in (0..vector_len).step_by(block_len) {
                    let offset = data_shape[1] * *ix as usize + i;
                    let block_id = offset / block_len;
                    data.fact.format.dequant_block_f16(
                        &data.value[block_id * block_bytes..][..block_bytes],
                        &mut slice[i..i + block_len],
                    );
                }
            }
        } else {
            let output_slice = output.as_slice_mut::<f32>()?;
            for (pos, ix) in indices_slice.iter().enumerate() {
                let slice = &mut output_slice[pos * vector_len..][..vector_len];
                for i in (0..vector_len).step_by(block_len) {
                    let offset = data_shape[1] * *ix as usize + i;
                    let block_id = offset / block_len;
                    data.fact.format.dequant_block_f32(
                        &data.value[block_id * block_bytes..][..block_bytes],
                        &mut slice[i..i + block_len],
                    );
                }
            }
        }
        Ok(output)
    }

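    /// Gather rows (axis 0) of a pre-packed matmul input, using the packer's own row
    /// extraction (`extract_at_mn_f16` / `extract_at_mn_f32`).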
    fn eval_input_store<F: Datum>(
        &self,
        data: &dyn MMMInputValue,
        indices: &TValue,
    ) -> TractResult<Tensor> {
        ensure!(self.axis == 0);
        let data_shape = &[data.mn(), data.k()];
        let output_shape = &*self.compute_output_shape(data_shape, indices.shape())?;
        let mut output = unsafe { Tensor::uninitialized::<F>(output_shape)? };
        let indices_slice = indices.as_slice::<i64>()?;
        let vector_len = data_shape[1];
        if F::datum_type() == f16::datum_type() {
            let output_slice = output.as_slice_mut::<f16>()?;
            for (pos, m) in indices_slice.iter().enumerate() {
                let slice = &mut output_slice[pos * vector_len..][..vector_len];
                data.extract_at_mn_f16(*m as usize, slice)?;
            }
        } else {
            let output_slice = output.as_slice_mut::<f32>()?;
            for (pos, m) in indices_slice.iter().enumerate() {
                let slice = &mut output_slice[pos * vector_len..][..vector_len];
                data.extract_at_mn_f32(*m as usize, slice)?;
            }
        }
        Ok(output)
    }
}

impl TypedOp for Gather {
    as_op!();

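    // Output datum type follows the input, except for opaque inputs where the declared
    // `output_type` is used together with the block-quant-aware shape.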
    fn output_facts(&self, inputs: &[&TypedFact]) -> TractResult<TVec<TypedFact>> {
        if let Some(dt) = self.output_type {
            ensure!(
                inputs[0].datum_type.is_opaque() || inputs[0].datum_type == dt,
                "Inconsistent datum_type in Gather: attribute is {:?}, but inputs[0] is {:?}",
                dt,
                inputs[0].datum_type
            );
        } else {
            ensure!(
                !inputs[0].datum_type.is_opaque(),
                "Gather applied to compressed data requires an explicit datum_type attribute for its output"
            );
        }
        ensure!(inputs[1].datum_type == i64::datum_type());
        if inputs[0].datum_type.is_opaque() {
            let data_shape = block_quant_aware_input_shape(inputs[0])?;
            Ok(tvec!(self
                .output_type
                .unwrap()
                .fact(&*self.compute_output_shape(&data_shape, &inputs[1].shape)?)))
        } else {
            Ok(tvec!(inputs[0]
                .datum_type
                .fact(&*self.compute_output_shape(&inputs[0].shape, &inputs[1].shape)?)))
        }
    }

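    // Two rewrites: a constant single-element indices tensor turns the gather into a
    // Slice on `axis`; a gather on a constant also consumed by an OptSimpleMatMulPack
    // node is rewired to gather from the packed sibling instead.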
    fn declutter(
        &self,
        model: &TypedModel,
        node: &TypedNode,
    ) -> TractResult<Option<TypedModelPatch>> {
        let (input_fact, indices_fact) = args_2!(model.node_input_facts(node.id)?);
        if let Some(indices) = indices_fact.konst.as_ref() {
            if indices.rank() == 1 && indices.len() == 1 && input_fact.datum_type.is_number() {
                let mut patch = TypedModelPatch::default();
                let mut wire = patch.tap_model(model, node.inputs[0])?;
                let index = indices.cast_to_scalar::<i64>()?;
                let index = if index < 0 {
                    let data_fact = model.outlet_fact(node.inputs[0])?;
                    data_fact.shape[self.axis].clone() + index.to_dim()
                } else {
                    index.to_dim()
                };
                wire = patch.wire_node(
                    format!("{}.slice", node.name),
                    crate::ops::array::Slice {
                        axis: self.axis,
                        start: index.clone(),
                        end: index + 1,
                    },
                    &[wire],
                )?[0];
                patch.shunt_outside(model, node.id.into(), wire)?;
                return Ok(Some(patch));
            }
        }
        if input_fact.konst.is_some() {
            if let Some(sibling) = model
                .outlet_successors(node.inputs[0])
                .iter()
                .find(|o| o.node != node.id && model.node(o.node).op_is::<OptSimpleMatMulPack>())
            {
                let mut patch = TypedModelPatch::default();
                let mut taps = patch.taps(model, &node.inputs)?;
                taps[0] = patch.tap_model(model, sibling.node.into())?;
                let wire = patch.wire_node(&node.name, self.clone(), &taps)?[0];
                patch.shunt_outside(model, node.id.into(), wire)?;
                return Ok(Some(patch));
            }
        }
        Ok(None)
    }
}

impl EvalOp for Gather {
    fn is_stateless(&self) -> bool {
        true
    }

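    // Dispatch on the data input: opaque payloads (BlockQuantValue or packed
    // MMMInputValue) get their dedicated paths, plain tensors go through eval_t.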
    fn eval(&self, inputs: TVec<TValue>) -> TractResult<TVec<TValue>> {
        let (data, indices) = args_2!(inputs);
        let result = if let Ok(opaque) = data.to_scalar::<Opaque>() {
            let dt = self.output_type.unwrap();
            if let Some(data) = opaque.downcast_ref::<BlockQuantValue>() {
                dispatch_floatlike!(Self::eval_bq(dt)(self, data, &indices))?
            } else if let Some(data) = opaque.downcast_ref::<Box<dyn MMMInputValue>>() {
                dispatch_floatlike!(Self::eval_input_store(dt)(self, &**data, &indices))?
            } else {
                bail!("Can't use Gather on {:?} input", data);
            }
        } else {
            dispatch_datum!(Self::eval_t(data.datum_type())(self, data, &indices))?
        };
        Ok(tvec!(result.into_tvalue()))
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_should_gather_scalar_index() {
        let data = Tensor::from(arr1(&[1i64, 2, 3]));
        let gatherer = Gather::new(0);
        for idx in 2..3 {
            let index = Tensor::from(arr0(idx));
            let outputs =
                gatherer.eval(tvec![data.clone().into_tvalue(), index.into_tvalue()]).unwrap();
            let output = &outputs[0];
            assert_eq!(output.shape().len(), 0);
            assert_eq!(*output.to_scalar::<i64>().unwrap(), idx + 1);
        }
    }
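
    // Illustrative addition: a sketch of a negative-index case, exercising the
    // block-copy fast path on a 2D input gathered along axis 0.
    #[test]
    fn test_gather_negative_index() {
        let data = Tensor::from(arr2(&[[1i64, 2], [3, 4], [5, 6]]));
        let gatherer = Gather::new(0);
        let indices = Tensor::from(arr1(&[-1i64, 0]));
        let outputs = gatherer.eval(tvec![data.into_tvalue(), indices.into_tvalue()]).unwrap();
        assert_eq!(*outputs[0], Tensor::from(arr2(&[[5i64, 6], [1, 2]])));
    }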
}