use tract_linalg::mmm::{EagerPackedInput, MMMInputValue, MatMatMul, PackedOpaqueFact};
use tract_linalg::pack::{PackedFormat, PackingWriter};

use crate::internal::*;
use ndarray::prelude::*;
use num_integer::Integer;

use crate::ops::cnn::pools::{ConcretePoolGeometry, PoolGeometry};
use crate::ops::cnn::{GeometryBound, PoolSpec, ResolveTo};
use crate::ops::nn::{BaseDataShape, DataFormat, DataShape};

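/// Im2Col rewrites the input of a convolution into the packed B-operand layout expected
/// by the selected MatMatMul kernel, producing one packed matrix (wrapped in an Opaque
/// value) per batch item and per group.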
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Im2Col {
    pub pool_spec: PoolSpec,
    pub group: usize,
    geometry: GeometryBound<SymbolicGeometry, ConcreteGeometry>,
}

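/// Im2col geometry expressed over possibly-symbolic dimensions; resolved to a
/// ConcreteGeometry once the concrete input shape is known.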
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
struct SymbolicGeometry {
    group: usize,
    pool_spec: PoolSpec,
    pool_geometry: PoolGeometry,
    b_pack: PackedFormat,
    k: usize,
}

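/// Geometry resolved against a concrete input shape: pool geometry, packing format and
/// the shapes and strides the patchers rely on.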
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
struct ConcreteGeometry {
    pool: ConcretePoolGeometry,
    pub n: usize,
    k: usize,
    pub b_pack: PackedFormat,
    pub ci_per_group: usize,
    patcher: Patcher,
    input_shape_with_n: DataShape,
    packed_shape: TVec<usize>,
}

impl GeometryBound<SymbolicGeometry, ConcreteGeometry> {
    pub fn b_pack(&self) -> &PackedFormat {
        match self {
            GeometryBound::Symbolic(s) => &s.b_pack,
            GeometryBound::Concrete(s) => &s.b_pack,
        }
    }
    pub fn k(&self) -> usize {
        match self {
            GeometryBound::Symbolic(s) => s.k,
            GeometryBound::Concrete(s) => s.k,
        }
    }
}

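// Resolution of the symbolic geometry once concrete input dimensions are available.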
impl ResolveTo<ConcreteGeometry> for SymbolicGeometry {
    type Param = [usize];
    fn resolve(&self, input_full_shape: &[usize]) -> TractResult<ConcreteGeometry> {
        let pool = self.pool_geometry.to_concrete(input_full_shape)?.into_owned();
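        // Pick the most specialized patcher available for the patch rank and padding.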
        let patcher = if !pool.patch.padded && pool.patch.rank() == 2 {
            Patcher::Valid2d
        } else if pool.patch.rank() == 2 {
            Patcher::Padded2d
        } else if !pool.patch.padded && pool.patch.rank() == 1 {
            Patcher::Valid1d
        } else {
            Patcher::Generic
        };
        let ci_per_group = pool.input_shape.c_dim() / self.group;
        let n = pool.output_shape.hw_dims().iter().product();
        let input_shape_with_n = match self.pool_spec.data_format {
            DataFormat::HWC => DataFormat::NHWC.from_n_c_hw(
                1,
                *pool.input_shape.c(),
                pool.input_shape.hw_dims(),
            )?,
            DataFormat::CHW => DataFormat::NCHW.from_n_c_hw(
                1,
                *pool.input_shape.c(),
                pool.input_shape.hw_dims(),
            )?,
            _ => pool.input_shape.clone(),
        };
        let packed_shape = Im2Col::packed_shape(&pool.input_shape, self.group)?;
        Ok(ConcreteGeometry {
            pool,
            n,
            k: self.k,
            ci_per_group,
            b_pack: self.b_pack.clone(),
            patcher,
            input_shape_with_n,
            packed_shape,
        })
    }
}

impl Im2Col {
    pub fn new(
        pool_spec: PoolSpec,
        group: usize,
        k: usize,
        input_full_shape: &ShapeFact,
        mmm: Box<dyn MatMatMul>,
        packing: usize,
    ) -> TractResult<Im2Col> {
        let b_pack = mmm.packings()[packing]
            .1
            .downcast_ref::<PackedFormat>()
            .context("Im2Col expects regular packed format")?
            .clone();

        let pool_geometry = pool_spec.compute_geo(input_full_shape)?;
        let geometry: GeometryBound<_, _> =
            SymbolicGeometry { group, pool_spec: pool_spec.clone(), pool_geometry, b_pack, k }
                .into();
        let geometry = geometry.optimize_if(input_full_shape.as_concrete())?;
        Ok(Im2Col { pool_spec, group, geometry })
    }

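    // Shape of the im2col output: one packed Opaque payload per batch item and per group.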
    fn packed_shape<D: DimLike>(
        input_shape: &BaseDataShape<D, TVec<D>>,
        group: usize,
    ) -> TractResult<TVec<D>> {
        let mut output_shape: TVec<D> = tvec!();
        output_shape.push(input_shape.n().cloned().unwrap_or_else(|| 1.into()));
        output_shape.push(group.into());
        Ok(output_shape)
    }
}

impl Op for Im2Col {
    fn name(&self) -> Cow<str> {
        "Im2col".into()
    }

    fn info(&self) -> TractResult<Vec<String>> {
        Ok(vec![format!("groups:{}", self.group)])
    }

    impl_op_same_as!();
    op_as_typed_op!();
}

impl EvalOp for Im2Col {
    fn is_stateless(&self) -> bool {
        true
    }

    fn eval(&self, mut inputs: TVec<TValue>) -> TractResult<TVec<TValue>> {
        let geometry = self.geometry.to_concrete(inputs[0].shape())?;
        unsafe {
            let mut input = inputs.remove(0).into_tensor();
            let pad_value: Option<&Tensor> =
                if !inputs.is_empty() { Some(&inputs[0]) } else { None };
            let mut output = Tensor::uninitialized::<Opaque>(&geometry.packed_shape)?;
            if !self.pool_spec.data_format.has_n() {
                input.insert_axis(0)?;
            }
            let mut output_view = output.to_array_view_mut::<Opaque>()?;
            let panel_bytes =
                geometry.b_pack.single_panel_len(geometry.k) * input.datum_type().size_of();

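            // Nothing to pack if an output spatial dimension is zero; otherwise pack one
            // B matrix per batch item and per group.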
            if !geometry.pool.output_shape.shape.contains(&0) {
                for i in 0..*geometry.input_shape_with_n.n().unwrap_or(&1) {
                    let input = input.view_at_prefix(&[i])?;
                    for g in 0..self.group {
                        let mut data = Tensor::uninitialized_aligned_dt(
                            input.datum_type(),
                            &[geometry.b_pack.len(geometry.k, geometry.n)],
                            geometry.b_pack.alignment(),
                        )?;
                        dispatch_copy_by_size!(Patcher::patch(input.datum_type())(
                            &geometry.patcher,
                            &geometry,
                            &input,
                            &mut data.view_mut(),
                            g,
                            pad_value
                        ))?;
                        let input: Box<dyn MMMInputValue> = Box::new(EagerPackedInput {
                            fact: PackedOpaqueFact {
                                format: Box::new(geometry.b_pack.clone()),
                                k: geometry.k,
                                mn: geometry.n.to_dim(),
                            },
                            packed: data.into_blob()?.into(),
                            panel_bytes,
                            mn: geometry.n,
                        });
                        output_view[[i, g]] = input.into();
                    }
                }
            }
            Ok(tvec!(output.into_tvalue()))
        }
    }
}

impl TypedOp for Im2Col {
    as_op!();

    fn output_facts(&self, inputs: &[&TypedFact]) -> TractResult<TVec<TypedFact>> {
        let input_shape = self.pool_spec.data_format.shape(inputs[0].shape.to_tvec())?;
        let output_shape = self.pool_spec.output_shape(&inputs[0].shape)?;
        let mn = output_shape.hw_dims().iter().product::<TDim>();
        let pof = PackedOpaqueFact {
            format: Box::new(self.geometry.b_pack().clone()),
            k: self.geometry.k(),
            mn,
        };
        Ok(tvec!(Opaque::fact(&[input_shape.n().cloned().unwrap_or(1.into()), self.group.into()])
            .with_opaque_fact(pof)))
    }

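    // If the explicit padding-value input is the all-zero constant, drop it: the patchers
    // default to zero padding anyway.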
    fn declutter(
        &self,
        model: &TypedModel,
        node: &TypedNode,
    ) -> TractResult<Option<TypedModelPatch>> {
        let input_fact = model.outlet_fact(node.inputs[0])?;
        if node.inputs.len() == 2
            && model.outlet_fact(node.inputs[1])?.konst.as_ref().and_then(|t| t.as_uniform())
                == Some(Tensor::zero_scalar_dt(input_fact.datum_type)?)
        {
            Ok(Some(
                TypedModelPatch::replace_single_op(model, node, &node.inputs[0..1], self.clone())?
                    .with_context("b0 is zero"),
            ))
        } else {
            Ok(None)
        }
    }
}

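/// Copy strategies used to fill the packed B panels: a generic fallback plus specialized
/// loops for valid (unpadded) 1D/2D and padded 2D convolutions.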
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
enum Patcher {
    Generic,
    Valid1d,
    Valid2d,
    Padded2d,
}

impl Patcher {
    fn patch<'p, T: Copy + Datum + num_traits::Zero>(
        &self,
        geo: &'p ConcreteGeometry,
        input: &TensorView,
        pack: &'p mut TensorView,
        g: usize,
        pad_value: Option<&Tensor>,
    ) -> TractResult<()> {
        match self {
            Patcher::Valid1d => Self::valid_1d::<T>(geo, input, pack, g),
            Patcher::Valid2d => Self::valid_2d::<T>(geo, input, pack, g),
            Patcher::Padded2d => Self::padded_2d::<T>(
                geo,
                input,
                pack,
                g,
                pad_value.unwrap_or(&Tensor::zero_scalar::<T>()?),
            ),
            _ => Self::generic::<T>(
                geo,
                input,
                pack,
                g,
                pad_value.unwrap_or(&Tensor::zero_scalar::<T>()?),
            ),
        }
    }

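    // Fallback: build the full k x n im2col matrix in a scratch tensor, then hand it to
    // the packer.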
    #[inline(never)]
    fn generic<'p, T: Copy + Datum>(
        geometry: &'p ConcreteGeometry,
        input: &TensorView,
        pack: &'p mut TensorView,
        g: usize,
        pad_value: &Tensor,
    ) -> TractResult<()> {
        unsafe {
            let pad_value = *pad_value.to_scalar_unchecked();
            let mut mega_matrix = Tensor::uninitialized::<T>(&[geometry.k, geometry.n])?;
            let mut mega_matrix_view = mega_matrix.to_array_view_mut_unchecked::<T>();
            let ptr = input.as_ptr_unchecked::<T>();
            let ptr = ptr.add(geometry.input_shape_with_n.c_stride() * (g * geometry.ci_per_group));
            for (spatial, mut col) in ndarray::indices(&*geometry.pool.patch.output_shape)
                .into_iter()
                .zip(mega_matrix_view.axis_iter_mut(Axis(1)))
            {
                let mut col = col.iter_mut();
                for ci in 0..geometry.ci_per_group {
                    let ptr = ptr.add(geometry.input_shape_with_n.c_stride() * ci);
                    for v in geometry.pool.patch.at(spatial.slice()) {
                        *col.next().expect("geometry error in conv") =
                            v.map(|o| *ptr.offset(o)).unwrap_or(pad_value);
                    }
                }
            }
            geometry.b_pack.pack(pack, mega_matrix.view(), 0, 1);
            Ok(())
        }
    }

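    // 1D, no padding: stream input values straight into the packing writer, k-outer.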
    #[inline(never)]
    fn valid_1d<'p, T: Copy + Datum>(
        geometry: &'p ConcreteGeometry,
        input: &TensorView,
        pack: &'p mut TensorView,
        g: usize,
    ) -> TractResult<()> {
        unsafe {
            let x_stride = *geometry.input_shape_with_n.h_stride() as isize
                * geometry.pool.patch.spec.strides[0] as isize;
            let c_stride = *geometry.input_shape_with_n.c_stride() as isize;
            let pack = pack.as_slice_mut_unchecked::<T>();
            let mut writer =
                geometry.b_pack.write_with_k_outer(pack.as_mut_ptr(), geometry.k, geometry.n);
            let iptr = input.as_ptr_unchecked::<T>();
            let iptr = iptr.add(g * geometry.ci_per_group * geometry.input_shape_with_n.c_stride());
            for ci in 0..geometry.ci_per_group {
                let iptr = iptr.offset(ci as isize * c_stride);
                for koffset in &geometry.pool.patch.standard_layout_data_field {
                    let iptr = iptr.offset(*koffset);
                    for x in 0..*geometry.pool.patch.output_shape.get_unchecked(0) {
                        writer.write(*iptr.offset(x as isize * x_stride));
                    }
                }
            }
            Ok(())
        }
    }

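    // 2D with padding: for each kernel offset and output row, emit a left padded run, the
    // valid middle copied from the input, then a right padded run.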
    #[inline(never)]
    fn padded_2d<'p, T: Copy + Datum>(
        geometry: &'p ConcreteGeometry,
        input: &TensorView,
        pack: &'p mut TensorView,
        g: usize,
        pad_value: &Tensor,
    ) -> TractResult<()> {
        unsafe {
            let pad_value = *pad_value.to_scalar_unchecked();
            let pack = pack.as_slice_mut_unchecked::<T>();
            let y_stride = geometry.pool.patch.spec.strides[0] as isize;
            let x_stride = geometry.pool.patch.spec.strides[1] as isize;
            let shape = &geometry.input_shape_with_n;
            let y_stride_ptr = y_stride * *shape.h_stride() as isize;
            let x_stride_ptr = x_stride * *shape.w_stride() as isize;
            let c_stride_ptr = *shape.c_stride() as isize;
            let input_height = shape.hw_dims()[0] as isize;
            let input_width = shape.hw_dims()[1] as isize;
            let kernel_len = geometry.pool.patch.standard_layout_data_field.len();
            let mut writer =
                geometry.b_pack.write_with_k_outer(pack.as_mut_ptr(), geometry.k, geometry.n);
            let iptr = input.as_ptr_unchecked::<T>();
            let iptr = iptr.add(g * geometry.ci_per_group * shape.c_stride());
            let output_width = *geometry.pool.patch.output_shape.get_unchecked(1);
            for ci in 0..geometry.ci_per_group {
                let iptr = iptr.offset(ci as isize * c_stride_ptr);
                for kitem in 0..kernel_len {
                    let dy = *geometry.pool.patch.data_field.as_ptr().offset(kitem as isize * 2);
                    let dx =
                        *geometry.pool.patch.data_field.as_ptr().offset(1 + kitem as isize * 2);
                    let valid_x_start =
                        Integer::div_ceil(&-dx, &x_stride).max(0).min(output_width as _);
                    let valid_x_end =
                        Integer::div_ceil(&(input_width - dx), &x_stride).min(output_width as _);

                    let iptr = iptr.offset(
                        *geometry.pool.patch.standard_layout_data_field.get_unchecked(kitem),
                    );
                    for yo in 0..*geometry.pool.patch.output_shape.get_unchecked(0) {
                        let y = yo as isize * y_stride + dy;
                        let iptr = iptr.offset(yo as isize * y_stride_ptr);
                        if y >= 0 && y < input_height {
                            Self::padded_2d_invalid_x_loop(
                                valid_x_start as usize,
                                pad_value,
                                &mut writer,
                            );
                            Self::padded_2d_valid_x_loop(
                                valid_x_start,
                                valid_x_end,
                                x_stride_ptr,
                                iptr,
                                &mut writer,
                            );
                            Self::padded_2d_invalid_x_loop(
                                output_width - valid_x_end as usize,
                                pad_value,
                                &mut writer,
                            );
                        } else {
                            Self::padded_2d_invalid_x_loop(output_width, pad_value, &mut writer);
                        }
                    }
                }
            }
        }
        Ok(())
    }

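    // Write `count` padding values for output columns that fall outside the input.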
    #[inline(never)]
    unsafe fn padded_2d_invalid_x_loop<T: Copy + Datum>(
        count: usize,
        pad_value: T,
        writer: &mut tract_linalg::pack::KOutWriter<T>,
    ) {
        for _ in 0..count {
            writer.write(pad_value);
        }
    }

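    // Copy the output columns in [x_min, x_max) from the input, stepping by x_stride_ptr
    // elements.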
    #[inline(never)]
    unsafe fn padded_2d_valid_x_loop<T: Copy + Datum>(
        x_min: isize,
        x_max: isize,
        x_stride_ptr: isize,
        iptr: *const T,
        writer: &mut tract_linalg::pack::KOutWriter<T>,
    ) {
        for x in x_min..x_max {
            writer.write(*iptr.offset(x * x_stride_ptr));
        }
    }

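    // 2D, no padding: plain strided copies into the packing writer, no bound checks needed.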
    #[inline(never)]
    fn valid_2d<'p, T: Copy + Datum>(
        geometry: &'p ConcreteGeometry,
        input: &TensorView,
        pack: &'p mut TensorView,
        g: usize,
    ) -> TractResult<()> {
        unsafe {
            let pack = pack.as_slice_mut_unchecked::<T>();
            let shape = &geometry.input_shape_with_n;
            let y_stride = geometry.pool.patch.spec.strides[0] as isize;
            let x_stride = geometry.pool.patch.spec.strides[1] as isize;
            let y_stride_ptr = y_stride * *shape.h_stride() as isize;
            let x_stride_ptr = x_stride * *shape.w_stride() as isize;
            let c_stride_ptr = *shape.c_stride() as isize;
            let mut writer =
                geometry.b_pack.write_with_k_outer(pack.as_mut_ptr(), geometry.k, geometry.n);
            let iptr = input.as_ptr_unchecked::<T>();
            let iptr = iptr.add(g * geometry.ci_per_group * shape.c_stride());
            for ci in 0..geometry.ci_per_group {
                let iptr = iptr.offset(ci as isize * c_stride_ptr);
                for koffset in &geometry.pool.patch.standard_layout_data_field {
                    let iptr = iptr.offset(*koffset);
                    for y in 0..*geometry.pool.patch.output_shape.get_unchecked(0) {
                        let iptr = iptr.offset(y as isize * y_stride_ptr);
                        for x in 0..*geometry.pool.patch.output_shape.get_unchecked(1) {
                            writer.write(*iptr.offset(x as isize * x_stride_ptr));
                        }
                    }
                }
            }
            Ok(())
        }
    }
}