tract_tensorflow/ops/nn/dw_conv2d.rs

use crate::model::ParsingContext;
use crate::tfpb::tensorflow::NodeDef;
use tract_hir::internal::*;
use tract_hir::ops::cnn::*;
use tract_hir::ops::nn::*;

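/// Builds the expansion for a TensorFlow `DepthwiseConv2dNative` node, reading the
/// `data_format`, `padding`, `strides` and `dilations` attributes from the NodeDef.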
pub fn depthwise_conv2d(_ctx: &ParsingContext, pb: &NodeDef) -> TractResult<Box<dyn InferenceOp>> {
    let data_format = super::data_format(pb)?;
    let padding = super::padding(pb)?;
    let strides = super::strides(pb)?.into();
    let dilations: TVec<usize> = pb.get_attr_list_int("dilations")?.into();
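    // TensorFlow only allows dilation on the two spatial axes: the batch and
    // channel dilations must both be 1.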
    if dilations.len() != 4 || dilations[0] != 1 || dilations[3] != 1 {
        bail!("dilations must be of the form [1, h, v, 1], found {:?}", dilations)
    }
    Ok(expand(DepthwiseConv2d::new(data_format, padding, strides, dilations)))
}

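/// Shape-inference expansion for `DepthwiseConv2dNative`. The TensorFlow kernel is
/// laid out HWIO as [kernel_h, kernel_w, in_channels, channel_multiplier].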
#[derive(Debug, Clone, new, Hash)]
pub struct DepthwiseConv2d {
    data_format: DataFormat,
    padding: PaddingSpec,
    strides: TVec<usize>,
    dilations: TVec<usize>,
}

impl Expansion for DepthwiseConv2d {
    fn name(&self) -> StaticName {
        "DepthwiseConv2dNative".into()
    }

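    /// Type and shape inference: both inputs and the output are rank-4 tensors of the
    /// same datum type; spatial output dimensions follow from the padding spec and the
    /// output channel dimension is in_channels * channel_multiplier.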
    fn rules<'r, 'p: 'r, 's: 'r>(
        &'s self,
        s: &mut Solver<'r>,
        inputs: &'p [TensorProxy],
        outputs: &'p [TensorProxy],
    ) -> InferenceResult {
        check_input_arity(inputs, 2)?;
        check_output_arity(outputs, 1)?;
        s.equals(&inputs[0].rank, 4)?;
        s.equals(&inputs[1].rank, 4)?;
        s.equals(&inputs[0].datum_type, &inputs[1].datum_type)?;
        s.equals(&inputs[0].datum_type, &outputs[0].datum_type)?;
        s.equals(&outputs[0].rank, 4)?;
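        // Once both the image and kernel shapes are known, derive the output shape:
        // spatial dims from the padding computation, channels as in_channels * multiplier.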
        s.given_2(&inputs[0].shape, &inputs[1].shape, move |s, img, ker| {
            let img = self.data_format.shape(img)?;
            s.equals(&inputs[1].shape[2], &inputs[0].shape[img.c_axis()])?;
            s.equals(&outputs[0].shape[img.n_axis().unwrap()], img.n_dim().unwrap())?;
            if let Ok(ker) = ker.iter().map(|d| d.to_usize()).collect::<TractResult<TVec<_>>>() {
                let output_shape = self.padding.compute(
                    img.hw_dims(),
                    &ker[0..2],
                    &self.dilations[img.hw_axes()],
                    &self.strides[img.hw_axes()],
                );
                let in_channels = ker[2].to_usize()?;
                let multiplier = ker[3].to_usize()?;
                s.equals(&outputs[0].shape[img.h_axis()], &output_shape[0].convoluted)?;
                s.equals(&outputs[0].shape[img.h_axis() + 1], &output_shape[1].convoluted)?;
                s.equals(&outputs[0].shape[img.c_axis()], (in_channels * multiplier).to_dim())?;
            }
            Ok(())
        })?;
        Ok(())
    }

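    /// Lowers the TensorFlow depthwise convolution to a core grouped `Conv`.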
    fn wire(
        &self,
        prefix: &str,
        model: &mut TypedModel,
        inputs: &[OutletId],
    ) -> TractResult<TVec<OutletId>> {
        let input = model.outlet_fact(inputs[0])?;
        let kernel = model.outlet_fact(inputs[1])?;
        let input_shape = input.shape.to_tvec();
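        // The kernel shape must be fully known: the group count below is read from it.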
        let kernel_shape = if let Some(s) = kernel.shape.as_concrete() {
            s
        } else {
            bail!("Do not expect streaming on kernel dims");
        };
        let shape = self.data_format.shape(&input_shape)?;
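        // A depthwise convolution is a grouped convolution with one group per input
        // channel (kernel_shape[2]), using the HWIO kernel layout.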
        let mut conv = Conv::default()
            .hwio()
            .group(kernel_shape[2])
            .dilations(self.dilations[shape.hw_axes()].into())
            .strides(self.strides[shape.hw_axes()].into())
            .padding(self.padding.clone());
        if self.data_format == DataFormat::NHWC {
            conv = conv.nhwc()
        }
        conv.wire(prefix, model, inputs)
    }
}