// tract_core/ops/array/concat.rs

use tract_data::itertools::Itertools;
use tract_num_traits::Zero;

use crate::internal::*;

use super::Slice;
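
/// Concatenation of tensors along a fixed `axis`.
///
/// A minimal usage sketch, with hypothetical outlets `a` and `b`
/// (assuming a `TypedModel` under construction, as elsewhere in tract):
///
/// ```ignore
/// // a and b: OutletId of facts agreeing on every axis except axis 1
/// let out = model.wire_node("concat", TypedConcat::new(1), &[a, b])?;
/// ```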
#[derive(new, Debug, Clone, Hash)]
pub struct TypedConcat {
    pub axis: usize,
}

impl TypedConcat {
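    /// Cumulated offsets of the inputs along the concatenation axis:
    /// the start position of each input, plus a final entry holding the
    /// total extent (axis lengths 2, 3 and 4 yield `[0, 2, 5, 9]`).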
    pub fn offsets(&self, inputs: &[&TypedFact]) -> TractResult<Vec<TDim>> {
        let mut offsets = vec![0.to_dim()];
        for slice in inputs {
            let len = slice.shape[self.axis].clone();
            let offset = len + offsets.last().unwrap();
            offsets.push(offset)
        }
        Ok(offsets)
    }
}

impl Op for TypedConcat {
    fn name(&self) -> Cow<str> {
        "Concat".into()
    }

    fn info(&self) -> TractResult<Vec<String>> {
        Ok(vec![format!("axis: {}", self.axis)])
    }

    op_as_typed_op!();
}

impl TypedOp for TypedConcat {
    as_op!();

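    // The output fact matches the inputs on every axis except the
    // concatenation axis, where the input dimensions add up. Inputs
    // disagreeing on rank or on any other axis are rejected.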
    fn output_facts(&self, inputs: &[&TypedFact]) -> TractResult<TVec<TypedFact>> {
        let mut fact = inputs[0].without_value();
        for input in inputs {
            if input.rank() != fact.rank()
                || input
                    .shape
                    .iter()
                    .zip(fact.shape.iter())
                    .enumerate()
                    .filter(|(ax, _)| *ax != self.axis)
                    .any(|(_, (i, f))| i != f)
            {
                bail!("Inconsistent concat {:?} inputs: {:?}", self, inputs);
            }
        }
        fact.shape.set(self.axis, self.offsets(inputs)?.pop().unwrap());
        Ok(tvec!(fact))
    }

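    // Link every axis of every input to the same axis of the output,
    // leaving only the concatenation axis disconnected.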
    fn axes_mapping(
        &self,
        inputs: &[&TypedFact],
        outputs: &[&TypedFact],
    ) -> TractResult<AxesMapping> {
        let mut axes = AxesMapping::disconnected(inputs, outputs)?;
        for ax in 0..outputs[0].rank() {
            if ax != self.axis {
                for i in 0..inputs.len() {
                    axes = axes.linking((InOut::Out(0), ax), (InOut::In(i), ax))?;
                }
            }
        }
        Ok(axes)
    }

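    // Follow the concatenation axis through the axis change; if the change
    // leaves it without an equivalent, the op cannot absorb it.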
    fn change_axes(
        &self,
        model: &TypedModel,
        node: &TypedNode,
        _io: InOut,
        change: &AxisOp,
    ) -> TractResult<Option<AxisChangeConsequence>> {
        let axis =
            if let Some(axis) = change.transform_axis(self.axis) { axis } else { return Ok(None) };
        let op = TypedConcat { axis };
        Ok(Some(AxisChangeConsequence::new(model, node, Some(Box::new(op)), change)))
    }

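    // Two simplifications: a single-input concat is a no-op and can be
    // shunted, and an input of zero volume contributes nothing and can
    // be dropped.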
    fn declutter(
        &self,
        model: &TypedModel,
        node: &TypedNode,
    ) -> TractResult<Option<TypedModelPatch>> {
        if node.inputs.len() == 1 {
            return TypedModelPatch::shunt_one_op(model, node);
        }
        let inputs = model.node_input_facts(node.id)?;
        if let Some(pos) = inputs.iter().position(|f| f.shape.volume().is_zero()) {
            let mut inputs = node.inputs.clone();
            inputs.remove(pos);
            return Ok(Some(TypedModelPatch::replace_single_op(
                model,
                node,
                &inputs,
                self.clone(),
            )?));
        }
        Ok(None)
    }

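    // On any axis other than the concatenation axis, the concat is simply
    // rebuilt over the sliced inputs. On the concatenation axis, a slice
    // that provably falls within a single input is answered from that
    // input alone.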
    fn slice(
        &self,
        patch: &mut TypedModelPatch,
        _model: &TypedModel,
        _node: &TypedNode,
        prefix: &str,
        inputs: &[OutletId],
        output_axis: usize,
        start: &TDim,
        end: &TDim,
    ) -> TractResult<Option<TVec<OutletId>>> {
        if output_axis != self.axis {
            return Ok(Some(patch.wire_node(prefix, self.clone(), inputs)?));
        }
        let facts =
            inputs.iter().map(|o| patch.outlet_fact(*o)).collect::<TractResult<TVec<_>>>()?;
        let offsets = self.offsets(&facts)?;
        std::mem::drop(facts);
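        // `facts` borrowed `patch`, hence the explicit drop above. Each
        // window (slice_start, slice_end) spans one input; if the requested
        // range provably fits inside it, slice that single input, rebasing
        // start and end on the input's own offset.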
        for (ix, (slice_start, slice_end)) in offsets.iter().tuple_windows().enumerate() {
            if (start.clone() - slice_start).prove_positive_or_zero()
                && (slice_end.clone() - end).prove_positive_or_zero()
            {
                return patch
                    .wire_node(
                        format!("{prefix}.slice-{output_axis}.{start}..{end}"),
                        Slice {
                            axis: output_axis,
                            start: (start.clone() - slice_start),
                            end: (end.clone() - slice_start),
                        },
                        &[inputs[ix]],
                    )
                    .map(Some);
            }
        }
        Ok(None)
    }
}

impl EvalOp for TypedConcat {
    fn is_stateless(&self) -> bool {
        true
    }

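    // Evaluation delegates to Tensor::stack_tensors, which concatenates
    // the inputs along the requested axis.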
    fn eval(&self, inputs: TVec<TValue>) -> TractResult<TVec<TValue>> {
        let result = Tensor::stack_tensors(self.axis, &inputs)?;
        Ok(tvec![result.into_tvalue()])
    }
}