//! Partial and complete tensor type representations.
use crate::internal::*;
use crate::prelude::*;
use crate::tensor::Tensor;
use downcast_rs::Downcast;
use std::convert::{TryFrom, TryInto};
use std::fmt;

/// Type information about a tensor: shape and element type, in various states
/// of determination.
pub trait TensorInfo: std::fmt::Debug + Downcast + objekt::Clone + Send + Sync + 'static {
    /// Convert to TensorFact, the most accommodating variant of TensorInfo.
    fn to_tensor_fact(&self) -> TensorFact;
}

impl_downcast!(TensorInfo);
objekt::clone_trait_object!(TensorInfo);

impl TensorInfo for TensorFact {
    fn to_tensor_fact(&self) -> TensorFact {
        self.clone()
    }
}

impl<'a> TryFrom<&'a TensorFact> for TypedTensorInfo {
    type Error = TractError;
    fn try_from(fact: &TensorFact) -> TractResult<TypedTensorInfo> {
        if let (Some(datum_type), Some(shape)) =
            (fact.datum_type.concretize(), fact.shape.concretize())
        {
            let stream_info = shape
                .iter()
                .cloned()
                .enumerate()
                .find(|d| d.1.to_integer().is_err())
                .map(|(axis, len)| StreamInfo { axis, len });
            let shape = shape.iter().map(|d| d.to_integer().unwrap_or(0) as usize).collect();
            let shape = ShapeInfo { shape, stream_info };
            Ok(TypedTensorInfo { datum_type, shape, konst: fact.value.concretize() })
        } else {
            bail!("Can not make a TypedTensorInfo out of {:?}", fact)
        }
    }
}

impl TryFrom<TensorFact> for TypedTensorInfo {
    type Error = TractError;
    fn try_from(fact: TensorFact) -> TractResult<TypedTensorInfo> {
        (&fact).try_into()
    }
}

impl<'a> From<&'a Tensor> for TensorFact {
    fn from(t: &'a Tensor) -> TensorFact {
        TensorFact::from(t.clone())
    }
}

/// Streaming information for a streamed tensor.
#[derive(Debug, Clone, Default, PartialEq)]
pub struct StreamInfo {
    /// Streaming axis
    pub axis: usize,
    /// Streaming length
    pub len: TDim,
}

/// Fully determined shape of a tensor.
///
/// Tensors in tract can have at most one streaming dimension. TDim generalizes
/// the regular tensor dimensions (usize) to arithmetic expressions of `S`, the
/// (sometimes hypothetical) tensor length on the streaming axis.
#[derive(Clone)]
pub struct ShapeInfo {
    shape: TVec<usize>,
    /// Optional information for streaming tensors. None for regular tensors.
    pub stream_info: Option<StreamInfo>,
}

impl PartialEq for ShapeInfo {
    fn eq(&self, other: &ShapeInfo) -> bool {
        self.shape.len() == other.shape.len() && self.iter().zip(other.iter()).all(|(a, b)| a == b)
    }
}

impl ShapeInfo {
    /// Rank of the tensor.
    pub fn rank(&self) -> usize {
        self.shape.len()
    }

    /// Extended dimension of the i-th axis.
    ///
    /// The TDim will wrap a plain integer for regular (non-streaming) tensors.
    pub fn dim(&self, i: usize) -> TDim {
        if let Some(ref stream) = self.stream_info {
            if stream.axis == i {
                return stream.len.clone();
            }
        }
        self.shape[i].to_dim()
    }

    /// Set the i-th axis dimension.
    pub fn set_dim(&mut self, i: usize, dim: TDim) -> TractResult<()> {
        if let Some(ref stream) = self.stream_info {
            if let Ok(int) = dim.to_integer() {
                self.shape[i] = int as _;
                if stream.axis == i {
                    self.stream_info = None;
                }
            } else if stream.axis != i {
                bail!("Attempt to build a shape with two streaming dims")
            } else {
                self.stream_info = Some(StreamInfo { len: dim, axis: i })
            }
        } else if let Ok(int) = dim.to_integer() {
            self.shape[i] = int as _;
        } else {
            self.shape[i] = 0;
            self.stream_info = Some(StreamInfo { len: dim, axis: i })
        }
        Ok(())
    }

    /// Shape of the tensor, unless it is streaming.
    pub fn as_finite(&self) -> Option<&[usize]> {
        match self.stream_info {
            None => Some(&*self.shape),
            _ => None,
        }
    }

    /// Iterator over the dimensions of the shape.
    pub fn iter<'a>(&'a self) -> impl Iterator<Item = TDim> + 'a {
        self.shape.clone().into_iter().enumerate().map(move |(ix, d)| {
            if let Some(ref info) = self.stream_info {
                if ix == info.axis {
                    return info.len.clone();
                }
            }
            (d as i64).to_dim()
        })
    }

    /// Convert the shape to an array of extended dimensions.
    pub fn to_tvec(&self) -> TVec<TDim> {
        self.iter().collect::<TVec<TDim>>()
    }

    /// Convert the shape to a fully determined shape fact.
    pub fn to_shape_fact(&self) -> ShapeFact {
        ShapeFact::from(self.iter())
    }

    /// Build a ShapeInfo from extended dimensions, allowing at most one
    /// streaming dimension.
    pub fn from_dims<T: AsRef<[TDim]> + std::fmt::Debug>(it: T) -> TractResult<ShapeInfo> {
        let count = it.as_ref().iter().filter(|t| t.is_stream()).count();
        if count > 1 {
            bail!("Shapes with two streaming dims are invalid: {:?}", it)
        } else {
        } else {
            let stream_info = it
                .as_ref()
                .iter()
                .enumerate()
                .find(|(_ix, d)| d.is_stream())
                .map(|(ix, d)| StreamInfo { axis: ix, len: d.clone() });
            Ok(ShapeInfo {
                shape: it
                    .as_ref()
                    .iter()
                    .map(|t| t.to_integer().map(|i| i as usize).unwrap_or(0))
                    .collect(),
                stream_info,
            })
        }
    }
}
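
// A minimal usage sketch for `ShapeInfo`, added for illustration. It assumes
// the `internal` prelude exposes `ToDim` as it does for the rest of this
// module, and only exercises fully determined (non-streaming) shapes.
#[cfg(test)]
mod shape_info_sketch {
    use super::*;
    use crate::internal::*;

    #[test]
    fn build_and_query_a_concrete_shape() {
        // Build a 2x3 shape from extended (TDim) dimensions.
        let dims = [2usize.to_dim(), 3usize.to_dim()];
        let mut shape = ShapeInfo::from_dims(&dims[..]).unwrap();
        assert_eq!(shape.rank(), 2);
        assert_eq!(shape.dim(1), 3usize.to_dim());
        // No streaming dimension, so the shape is finite.
        assert_eq!(shape.as_finite(), Some(&[2usize, 3][..]));
        // Overwriting an axis with another concrete dimension keeps it finite.
        shape.set_dim(0, 4usize.to_dim()).unwrap();
        assert_eq!(shape.as_finite(), Some(&[4usize, 3][..]));
    }
}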

impl TryFrom<()> for ShapeInfo {
    type Error = TractError;
    fn try_from(_it: ()) -> TractResult<ShapeInfo> {
        ShapeInfo::from_dims([0.to_dim(); 0].as_ref())
    }
}

impl TryFrom<&[TDim]> for ShapeInfo {
    type Error = TractError;
    fn try_from(it: &[TDim]) -> TractResult<ShapeInfo> {
        ShapeInfo::from_dims(it)
    }
}

impl TryFrom<&[usize]> for ShapeInfo {
    type Error = TractError;
    fn try_from(it: &[usize]) -> TractResult<ShapeInfo> {
        Ok(ShapeInfo { shape: it.into(), stream_info: None })
    }
}

impl fmt::Debug for ShapeInfo {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        use itertools::Itertools;
        write!(fmt, "{}", self.iter().join("x"))
    }
}

/// Fully determined tensor information for TypedModel.
#[derive(Clone, PartialEq)]
pub struct TypedTensorInfo {
    /// tensor element type
    pub datum_type: DatumType,
    /// tensor shape
    pub shape: ShapeInfo,
    /// optional constant value
    pub konst: Option<Arc<Tensor>>,
}

impl TypedTensorInfo {
    /// Build a TypedTensorInfo from a Rust scalar type and a shape.
    pub fn shape<T, S, E>(shape: S) -> TractResult<TypedTensorInfo>
    where
        T: Datum,
        S: TryInto<ShapeInfo, Error = E>,
        TractError: From<E>,
    {
        Self::dt_shape(T::datum_type(), shape)
    }

    /// Build a TypedTensorInfo from an element type and a shape.
    pub fn dt_shape<S, E>(datum_type: DatumType, shape: S) -> TractResult<TypedTensorInfo>
    where
        S: TryInto<ShapeInfo, Error = E>,
        TractError: From<E>,
    {
        Ok(TypedTensorInfo { datum_type, shape: shape.try_into()?, konst: None })
    }
}
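
// A minimal sketch of the `TypedTensorInfo` constructors above, added for
// illustration. It relies on the `TryFrom<&[usize]>` conversion for
// `ShapeInfo` defined earlier in this module.
#[cfg(test)]
mod typed_info_sketch {
    use super::*;
    use crate::internal::*;

    #[test]
    fn dt_shape_builds_a_constant_free_info() {
        let info = TypedTensorInfo::dt_shape(DatumType::F32, &[1usize, 2, 3][..]).unwrap();
        assert_eq!(info.datum_type, DatumType::F32);
        assert_eq!(info.shape.as_finite(), Some(&[1usize, 2, 3][..]));
        assert!(info.konst.is_none());

        // The generic variant derives the element type from a Rust scalar type.
        let info = TypedTensorInfo::shape::<f32, _, _>(&[4usize, 5][..]).unwrap();
        assert_eq!(info.datum_type, DatumType::F32);
    }
}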

impl TensorInfo for TypedTensorInfo {
    fn to_tensor_fact(&self) -> TensorFact {
        match self.konst.clone() {
            Some(k) => k.into(),
            None => TensorFact::dt_shape(self.datum_type, self.shape.to_shape_fact()),
        }
    }
}
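
// A round-trip sketch, added for illustration: a fully determined
// `TypedTensorInfo` converts to a `TensorFact` and back through the `TryFrom`
// implementation earlier in this module. It assumes `to_shape_fact` yields a
// closed, fully concretizable shape for non-streaming tensors.
#[cfg(test)]
mod typed_fact_roundtrip_sketch {
    use super::*;
    use crate::internal::*;
    use std::convert::TryFrom;

    #[test]
    fn typed_to_fact_and_back() {
        let typed = TypedTensorInfo::dt_shape(DatumType::F32, &[2usize, 3][..]).unwrap();
        let fact: TensorFact = typed.to_tensor_fact();
        let back = TypedTensorInfo::try_from(&fact).unwrap();
        assert_eq!(back.datum_type, DatumType::F32);
        assert_eq!(back.shape.as_finite(), Some(&[2usize, 3][..]));
    }
}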

impl From<Tensor> for TypedTensorInfo {
    fn from(t: Tensor) -> TypedTensorInfo {
        TypedTensorInfo::from(t.into_arc_tensor())
    }
}

impl<'t> From<&'t Tensor> for TypedTensorInfo {
    fn from(t: &'t Tensor) -> TypedTensorInfo {
        TypedTensorInfo::from(t.clone())
    }
}

impl From<Arc<Tensor>> for TypedTensorInfo {
    fn from(t: Arc<Tensor>) -> TypedTensorInfo {
        TypedTensorInfo {
            datum_type: t.datum_type(),
            shape: ShapeInfo { shape: t.shape().into(), stream_info: None },
            konst: Some(t),
        }
    }
}

impl TryFrom<TypedTensorInfo> for NormalizedTensorInfo {
    type Error = TractError;
    fn try_from(fact: TypedTensorInfo) -> TractResult<NormalizedTensorInfo> {
        match fact.konst {
            None => {
                Ok(NormalizedTensorInfo { shape: fact.shape.clone(), datum_type: fact.datum_type })
            }
            _ => bail!("Constant tensors are excluded from the decluttered stage: {:?}", fact),
        }
    }
}

impl fmt::Debug for TypedTensorInfo {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        match self.konst {
            Some(ref k) => write!(fmt, "{:?}", k),
            None => write!(fmt, "{:?}x{:?}", self.shape, self.datum_type),
        }
    }
}

/// Tensor information for Normalized models.
///
/// Constant values are not allowed, as all tensors in normalized form are
/// variables.
#[derive(Clone, PartialEq)]
pub struct NormalizedTensorInfo {
    /// tensor element type
    pub datum_type: DatumType,
    /// tensor shape
    pub shape: ShapeInfo,
}

impl NormalizedTensorInfo {
    /// Build a NormalizedTensorInfo from a Rust scalar type and a shape.
    pub fn shape<T, S, E>(shape: S) -> TractResult<NormalizedTensorInfo>
    where
        T: Datum,
        S: TryInto<ShapeInfo, Error = E>,
        TractError: From<E>,
    {
        Self::dt_shape(T::datum_type(), shape)
    }

    /// Build a NormalizedTensorInfo from an element type and a shape.
    pub fn dt_shape<S, E>(datum_type: DatumType, shape: S) -> TractResult<NormalizedTensorInfo>
    where
        S: TryInto<ShapeInfo, Error = E>,
        TractError: From<E>,
    {
        Ok(NormalizedTensorInfo { datum_type, shape: shape.try_into()? })
    }
}

impl TensorInfo for NormalizedTensorInfo {
    fn to_tensor_fact(&self) -> TensorFact {
        TensorFact::dt_shape(self.datum_type, self.shape.to_shape_fact())
    }
}

impl TryFrom<NormalizedTensorInfo> for TypedTensorInfo {
    type Error = TractError;
    fn try_from(fact: NormalizedTensorInfo) -> TractResult<TypedTensorInfo> {
        Ok(TypedTensorInfo { shape: fact.shape.clone(), datum_type: fact.datum_type, konst: None })
    }
}
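
// A sketch of the `TypedTensorInfo` / `NormalizedTensorInfo` conversions,
// added for illustration: a constant-free typed fact normalizes and converts
// back, while facts carrying a constant value are rejected by the
// `TryFrom<TypedTensorInfo>` implementation above.
#[cfg(test)]
mod normalized_info_sketch {
    use super::*;
    use crate::internal::*;
    use std::convert::TryFrom;

    #[test]
    fn constant_free_infos_normalize() {
        let typed = TypedTensorInfo::dt_shape(DatumType::I32, &[4usize][..]).unwrap();
        let normalized = NormalizedTensorInfo::try_from(typed).unwrap();
        assert_eq!(normalized.datum_type, DatumType::I32);
        // Converting back to the typed form never reintroduces a constant.
        let back = TypedTensorInfo::try_from(normalized).unwrap();
        assert!(back.konst.is_none());
    }
}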

impl fmt::Debug for NormalizedTensorInfo {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "{:?}x{:?}", self.shape, self.datum_type)
    }
}

impl<'t> From<&'t Tensor> for NormalizedTensorInfo {
    fn from(t: &'t Tensor) -> NormalizedTensorInfo {
        NormalizedTensorInfo { datum_type: t.datum_type(), shape: t.shape().try_into().unwrap() }
    }
}