1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
//! # Tensorflow Deploy
//!
//! Tiny, no-nonsense, self contained, portable Tensorflow inference.
//!
//! ## Example
//!
//! ```
//! # extern crate tfdeploy;
//! # extern crate ndarray;
//! # fn main() {
//! // load a simple model that just adds 3 to each input component
//! let graph = tfdeploy::for_path("tests/plus3.pb").unwrap();
//!
//! // "input" and "output" are tensorflow graph node names.
//! // we need to map these names to ids
//! let input_id = graph.node_id_by_name("input").unwrap();
//! let output_id = graph.node_id_by_name("output").unwrap();
//!
//! // run the computation.
//! let input = ndarray::arr1(&[1.0f32, 2.5, 5.0]);
//! let mut outputs = graph.run(vec![(input_id,input.into())], output_id).unwrap();
//!
//! // grab the first (and only) tensor of the result, and unwrap it as array of f32
//! let output = outputs.remove(0).take_f32s().unwrap();
//! assert_eq!(output, ndarray::arr1(&[4.0, 5.5, 8.0]).into_dyn());
//! # }
//! ```
//!
//! For a more serious example, see [inception v3 example](https://github.com/kali/tensorflow-deploy-rust/blob/master/examples/inceptionv3.rs).

extern crate bit_set;
#[macro_use]
extern crate derive_new;
#[macro_use]
extern crate error_chain;
extern crate image;
#[allow(unused_imports)]
#[macro_use]
extern crate log;
#[allow(unused_imports)]
#[macro_use]
extern crate ndarray;
extern crate num_traits;
#[cfg(test)]
#[macro_use]
#[allow(unused_imports)]
extern crate proptest;
extern crate protobuf;
#[cfg(feature = "tensorflow")]
extern crate tensorflow;

pub mod errors;
pub mod tfpb;
pub mod matrix;
pub mod ops;

#[cfg(feature = "tensorflow")]
pub mod tf;

use std::{fs, path, str};
use std::collections::{HashMap, HashSet};
use ops::{Input, Op};
pub use errors::*;

pub use matrix::Matrix;

/// A single node of the computation graph, mirroring one Tensorflow
/// graph-def node.
#[derive(Debug)]
pub struct Node {
    /// Position of this node in `Model::nodes`.
    pub id: usize,
    /// Tensorflow graph node name.
    pub name: String,
    /// Tensorflow operation name (the protobuf `op` field).
    pub op_name: String,
    /// Incoming edges as `(precursor node id, output port)`.
    /// The port is `None` for control inputs (`^name` in the protobuf),
    /// `Some(0)` otherwise.
    pub inputs: Vec<(usize, Option<usize>)>,
    /// The operation implementation used to evaluate this node.
    pub op: Box<Op>,
}

impl Node {
    /// Render the dependency tree rooted at this node as an indented,
    /// multi-line string (one node name per line).
    pub fn dump_eval_tree(&self, model: &Model) -> String {
        self._dump_eval_tree(model, 0, &mut HashSet::new())
    }

    /// Recursive worker for `dump_eval_tree`.
    ///
    /// NOTE(review): `dups` is threaded through but never read or written,
    /// so nodes reachable through several paths are printed in full each
    /// time — presumably it was meant for deduplication. Left untouched to
    /// preserve the current output.
    fn _dump_eval_tree(&self, model: &Model, depth: usize, dups: &mut HashSet<String>) -> String {
        let pad: String = ::std::iter::repeat("  ").take(depth).collect();
        let mut s = format!("{}{}\n", pad, self.name);
        for i in &self.inputs {
            let node = &model.nodes[i.0];
            // The recursion already yields a String: append it directly
            // instead of copying it through an intermediate `format!`.
            s.push_str(&node._dump_eval_tree(model, depth + 1, dups));
        }
        s
    }

    /// Compute an evaluation order: a list of node ids in which every node
    /// appears after all of its inputs, sufficient to evaluate this node.
    ///
    /// Works by fixed point: repeatedly scan the "needed but not done"
    /// set, emitting nodes whose inputs are all done and enqueuing the
    /// missing inputs of the others, until no progress is made.
    pub fn eval_order(&self, model: &Model) -> Result<Vec<usize>> {
        let mut order: Vec<usize> = Vec::new();
        let mut done = bit_set::BitSet::with_capacity(model.nodes.len());
        let mut needed = bit_set::BitSet::with_capacity(model.nodes.len());
        needed.insert(self.id);
        loop {
            let mut done_something = false;
            let mut missing = needed.clone();
            missing.difference_with(&done);
            for node_id in missing.iter() {
                let mut computable = true;
                let node = &model.nodes[node_id];
                for i in node.inputs.iter() {
                    if !done.contains(i.0) {
                        computable = false;
                        done_something = true;
                        // usize is Copy: no clone needed.
                        needed.insert(i.0);
                    }
                }
                if computable {
                    done_something = true;
                    order.push(node_id);
                    done.insert(node_id);
                }
            }
            if !done_something {
                break;
            }
        }
        if done.contains(self.id) {
            Ok(order)
        } else {
            // No progress was possible but the target is still not done:
            // the graph contains a cycle reaching this node.
            Err(format!("Could not compute node {}", self.name).into())
        }
    }
}

/// Load a Tensorflow protobuf model from a file.
pub fn for_path<P: AsRef<path::Path>>(p: P) -> Result<Model> {
    Model::for_path(p)
}

/// An evaluation plan: the list of node ids to compute, in order, to
/// obtain the outputs of one or more target nodes.
pub struct Plan {
    // Node ids sorted so that every node follows all of its inputs.
    order: Vec<usize>,
}

impl Plan {
    /// Build a plan computing a single target node.
    fn for_node(model: &Model, target: usize) -> Result<Plan> {
        Self::for_nodes(model, &[target])
    }

    /// Build a plan covering all `targets`.
    ///
    /// Same fixed-point scheme as `Node::eval_order`: repeatedly scan the
    /// "needed but not done" set, emitting nodes whose inputs are all done
    /// and enqueuing the missing inputs of the others, until no progress
    /// is made.
    fn for_nodes(model: &Model, targets: &[usize]) -> Result<Plan> {
        let mut order: Vec<usize> = Vec::new();
        let mut done = bit_set::BitSet::with_capacity(model.nodes.len());
        let mut needed = bit_set::BitSet::with_capacity(model.nodes.len());
        for &t in targets {
            needed.insert(t);
        }
        loop {
            let mut done_something = false;
            let mut missing = needed.clone();
            missing.difference_with(&done);
            for node_id in missing.iter() {
                let mut computable = true;
                let node = &model.nodes[node_id];
                for i in node.inputs.iter() {
                    if !done.contains(i.0) {
                        computable = false;
                        done_something = true;
                        // usize is Copy: no clone needed.
                        needed.insert(i.0);
                    }
                }
                if computable {
                    done_something = true;
                    order.push(node_id);
                    done.insert(node_id);
                }
            }
            if !done_something {
                break;
            }
        }
        // If any target is still not done, the graph has a cycle.
        for &t in targets {
            if !done.contains(t) {
                let node = &model.nodes[t];
                Err(format!("Could not plan for node {}", node.name))?
            }
        }
        Ok(Plan { order })
    }

    /// Evaluate every node of the plan, in order, skipping nodes whose
    /// outputs are already present in `state` (computed or injected).
    pub fn run(&self, state: &mut ModelState) -> Result<()> {
        for &n in &self.order {
            if state.outputs[n].is_none() {
                state.compute_one(n)?;
            }
        }
        Ok(())
    }
}

/// Model is Tfdeploy's workhorse. It wraps a protobuf tensorflow model,
/// and runs the inference interpreter.
///
pub struct Model {
    // All graph nodes, indexed by id (position in this vector).
    nodes: Vec<Node>,
    // Maps a Tensorflow node name to its id in `nodes`.
    nodes_by_name: HashMap<String, usize>,
}

impl Model {
    /// Build a Tfdeploy model from a parsed Tensorflow `GraphDef`.
    ///
    /// Assumes the graph lists every node after all of its inputs: each
    /// input name must already be registered when a node is visited.
    pub fn new(graph: tfpb::graph::GraphDef) -> Result<Model> {
        let mut nodes = vec![];
        let mut nodes_by_name: HashMap<String, usize> = HashMap::new();
        let op_builder = ops::OpBuilder::new();
        for pbnode in graph.get_node().iter() {
            let name = pbnode.get_name().to_string();
            let inputs: Vec<(usize, Option<usize>)> = pbnode
                .get_input()
                .iter()
                .map(|i| {
                    // "^name" denotes a control input: an ordering-only
                    // edge carrying no value, hence the `None` port.
                    let input: (usize, Option<usize>) = if i.starts_with("^") {
                        (
                            // Strip only the leading caret; the previous
                            // `i.replace("^", "")` would also have mangled
                            // any interior caret in the name.
                            *nodes_by_name
                                .get(&i[1..])
                                .ok_or_else(|| format!("No node {} found", i))?,
                            None,
                        )
                    } else {
                        (
                            *nodes_by_name
                                .get(i)
                                .ok_or_else(|| format!("No node {} found", i))?,
                            Some(0usize),
                        )
                    };
                    Ok(input)
                })
                .collect::<Result<Vec<_>>>()
                .map_err(|e| format!("While building node {}, {}", name, e.description()))?;
            let node = Node {
                id: nodes.len(),
                name: name.clone(),
                op_name: pbnode.get_op().to_string(),
                inputs: inputs,
                op: op_builder
                    .build(&pbnode)
                    .map_err(|e| format!("While building node {}, {}", name, e.description()))?,
            };
            nodes_by_name.insert(name, nodes.len());
            nodes.push(node)
        }
        Ok(Model {
            nodes,
            nodes_by_name,
        })
    }

    /// Look a node id up by its Tensorflow graph name.
    pub fn node_id_by_name(&self, name: &str) -> Result<usize> {
        self.nodes_by_name
            .get(name)
            .cloned()
            // `ok_or_else` avoids building the error message on the
            // (common) success path.
            .ok_or_else(|| format!("Node named {} not found", name).into())
    }

    /// Create a fresh evaluation state for this model, with no node
    /// output computed yet.
    pub fn state(&self) -> ModelState {
        ModelState {
            model: self,
            outputs: vec![None; self.nodes.len()],
        }
    }

    /// Load a Tensorflow protobuf model from a file.
    pub fn for_path<P: AsRef<path::Path>>(p: P) -> Result<Model> {
        Self::for_reader(fs::File::open(p)?)
    }

    /// Load a Tfdeploy model from a reader.
    pub fn for_reader<R: ::std::io::Read>(r: R) -> Result<Model> {
        Model::new(Self::graphdef_for_reader(r)?)
    }

    /// Load a Tensorflow protobuf graph def from a reader.
    pub fn graphdef_for_reader<R: ::std::io::Read>(mut r: R) -> Result<::tfpb::graph::GraphDef> {
        Ok(::protobuf::core::parse_from_reader::<
            ::tfpb::graph::GraphDef,
        >(&mut r)?)
    }

    /// Load a Tensorflow protobuf graph def from a path.
    pub fn graphdef_for_path<P: AsRef<path::Path>>(p: P) -> Result<::tfpb::graph::GraphDef> {
        Self::graphdef_for_reader(fs::File::open(p)?)
    }

    /// All node names, in id order.
    pub fn node_names(&self) -> Vec<&str> {
        self.nodes.iter().map(|s| &*s.name).collect()
    }

    /// Get a tfdeploy Node by name.
    pub fn get_node(&self, name: &str) -> Result<&Node> {
        Ok(&self.nodes[self.node_id_by_name(name)?])
    }

    /// Build an evaluation plan targeting a single node id.
    pub fn plan_for_one(&self, node: usize) -> Result<Plan> {
        Plan::for_node(self, node)
    }

    /// Run the network on a fresh state: inject `inputs` by node id, then
    /// compute and return the outputs of node `output`.
    pub fn run(&self, inputs: Vec<(usize, Matrix)>, output: usize) -> Result<Vec<Matrix>> {
        self.state().run(inputs, output)
    }

    /// All nodes, indexed by id.
    pub fn nodes(&self) -> &[Node] {
        &*self.nodes
    }

    /// Same as `run`, but with inputs and output designated by graph
    /// node names instead of ids.
    pub fn run_with_names(&self, inputs: Vec<(&str, Matrix)>, output: &str) -> Result<Vec<Matrix>> {
        let inputs = inputs
            .into_iter()
            .map(|(name, mat)| -> Result<(usize, Matrix)> {
                Ok((self.node_id_by_name(name)?, mat))
            })
            .collect::<Result<_>>()?;
        self.run(inputs, self.node_id_by_name(output)?)
    }
}

/// Mutable evaluation state for one `Model`: the (optional) outputs of
/// every node, indexed by node id.
pub struct ModelState<'a> {
    // The model being evaluated; nodes are looked up here by id.
    model: &'a Model,
    /// Per-node outputs: `None` until the node is computed or a value is
    /// injected via `set_outputs`/`set_value`.
    pub outputs: Vec<Option<Vec<Input>>>,
}

impl<'a> ModelState<'a> {
    /// Reset internal state: forget every computed or injected value.
    pub fn reset(&mut self) -> Result<()> {
        self.outputs = vec![None; self.model.nodes.len()];
        Ok(())
    }

    /// Inject the outputs of node `id`, as if it had been evaluated.
    pub fn set_outputs(&mut self, id: usize, values: Vec<Matrix>) -> Result<()> {
        self.outputs[id] = Some(values.into_iter().map(Input::Owned).collect());
        Ok(())
    }

    /// Inject a single-output value for node `id`.
    pub fn set_value(&mut self, id: usize, value: Matrix) -> Result<()> {
        self.set_outputs(id, vec![value])
    }

    /// Evaluate one node — all of its inputs must already be present in
    /// `outputs` — and store its results.
    pub fn compute_one(&mut self, node: usize) -> Result<()> {
        let node: &Node = &self.model.nodes[node];
        let mut inputs: Vec<Input> = vec![];
        for i in &node.inputs {
            let prec_node = &self.model.nodes[i.0];
            // `ok_or_else` keeps the message formatting off the hot path.
            let prec = self.outputs[i.0].as_ref().ok_or_else(|| {
                format!(
                    "Computing {}, precursor {} not done:",
                    node.name, prec_node.name
                )
            })?;
            // NOTE(review): a control input (port == None) errors out here
            // ("no output found") — confirm control edges are never fed to
            // compute_one.
            inputs.push(prec[i.1.ok_or("no output found")?].clone())
        }
        let outputs = node.op.eval(inputs)?;
        self.outputs[node.id] = Some(outputs);
        Ok(())
    }

    /// Take (move out) the computed outputs of a node designated by name.
    pub fn take_by_name(&mut self, name: &str) -> Result<Vec<Matrix>> {
        let id = self.model.node_id_by_name(name)?;
        self.take(id)
    }

    /// Take (move out) the computed outputs of node `id`, converting them
    /// to owned matrices. The node's slot is left empty.
    pub fn take(&mut self, id: usize) -> Result<Vec<Matrix>> {
        Ok(self.outputs[id]
            .take()
            .ok_or("Value is not computed")?
            .into_iter()
            .map(Input::into_matrix)
            .collect())
    }

    /// Main entrypoint for running a network.
    ///
    /// Clears the internal state.
    pub fn run(&mut self, inputs: Vec<(usize, Matrix)>, output: usize) -> Result<Vec<Matrix>> {
        self.reset()?;
        for input in inputs {
            self.set_value(input.0, input.1)?;
        }
        Plan::for_node(self.model, output)?.run(self)?;
        self.take(output)
    }

    /// The model this state evaluates.
    pub fn model(&self) -> &Model {
        self.model
    }
}