Trait opencv::dnn::NetTrait

pub trait NetTrait {
    fn as_raw_Net(&self) -> *mut c_void;

    fn empty(&self) -> Result<bool> { ... }
    fn dump(&mut self) -> Result<String> { ... }
    fn dump_to_file(&mut self, path: &str) -> Result<()> { ... }
    fn add_layer(
        &mut self,
        name: &str,
        _type: &str,
        params: &mut LayerParams
    ) -> Result<i32> { ... }
    fn add_layer_to_prev(
        &mut self,
        name: &str,
        _type: &str,
        params: &mut LayerParams
    ) -> Result<i32> { ... }
    fn get_layer_id(&mut self, layer: &str) -> Result<i32> { ... }
    fn get_layer_names(&self) -> Result<VectorOfString> { ... }
    fn get_layer(&mut self, layer_id: &DictValue) -> Result<PtrOfLayer> { ... }
    fn get_layer_inputs(
        &mut self,
        layer_id: &DictValue
    ) -> Result<VectorOfPtrOfLayer> { ... }
    fn connect_first_second(
        &mut self,
        out_pin: &str,
        inp_pin: &str
    ) -> Result<()> { ... }
    fn connect(
        &mut self,
        out_layer_id: i32,
        out_num: i32,
        inp_layer_id: i32,
        inp_num: i32
    ) -> Result<()> { ... }
    fn set_inputs_names(
        &mut self,
        input_blob_names: &VectorOfString
    ) -> Result<()> { ... }
    fn forward(&mut self, output_name: &str) -> Result<Mat> { ... }
    fn forward_layer(
        &mut self,
        output_blobs: &mut dyn ToOutputArray,
        output_name: &str
    ) -> Result<()> { ... }
    fn forward_first_outputs(
        &mut self,
        output_blobs: &mut dyn ToOutputArray,
        out_blob_names: &VectorOfString
    ) -> Result<()> { ... }
    fn forward_all(
        &mut self,
        output_blobs: &mut VectorOfVectorOfMat,
        out_blob_names: &VectorOfString
    ) -> Result<()> { ... }
    fn set_halide_scheduler(&mut self, scheduler: &str) -> Result<()> { ... }
    fn set_preferable_backend(&mut self, backend_id: i32) -> Result<()> { ... }
    fn set_preferable_target(&mut self, target_id: i32) -> Result<()> { ... }
    fn set_input(
        &mut self,
        blob: &dyn ToInputArray,
        name: &str,
        scalefactor: f64,
        mean: Scalar
    ) -> Result<()> { ... }
    fn set_param(
        &mut self,
        layer: &DictValue,
        num_param: i32,
        blob: &Mat
    ) -> Result<()> { ... }
    fn get_param(&mut self, layer: &DictValue, num_param: i32) -> Result<Mat> { ... }
    fn get_unconnected_out_layers(&self) -> Result<VectorOfint> { ... }
    fn get_unconnected_out_layers_names(&self) -> Result<VectorOfString> { ... }
    fn get_layers_shapes(
        &self,
        net_input_shapes: &VectorOfVectorOfint,
        layers_ids: &mut VectorOfint,
        in_layers_shapes: &mut VectorOfVectorOfVectorOfint,
        out_layers_shapes: &mut VectorOfVectorOfVectorOfint
    ) -> Result<()> { ... }
    fn get_layer_shapes(
        &self,
        net_input_shapes: &VectorOfVectorOfint,
        layer_id: i32,
        in_layer_shapes: &mut VectorOfVectorOfint,
        out_layer_shapes: &mut VectorOfVectorOfint
    ) -> Result<()> { ... }
    fn get_flops(&self, net_input_shapes: &VectorOfVectorOfint) -> Result<i64> { ... }
    fn get_flops_1(
        &self,
        layer_id: i32,
        net_input_shapes: &VectorOfVectorOfint
    ) -> Result<i64> { ... }
    fn get_layer_types(&self, layers_types: &mut VectorOfString) -> Result<()> { ... }
    fn get_layers_count(&self, layer_type: &str) -> Result<i32> { ... }
    fn get_memory_consumption(
        &self,
        net_input_shapes: &VectorOfVectorOfint,
        weights: &mut size_t,
        blobs: &mut size_t
    ) -> Result<()> { ... }
    fn get_memory_consumption_for_layer(
        &self,
        layer_id: i32,
        net_input_shapes: &VectorOfVectorOfint,
        weights: &mut size_t,
        blobs: &mut size_t
    ) -> Result<()> { ... }
    fn get_memory_consumption_for_layers(
        &self,
        net_input_shapes: &VectorOfVectorOfint,
        layer_ids: &mut VectorOfint,
        weights: &mut VectorOfsize_t,
        blobs: &mut VectorOfsize_t
    ) -> Result<()> { ... }
    fn enable_fusion(&mut self, fusion: bool) -> Result<()> { ... }
    fn get_perf_profile(&mut self, timings: &mut VectorOfdouble) -> Result<i64> { ... }
}

This class allows creating and manipulating comprehensive artificial neural networks.

A neural network is represented as a directed acyclic graph (DAG), where vertices are Layer instances and edges specify the relationships between layer inputs and outputs.

Each network layer has a unique integer id and a unique string name within its network. LayerId can store either a layer name or a layer id.

This class supports reference counting of its instances, i.e. copies point to the same instance.
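
As an illustration of the overall workflow, the sketch below loads a model, feeds it an image and runs a forward pass. It is a minimal, hedged example: the file names are placeholders, and the helper calls (read_net, imread, blob_from_image) and import paths are assumed from typical versions of this crate rather than taken from this page.

    use opencv::{core, dnn, imgcodecs};
    use opencv::dnn::NetTrait;

    fn run() -> opencv::Result<()> {
        // Hypothetical model files; read_net accepts (model, config, framework).
        let mut net = dnn::read_net("model.caffemodel", "model.prototxt", "")?;
        let image = imgcodecs::imread("input.jpg", imgcodecs::IMREAD_COLOR)?;
        // Pack the image into a 4D NCHW blob of type CV_32F.
        let blob = dnn::blob_from_image(
            &image,
            1.0,
            core::Size::new(224, 224),
            core::Scalar::new(0.0, 0.0, 0.0, 0.0),
            false,
            false,
            core::CV_32F,
        )?;
        // Feed the blob to the network input pseudo layer and run the whole net.
        net.set_input(&blob, "", 1.0, core::Scalar::new(0.0, 0.0, 0.0, 0.0))?;
        let _output = net.forward("")?;
        Ok(())
    }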

Required methods

fn as_raw_Net(&self) -> *mut c_void

Provided methods

fn empty(&self) -> Result<bool>

Returns true if there are no layers in the network.

fn dump(&mut self) -> Result<String>

Dumps the net to a String.

Returns

String with structure, hyperparameters, backend, target and fusion. Call this method after setInput(). To see the correct backend, target and fusion, run it after forward().

fn dump_to_file(&mut self, path: &str) -> Result<()>

Dumps the net structure, hyperparameters, backend, target and fusion to a dot file.

Parameters

  • path: path to output file with .dot extension @see dump()
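
A short, hedged sketch of dumping the structure; it assumes a net and input blob prepared elsewhere, as in the example near the top of this page.

    // Illustrative; `net` is a dnn::Net loaded elsewhere, `blob` a prepared input blob.
    net.set_input(&blob, "", 1.0, core::Scalar::new(0.0, 0.0, 0.0, 0.0))?;
    println!("{}", net.dump()?);            // textual description of the graph
    net.dump_to_file("net_structure.dot")?; // same information as a Graphviz .dot file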

fn add_layer(
    &mut self,
    name: &str,
    _type: &str,
    params: &mut LayerParams
) -> Result<i32>

Adds a new layer to the net.

Parameters

  • name: unique name of the layer to add.
  • type: typename of the layer to add (the type must be registered in LayerRegister).
  • params: parameters which will be used to initialize the created layer.

Returns

unique identifier of the created layer, or -1 on failure.

fn add_layer_to_prev(
    &mut self,
    name: &str,
    _type: &str,
    params: &mut LayerParams
) -> Result<i32>

Adds a new layer and connects its first input to the first output of the previously added layer. @see addLayer()

fn get_layer_id(&mut self, layer: &str) -> Result<i32>

Converts the string name of a layer into its integer identifier.

Returns

id of the layer, or -1 if the layer wasn't found.

fn get_layer_names(&self) -> Result<VectorOfString>

fn get_layer(&mut self, layer_id: &DictValue) -> Result<PtrOfLayer>

Returns a pointer to the layer with the specified id or name which the network uses.

fn get_layer_inputs(
    &mut self,
    layer_id: &DictValue
) -> Result<VectorOfPtrOfLayer>

Returns pointers to the input layers of the specified layer.

fn connect_first_second(&mut self, out_pin: &str, inp_pin: &str) -> Result<()>

Connects output of the first layer to input of the second layer.

Parameters

  • outPin: descriptor of the first layer output.
  • inpPin: descriptor of the second layer input.

Descriptors have the following template <layer_name>[.input_number]:

  • the first part of the template, layer_name, is the string name of the added layer. If this part is empty then the network input pseudo layer will be used;
  • the second optional part of the template, input_number, is either the number of the layer input or its label. If this part is omitted then the first layer input will be used.

@see setNetInputs(), Layer::inputNameToIndex(), Layer::outputNameToIndex()

fn connect(
    &mut self,
    out_layer_id: i32,
    out_num: i32,
    inp_layer_id: i32,
    inp_num: i32
) -> Result<()>

Connects output #outNum of the first layer to input #inpNum of the second layer.

Parameters

  • outLayerId: identifier of the first layer
  • outNum: number of the first layer output
  • inpLayerId: identifier of the second layer
  • inpNum: number of the second layer input
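
A hedged sketch of wiring two existing layers together by id with get_layer_id() and connect(); the layer names are hypothetical.

    // Illustrative; assumes `net` already contains layers named "conv1" and "relu1".
    let conv_id = net.get_layer_id("conv1")?;
    let relu_id = net.get_layer_id("relu1")?;
    // Route the first output of "conv1" into the first input of "relu1".
    net.connect(conv_id, 0, relu_id, 0)?;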

fn set_inputs_names(&mut self, input_blob_names: &VectorOfString) -> Result<()>

Sets the output names of the network input pseudo layer.

Each net always has its own special network input pseudo layer with id=0. This layer stores the user blobs only and doesn't make any computations. In fact, this layer provides the only way to pass user data into the network. As with any other layer, this layer can label its outputs and this function provides an easy way to do this.

fn forward(&mut self, output_name: &str) -> Result<Mat>

Runs a forward pass to compute the output of the layer with name @p outputName.

Parameters

  • outputName: name of the layer whose output is needed

Returns

blob for the first output of the specified layer. @details By default runs a forward pass for the whole network.

C++ default parameters

  • output_name: String()
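
A hedged sketch of requesting a specific output; the layer name is hypothetical, and `net` with its input is assumed to be prepared elsewhere.

    // Illustrative; the input blob was already passed via set_input().
    // "prob" is a hypothetical layer name; an empty string runs the whole network
    // and returns the blob of its last output.
    let output = net.forward("prob")?;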

fn forward_layer(
    &mut self,
    output_blobs: &mut dyn ToOutputArray,
    output_name: &str
) -> Result<()>

Runs a forward pass to compute the output of the layer with name @p outputName.

Parameters

  • outputBlobs: contains all output blobs for the specified layer.
  • outputName: name of the layer whose output is needed. @details If @p outputName is empty, runs a forward pass for the whole network.

C++ default parameters

  • output_name: String()

fn forward_first_outputs(
    &mut self,
    output_blobs: &mut dyn ToOutputArray,
    out_blob_names: &VectorOfString
) -> Result<()>

Runs a forward pass to compute the outputs of the layers listed in @p outBlobNames.

Parameters

  • outputBlobs: contains blobs for the first outputs of the specified layers.
  • outBlobNames: names of the layers whose outputs are needed

fn forward_all(
    &mut self,
    output_blobs: &mut VectorOfVectorOfMat,
    out_blob_names: &VectorOfString
) -> Result<()>

Runs a forward pass to compute the outputs of the layers listed in @p outBlobNames.

Parameters

  • outputBlobs: contains all output blobs for each layer specified in @p outBlobNames.
  • outBlobNames: names of the layers whose outputs are needed
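
A hedged sketch combining this method with get_unconnected_out_layers_names(); the vector constructors are assumed to be the ones generated in opencv::types for this crate version.

    use opencv::types::{VectorOfString, VectorOfVectorOfMat};

    // Illustrative; the input was already set. Collect every blob produced by the
    // unconnected (i.e. final) layers of the network.
    let out_names: VectorOfString = net.get_unconnected_out_layers_names()?;
    let mut outputs = VectorOfVectorOfMat::new();
    net.forward_all(&mut outputs, &out_names)?;
    // outputs[i] now holds all output blobs of the layer named out_names[i].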

fn set_halide_scheduler(&mut self, scheduler: &str) -> Result<()>

Compiles Halide layers.

Parameters

  • scheduler: Path to YAML file with scheduling directives. @see setPreferableBackend

Schedules layers that support the Halide backend, then compiles them for a specific target. For layers that are not represented in the scheduling file, or if no manual scheduling is used at all, automatic scheduling is applied.

fn set_preferable_backend(&mut self, backend_id: i32) -> Result<()>

Asks the network to use a specific computation backend where it is supported.

Parameters

  • backendId: backend identifier. @see Backend

If OpenCV is compiled with Intel's Inference Engine library, DNN_BACKEND_DEFAULT means DNN_BACKEND_INFERENCE_ENGINE. Otherwise it equals DNN_BACKEND_OPENCV.

fn set_preferable_target(&mut self, target_id: i32) -> Result<()>

Asks the network to perform computations on a specific target device.

Parameters

  • targetId: target identifier. @see Target

List of supported combinations backend / target:

|                        | DNN_BACKEND_OPENCV | DNN_BACKEND_INFERENCE_ENGINE | DNN_BACKEND_HALIDE | DNN_BACKEND_CUDA |
|------------------------|--------------------|------------------------------|--------------------|------------------|
| DNN_TARGET_CPU         | +                  | +                            | +                  |                  |
| DNN_TARGET_OPENCL      | +                  | +                            | +                  |                  |
| DNN_TARGET_OPENCL_FP16 | +                  | +                            |                    |                  |
| DNN_TARGET_MYRIAD      |                    | +                            |                    |                  |
| DNN_TARGET_FPGA        |                    | +                            |                    |                  |
| DNN_TARGET_CUDA        |                    |                              |                    | +                |
| DNN_TARGET_CUDA_FP16   |                    |                              |                    | +                |
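
A hedged sketch of selecting the backend/target pair from the first row of the table; it assumes the Backend and Target constants are exposed as i32 values in opencv::dnn (other crate versions wrap them in enums that need a cast).

    // Illustrative; constant names are assumed to be i32 consts in the dnn module.
    net.set_preferable_backend(dnn::DNN_BACKEND_OPENCV)?;
    net.set_preferable_target(dnn::DNN_TARGET_CPU)?;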

fn set_input(
    &mut self,
    blob: &dyn ToInputArray,
    name: &str,
    scalefactor: f64,
    mean: Scalar
) -> Result<()>

Sets the new input value for the network.

Parameters

  • blob: A new blob. Should have CV_32F or CV_8U depth.
  • name: A name of the input layer.
  • scalefactor: An optional normalization scale.
  • mean: Optional mean subtraction values. @see connect(String, String) to know the format of the descriptor.

If scale or mean values are specified, a final input blob is computed as: input(n,c,h,w) = scalefactor * (blob(n,c,h,w) - mean_c)

C++ default parameters

  • name: ""
  • scalefactor: 1.0
  • mean: Scalar()
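
A hedged sketch showing the scalefactor and mean parameters in use; the mean values are hypothetical per-channel (BGR) means, and `net` and `blob` are assumed to be prepared elsewhere.

    // Illustrative normalization: scale to [0, 1] and subtract a per-channel mean,
    // i.e. input(n,c,h,w) = (1.0 / 255.0) * (blob(n,c,h,w) - mean_c).
    net.set_input(
        &blob,
        "",                                          // default: the first network input
        1.0 / 255.0,                                 // scalefactor
        core::Scalar::new(104.0, 117.0, 123.0, 0.0), // mean (B, G, R)
    )?;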

fn set_param(
    &mut self,
    layer: &DictValue,
    num_param: i32,
    blob: &Mat
) -> Result<()>

Sets the new value for the learned param of the layer.

Parameters

  • layer: name or id of the layer.
  • numParam: index of the layer parameter in the Layer::blobs array.
  • blob: the new value. @see Layer::blobs

Note: If the shape of the new blob differs from the previous shape, then the following forward pass may fail.

fn get_param(&mut self, layer: &DictValue, num_param: i32) -> Result<Mat>

Returns parameter blob of the layer.

Parameters

  • layer: name or id of the layer.
  • numParam: index of the layer parameter in the Layer::blobs array. @see Layer::blobs

C++ default parameters

  • num_param: 0

fn get_unconnected_out_layers(&self) -> Result<VectorOfint>

Returns indexes of layers with unconnected outputs.

fn get_unconnected_out_layers_names(&self) -> Result<VectorOfString>

Returns names of layers with unconnected outputs.

fn get_layers_shapes(
    &self,
    net_input_shapes: &VectorOfVectorOfint,
    layers_ids: &mut VectorOfint,
    in_layers_shapes: &mut VectorOfVectorOfVectorOfint,
    out_layers_shapes: &mut VectorOfVectorOfVectorOfint
) -> Result<()>

Returns input and output shapes for all layers of the loaded model; preliminary inference isn't necessary.

Parameters

  • netInputShapes: shapes for all input blobs in net input layer.
  • layersIds: output parameter for layer IDs.
  • inLayersShapes: output parameter for input layers shapes; order is the same as in layersIds
  • outLayersShapes: output parameter for output layers shapes; order is the same as in layersIds
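
A hedged sketch of querying all layer shapes for a single 1x3x224x224 input; the VectorOf* constructors are assumed to be the ones generated in opencv::types, and `net` is assumed to be loaded elsewhere.

    use opencv::types::{VectorOfint, VectorOfVectorOfint, VectorOfVectorOfVectorOfint};

    // Illustrative; one NCHW input shape with hypothetical dimensions.
    let mut shape = VectorOfint::new();
    for &d in &[1, 3, 224, 224] {
        shape.push(d);
    }
    let mut net_input_shapes = VectorOfVectorOfint::new();
    net_input_shapes.push(shape);

    let mut layers_ids = VectorOfint::new();
    let mut in_shapes = VectorOfVectorOfVectorOfint::new();
    let mut out_shapes = VectorOfVectorOfVectorOfint::new();
    net.get_layers_shapes(&net_input_shapes, &mut layers_ids, &mut in_shapes, &mut out_shapes)?;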

fn get_layer_shapes(
    &self,
    net_input_shapes: &VectorOfVectorOfint,
    layer_id: i32,
    in_layer_shapes: &mut VectorOfVectorOfint,
    out_layer_shapes: &mut VectorOfVectorOfint
) -> Result<()>

fn get_flops(&self, net_input_shapes: &VectorOfVectorOfint) -> Result<i64>

Computes the number of FLOPs for the whole loaded model with the specified input shapes.

Parameters

  • netInputShapes: vector of shapes for all net inputs.

Returns

the computed number of FLOPs.
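
A hedged sketch, reusing a `net_input_shapes` vector built as in the get_layers_shapes sketch above.

    // Illustrative; `net_input_shapes` holds one shape per network input.
    let flops = net.get_flops(&net_input_shapes)?;
    println!("approx. {:.2} GFLOPs", flops as f64 / 1e9);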

fn get_flops_1(
    &self,
    layer_id: i32,
    net_input_shapes: &VectorOfVectorOfint
) -> Result<i64>

fn get_layer_types(&self, layers_types: &mut VectorOfString) -> Result<()>

Returns the list of layer types used in the model.

Parameters

  • layersTypes: output parameter for returning types.

fn get_layers_count(&self, layer_type: &str) -> Result<i32>

Returns the count of layers of the specified type.

Parameters

  • layerType: type.

Returns

count of layers

fn get_memory_consumption(
    &self,
    net_input_shapes: &VectorOfVectorOfint,
    weights: &mut size_t,
    blobs: &mut size_t
) -> Result<()>

Computes the number of bytes required to store all weights and intermediate blobs for the model.

Parameters

  • netInputShapes: vector of shapes for all net inputs.
  • weights: output parameter to store resulting bytes for weights.
  • blobs: output parameter to store resulting bytes for intermediate blobs.
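
A hedged sketch; `net_input_shapes` is built as in the get_layers_shapes sketch above, and size_t is assumed to alias usize in these bindings.

    // Illustrative; weights/blobs receive byte counts for the given input shapes.
    let mut weights = 0usize;
    let mut blobs = 0usize;
    net.get_memory_consumption(&net_input_shapes, &mut weights, &mut blobs)?;
    println!("weights: {} bytes, intermediate blobs: {} bytes", weights, blobs);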

fn get_memory_consumption_for_layer(
    &self,
    layer_id: i32,
    net_input_shapes: &VectorOfVectorOfint,
    weights: &mut size_t,
    blobs: &mut size_t
) -> Result<()>

fn get_memory_consumption_for_layers(
    &self,
    net_input_shapes: &VectorOfVectorOfint,
    layer_ids: &mut VectorOfint,
    weights: &mut VectorOfsize_t,
    blobs: &mut VectorOfsize_t
) -> Result<()>

Computes the number of bytes required to store all weights and intermediate blobs for each layer.

Parameters

  • netInputShapes: vector of shapes for all net inputs.
  • layerIds: output vector to save layer IDs.
  • weights: output parameter to store resulting bytes for weights.
  • blobs: output parameter to store resulting bytes for intermediate blobs.

fn enable_fusion(&mut self, fusion: bool) -> Result<()>

Enables or disables layer fusion in the network.

Parameters

  • fusion: true to enable the fusion, false to disable it. Fusion is enabled by default.

fn get_perf_profile(&mut self, timings: &mut VectorOfdouble) -> Result<i64>

Returns the overall time for inference and timings (in ticks) for layers. Indexes in the returned vector correspond to layer ids. Some layers can be fused with others; in this case a zero tick count is returned for those skipped layers.

Parameters

  • timings: vector for tick timings for all layers.

Returns

overall ticks for model inference.
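
A hedged sketch of reading the profile after inference; get_tick_frequency() from opencv::core is assumed for the tick-to-seconds conversion.

    use opencv::types::VectorOfdouble;

    // Illustrative; call after at least one forward() pass.
    let mut timings = VectorOfdouble::new();
    let total_ticks = net.get_perf_profile(&mut timings)?;
    let tick_freq = opencv::core::get_tick_frequency()?; // ticks per second
    println!("inference took {:.2} ms", total_ticks as f64 / tick_freq * 1000.0);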

Implementors

impl NetTrait for ClassificationModel

impl NetTrait for DetectionModel

impl NetTrait for KeypointsModel

impl NetTrait for Model

impl NetTrait for Net

impl NetTrait for SegmentationModel
