pub struct ModelRuntime { /* private fields */ }
Expand description
Model runtime for executing inference.
Implementations§
Source§impl ModelRuntime
impl ModelRuntime
Source§
pub fn new(config: ModelConfig) -> Self
pub fn new(config: ModelConfig) -> Self
Create a new model runtime.
Examples found in repository?
examples/full_inference_pipeline.rs (line 67)
55fn build_model() {
56 println!("2. Model Building");
57
58 let model_config = ModelConfig {
59 name: "ResNet18-Lite".to_string(),
60 input_shape: vec![1, 224, 224, 3],
61 output_shape: vec![1, 1000],
62 quant_format: QuantFormat::Int8,
63 optimization_level: OptimizationLevel::O3,
64 use_cache: true,
65 };
66
67 let runtime = ModelRuntime::new(model_config);
68 println!(" Model: {}", runtime.get_config().name);
69 println!(" Input: {:?}", runtime.input_shape());
70 println!(" Output: {:?}", runtime.output_shape());
71
72 let mut network = NeuralNetwork::new(runtime.get_config().name.clone());
73
74 network.add_layer(Layer::new(
75 "stem_conv".to_string(),
76 LayerType::Convolution,
77 vec![1, 224, 224, 3],
78 vec![1, 112, 112, 64],
79 ));
80
81 network.add_layer(Layer::new(
82 "residual_block_1".to_string(),
83 LayerType::PointwiseConvolution,
84 vec![1, 112, 112, 64],
85 vec![1, 112, 112, 64],
86 ));
87
88 network.add_layer(Layer::new(
89 "residual_block_2".to_string(),
90 LayerType::PointwiseConvolution,
91 vec![1, 56, 56, 128],
92 vec![1, 56, 56, 128],
93 ));
94
95 network.add_layer(Layer::new(
96 "global_avg_pool".to_string(),
97 LayerType::Pooling,
98 vec![1, 7, 7, 512],
99 vec![1, 512],
100 ));
101
102 network.add_layer(Layer::new(
103 "classifier".to_string(),
104 LayerType::FullyConnected,
105 vec![1, 512],
106 vec![1, 1000],
107 ));
108
109 println!(" Layers: {}", network.layer_count());
110 println!(" Estimated TOPS: {:.6}\n", network.total_tops());
111}
Source§
pub fn load_from_path(_path: &str) -> Result<Self>
pub fn load_from_path(_path: &str) -> Result<Self>
Load model from path.
Source§
pub fn get_config(&self) -> &ModelConfig
pub fn get_config(&self) -> &ModelConfig
Get model configuration.
Examples found in repository?
examples/full_inference_pipeline.rs (line 68)
55fn build_model() {
56 println!("2. Model Building");
57
58 let model_config = ModelConfig {
59 name: "ResNet18-Lite".to_string(),
60 input_shape: vec![1, 224, 224, 3],
61 output_shape: vec![1, 1000],
62 quant_format: QuantFormat::Int8,
63 optimization_level: OptimizationLevel::O3,
64 use_cache: true,
65 };
66
67 let runtime = ModelRuntime::new(model_config);
68 println!(" Model: {}", runtime.get_config().name);
69 println!(" Input: {:?}", runtime.input_shape());
70 println!(" Output: {:?}", runtime.output_shape());
71
72 let mut network = NeuralNetwork::new(runtime.get_config().name.clone());
73
74 network.add_layer(Layer::new(
75 "stem_conv".to_string(),
76 LayerType::Convolution,
77 vec![1, 224, 224, 3],
78 vec![1, 112, 112, 64],
79 ));
80
81 network.add_layer(Layer::new(
82 "residual_block_1".to_string(),
83 LayerType::PointwiseConvolution,
84 vec![1, 112, 112, 64],
85 vec![1, 112, 112, 64],
86 ));
87
88 network.add_layer(Layer::new(
89 "residual_block_2".to_string(),
90 LayerType::PointwiseConvolution,
91 vec![1, 56, 56, 128],
92 vec![1, 56, 56, 128],
93 ));
94
95 network.add_layer(Layer::new(
96 "global_avg_pool".to_string(),
97 LayerType::Pooling,
98 vec![1, 7, 7, 512],
99 vec![1, 512],
100 ));
101
102 network.add_layer(Layer::new(
103 "classifier".to_string(),
104 LayerType::FullyConnected,
105 vec![1, 512],
106 vec![1, 1000],
107 ));
108
109 println!(" Layers: {}", network.layer_count());
110 println!(" Estimated TOPS: {:.6}\n", network.total_tops());
111}
Source§
pub fn input_shape(&self) -> &[usize]
pub fn input_shape(&self) -> &[usize]
Get input shape.
Examples found in repository?
examples/full_inference_pipeline.rs (line 69)
55fn build_model() {
56 println!("2. Model Building");
57
58 let model_config = ModelConfig {
59 name: "ResNet18-Lite".to_string(),
60 input_shape: vec![1, 224, 224, 3],
61 output_shape: vec![1, 1000],
62 quant_format: QuantFormat::Int8,
63 optimization_level: OptimizationLevel::O3,
64 use_cache: true,
65 };
66
67 let runtime = ModelRuntime::new(model_config);
68 println!(" Model: {}", runtime.get_config().name);
69 println!(" Input: {:?}", runtime.input_shape());
70 println!(" Output: {:?}", runtime.output_shape());
71
72 let mut network = NeuralNetwork::new(runtime.get_config().name.clone());
73
74 network.add_layer(Layer::new(
75 "stem_conv".to_string(),
76 LayerType::Convolution,
77 vec![1, 224, 224, 3],
78 vec![1, 112, 112, 64],
79 ));
80
81 network.add_layer(Layer::new(
82 "residual_block_1".to_string(),
83 LayerType::PointwiseConvolution,
84 vec![1, 112, 112, 64],
85 vec![1, 112, 112, 64],
86 ));
87
88 network.add_layer(Layer::new(
89 "residual_block_2".to_string(),
90 LayerType::PointwiseConvolution,
91 vec![1, 56, 56, 128],
92 vec![1, 56, 56, 128],
93 ));
94
95 network.add_layer(Layer::new(
96 "global_avg_pool".to_string(),
97 LayerType::Pooling,
98 vec![1, 7, 7, 512],
99 vec![1, 512],
100 ));
101
102 network.add_layer(Layer::new(
103 "classifier".to_string(),
104 LayerType::FullyConnected,
105 vec![1, 512],
106 vec![1, 1000],
107 ));
108
109 println!(" Layers: {}", network.layer_count());
110 println!(" Estimated TOPS: {:.6}\n", network.total_tops());
111}
Source§
pub fn output_shape(&self) -> &[usize]
pub fn output_shape(&self) -> &[usize]
Get output shape.
Examples found in repository?
examples/full_inference_pipeline.rs (line 70)
55fn build_model() {
56 println!("2. Model Building");
57
58 let model_config = ModelConfig {
59 name: "ResNet18-Lite".to_string(),
60 input_shape: vec![1, 224, 224, 3],
61 output_shape: vec![1, 1000],
62 quant_format: QuantFormat::Int8,
63 optimization_level: OptimizationLevel::O3,
64 use_cache: true,
65 };
66
67 let runtime = ModelRuntime::new(model_config);
68 println!(" Model: {}", runtime.get_config().name);
69 println!(" Input: {:?}", runtime.input_shape());
70 println!(" Output: {:?}", runtime.output_shape());
71
72 let mut network = NeuralNetwork::new(runtime.get_config().name.clone());
73
74 network.add_layer(Layer::new(
75 "stem_conv".to_string(),
76 LayerType::Convolution,
77 vec![1, 224, 224, 3],
78 vec![1, 112, 112, 64],
79 ));
80
81 network.add_layer(Layer::new(
82 "residual_block_1".to_string(),
83 LayerType::PointwiseConvolution,
84 vec![1, 112, 112, 64],
85 vec![1, 112, 112, 64],
86 ));
87
88 network.add_layer(Layer::new(
89 "residual_block_2".to_string(),
90 LayerType::PointwiseConvolution,
91 vec![1, 56, 56, 128],
92 vec![1, 56, 56, 128],
93 ));
94
95 network.add_layer(Layer::new(
96 "global_avg_pool".to_string(),
97 LayerType::Pooling,
98 vec![1, 7, 7, 512],
99 vec![1, 512],
100 ));
101
102 network.add_layer(Layer::new(
103 "classifier".to_string(),
104 LayerType::FullyConnected,
105 vec![1, 512],
106 vec![1, 1000],
107 ));
108
109 println!(" Layers: {}", network.layer_count());
110 println!(" Estimated TOPS: {:.6}\n", network.total_tops());
111}
Source§
pub fn validate_input(&self, shape: &[usize]) -> Result<()>
pub fn validate_input(&self, shape: &[usize]) -> Result<()>
Validate input dimensions.
Auto Trait Implementations§
impl Freeze for ModelRuntime
impl RefUnwindSafe for ModelRuntime
impl Send for ModelRuntime
impl Sync for ModelRuntime
impl Unpin for ModelRuntime
impl UnwindSafe for ModelRuntime
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more