edge_impulse_runner/types.rs

//! Common types and parameters used throughout the Edge Impulse Runner.
//!
//! This module contains the core data structures that define model configuration,
//! project information, and performance metrics. These types are used to configure
//! the model and interpret its outputs.

use serde::{Deserialize, Serialize};

/// Enum representing the different types of anomaly detection supported by the model
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RunnerHelloHasAnomaly {
    None = 0,
    KMeans = 1,
    GMM = 2,
    VisualGMM = 3,
}

impl From<u32> for RunnerHelloHasAnomaly {
    fn from(value: u32) -> Self {
        match value {
            0 => Self::None,
            1 => Self::KMeans,
            2 => Self::GMM,
            3 => Self::VisualGMM,
            _ => Self::None,
        }
    }
}
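
// A minimal conversion sketch (not from the original file): the `From<u32>`
// impl above never fails, mapping any unrecognized discriminant to `None`,
// so a hello message from a newer runner still converts cleanly.
#[cfg(test)]
mod anomaly_type_tests {
    use super::RunnerHelloHasAnomaly;

    #[test]
    fn converts_known_and_unknown_discriminants() {
        assert_eq!(RunnerHelloHasAnomaly::from(2u32), RunnerHelloHasAnomaly::GMM);
        // Out-of-range values fall back to `None` instead of panicking.
        assert_eq!(RunnerHelloHasAnomaly::from(42u32), RunnerHelloHasAnomaly::None);
    }
}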

/// Parameters that define a model's configuration and capabilities.
///
/// These parameters are received from the model during initialization and describe
/// the model's input requirements, processing settings, and output characteristics.
#[derive(Debug, Deserialize, Clone)]
pub struct ModelParameters {
    /// Number of axes for motion/positional data (e.g., 3 for an xyz accelerometer)
    pub axis_count: u32,
    /// Sampling frequency in Hz for time-series data
    pub frequency: f32,
    /// Indicates if the model supports anomaly detection
    #[serde(deserialize_with = "deserialize_anomaly_type")]
    pub has_anomaly: RunnerHelloHasAnomaly,
    /// Whether the model supports object tracking (defaults to `false` if absent)
    #[serde(default)]
    pub has_object_tracking: bool,
    /// Number of color channels in input images (1 = grayscale, 3 = RGB)
    pub image_channel_count: u32,
    /// Number of consecutive frames required for video input
    pub image_input_frames: u32,
    /// Required height of input images in pixels
    pub image_input_height: u32,
    /// Required width of input images in pixels
    pub image_input_width: u32,
    /// Method used to resize input images ("fit" or "fill")
    pub image_resize_mode: String,
    /// Type of inferencing engine (0 = TensorFlow Lite, 1 = TensorFlow.js)
    pub inferencing_engine: u32,
    /// Total number of input features expected by the model
    pub input_features_count: u32,
    /// Time interval between samples in milliseconds
    pub interval_ms: f32,
    /// Number of classification labels
    pub label_count: u32,
    /// Vector of classification labels
    pub labels: Vec<String>,
    /// Type of model ("classification", "object-detection", etc.)
    pub model_type: String,
    /// Type of input sensor (see [`SensorType`])
    pub sensor: i32,
    /// Size of the processing window for time-series data
    pub slice_size: u32,
    /// Vector of thresholds for different types of detections (defaults to empty)
    #[serde(default)]
    pub thresholds: Vec<ModelThreshold>,
    /// Whether the model supports continuous mode operation
    pub use_continuous_mode: bool,
}

impl Default for ModelParameters {
    fn default() -> Self {
        Self {
            axis_count: 0,
            frequency: 0.0,
            has_anomaly: RunnerHelloHasAnomaly::None,
            has_object_tracking: false,
            image_channel_count: 0,
            image_input_frames: 1,
            image_input_height: 0,
            image_input_width: 0,
            image_resize_mode: String::from("fit"),
            inferencing_engine: 0,
            input_features_count: 0,
            interval_ms: 0.0,
            label_count: 0,
            labels: Vec::new(),
            model_type: String::from("classification"),
            sensor: -1,
            slice_size: 0,
            thresholds: Vec::new(),
            use_continuous_mode: false,
        }
    }
}
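
// A minimal deserialization sketch: a plausible (invented) model-parameters
// JSON payload, assuming `serde_json` is available as a (dev-)dependency.
// `has_anomaly` arrives as a raw integer and goes through
// `deserialize_anomaly_type`; `has_object_tracking` and `thresholds` are
// marked `#[serde(default)]`, so they may be omitted from the payload.
#[cfg(test)]
mod model_parameters_tests {
    use super::{ModelParameters, RunnerHelloHasAnomaly};

    #[test]
    fn deserializes_from_json() {
        let json = r#"{
            "axis_count": 3,
            "frequency": 62.5,
            "has_anomaly": 2,
            "image_channel_count": 3,
            "image_input_frames": 1,
            "image_input_height": 96,
            "image_input_width": 96,
            "image_resize_mode": "fit",
            "inferencing_engine": 0,
            "input_features_count": 27648,
            "interval_ms": 16.0,
            "label_count": 2,
            "labels": ["cat", "dog"],
            "model_type": "classification",
            "sensor": 3,
            "slice_size": 0,
            "use_continuous_mode": false
        }"#;
        let params: ModelParameters = serde_json::from_str(json).unwrap();
        assert_eq!(params.has_anomaly, RunnerHelloHasAnomaly::GMM);
        // Fields marked `#[serde(default)]` were absent and got their defaults.
        assert!(!params.has_object_tracking);
        assert!(params.thresholds.is_empty());
    }
}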

fn deserialize_anomaly_type<'de, D>(deserializer: D) -> Result<RunnerHelloHasAnomaly, D::Error>
where
    D: serde::Deserializer<'de>,
{
    let value = u32::deserialize(deserializer)?;
    Ok(RunnerHelloHasAnomaly::from(value))
}

/// Threshold configuration for a model's detection outputs.
///
/// Deserialized from JSON objects tagged with a `"type"` field.
#[derive(Debug, Deserialize, Clone)]
#[serde(tag = "type")]
pub enum ModelThreshold {
    #[serde(rename = "object_detection")]
    ObjectDetection { id: u32, min_score: f32 },
    #[serde(rename = "anomaly_gmm")]
    AnomalyGMM { id: u32, min_anomaly_score: f32 },
    #[serde(rename = "object_tracking")]
    ObjectTracking {
        id: u32,
        keep_grace: u32,
        max_observations: u32,
        threshold: f32,
    },
}

impl Default for ModelThreshold {
    fn default() -> Self {
        Self::ObjectDetection {
            id: 0,
            min_score: 0.5,
        }
    }
}
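
// A minimal sketch of the internally tagged representation: with
// `#[serde(tag = "type")]`, the `"type"` field in each threshold object
// selects the variant (values invented; assumes `serde_json`).
#[cfg(test)]
mod model_threshold_tests {
    use super::ModelThreshold;

    #[test]
    fn deserializes_tagged_variant() {
        let json = r#"{"type": "object_detection", "id": 5, "min_score": 0.6}"#;
        let threshold: ModelThreshold = serde_json::from_str(json).unwrap();
        match threshold {
            ModelThreshold::ObjectDetection { id, min_score } => {
                assert_eq!(id, 5);
                assert!((min_score - 0.6).abs() < f32::EPSILON);
            }
            other => panic!("unexpected variant: {:?}", other),
        }
    }
}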

/// Information about the Edge Impulse project that created the model.
///
/// Contains metadata about the project's origin and version.
#[derive(Deserialize, Debug)]
pub struct ProjectInfo {
    /// Version number of the deployment
    pub deploy_version: u32,
    /// Unique project identifier
    pub id: u32,
    /// Name of the project
    pub name: String,
    /// Username of the project owner
    pub owner: String,
}

/// Performance timing information for different processing stages.
///
/// Provides detailed timing breakdowns for each step of the inference pipeline,
/// useful for performance monitoring and optimization.
#[derive(Deserialize, Debug)]
pub struct TimingInfo {
    /// Time spent on digital signal processing (DSP) in microseconds
    pub dsp: u32,
    /// Time spent on classification inference in microseconds
    pub classification: u32,
    /// Time spent on anomaly detection in microseconds
    pub anomaly: u32,
    /// Time spent on JSON serialization/deserialization in microseconds
    pub json: u32,
    /// Time spent reading from standard input in microseconds
    pub stdin: u32,
}
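
// A minimal sketch (invented values): since every stage is reported in
// microseconds, an end-to-end latency estimate is just the sum of the fields.
#[cfg(test)]
mod timing_info_tests {
    use super::TimingInfo;

    #[test]
    fn sums_pipeline_stages() {
        let timing = TimingInfo {
            dsp: 1_200,
            classification: 3_400,
            anomaly: 0,
            json: 150,
            stdin: 80,
        };
        let total_us =
            timing.dsp + timing.classification + timing.anomaly + timing.json + timing.stdin;
        assert_eq!(total_us, 4_830);
    }
}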

/// Represents a detected object's location and classification.
///
/// Used in object detection models to specify where objects were found
/// in an image and their classification details.
#[derive(Debug, Deserialize, Serialize)]
pub struct BoundingBox {
    /// Height of the bounding box in pixels
    pub height: i32,
    /// Classification label for the detected object
    pub label: String,
    /// Confidence score for the detection (0.0 to 1.0)
    pub value: f32,
    /// Width of the bounding box in pixels
    pub width: i32,
    /// X-coordinate of the top-left corner
    pub x: i32,
    /// Y-coordinate of the top-left corner
    pub y: i32,
}
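
// A minimal round-trip sketch (invented values): `BoundingBox` derives both
// `Serialize` and `Deserialize`, so detections can be re-emitted as JSON
// unchanged (assumes `serde_json`).
#[cfg(test)]
mod bounding_box_tests {
    use super::BoundingBox;

    #[test]
    fn json_round_trip() {
        let bb = BoundingBox {
            height: 16,
            label: "person".to_string(),
            value: 0.92,
            width: 16,
            x: 32,
            y: 48,
        };
        let json = serde_json::to_string(&bb).unwrap();
        let back: BoundingBox = serde_json::from_str(&json).unwrap();
        assert_eq!(back.label, bb.label);
        assert_eq!((back.x, back.y, back.width, back.height), (32, 48, 16, 16));
    }
}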

/// Represents the normalized results of visual anomaly detection
pub type VisualAnomalyResult = (f32, f32, f32, Vec<(f32, u32, u32, u32, u32)>);
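
// A minimal consumption sketch. The tuple layout is opaque, so the field
// meanings used here (overall anomaly score, max score, mean score, and
// per-region `(value, x, y, width, height)` grid cells) are an assumption,
// not a documented contract; consult the runner protocol for the
// authoritative layout.
#[cfg(test)]
mod visual_anomaly_tests {
    use super::VisualAnomalyResult;

    #[test]
    fn destructures_result_tuple() {
        let result: VisualAnomalyResult = (0.7, 0.9, 0.4, vec![(0.9, 0, 0, 8, 8)]);
        let (anomaly, max_score, mean_score, grid) = result;
        assert!(anomaly <= max_score && mean_score <= max_score);
        for (value, x, y, width, height) in grid {
            // A consumer would typically render each cell as a heatmap region.
            let _ = (value, x, y, width, height);
        }
    }
}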

/// Represents the type of sensor used for data collection.
///
/// This enum defines the supported sensor types for Edge Impulse models,
/// mapping to the numeric values used in the protocol:
/// - -1 (or any unrecognized value): Unknown
/// - 1: Microphone
/// - 2: Accelerometer
/// - 3: Camera
/// - 4: Positional
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum SensorType {
    /// Unknown or unsupported sensor type (-1 or default)
    Unknown = -1,
    /// Microphone sensor for audio input (1)
    Microphone = 1,
    /// Accelerometer sensor for motion data (2)
    Accelerometer = 2,
    /// Camera sensor for image/video input (3)
    Camera = 3,
    /// Positional sensor for location/orientation data (4)
    Positional = 4,
}

impl From<i32> for SensorType {
    fn from(value: i32) -> Self {
        match value {
            -1 => SensorType::Unknown,
            1 => SensorType::Microphone,
            2 => SensorType::Accelerometer,
            3 => SensorType::Camera,
            4 => SensorType::Positional,
            _ => SensorType::Unknown,
        }
    }
}
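
// A minimal conversion sketch: `ModelParameters::sensor` is carried as a raw
// `i32`, and any unrecognized code collapses to `Unknown` rather than failing.
#[cfg(test)]
mod sensor_type_tests {
    use super::SensorType;

    #[test]
    fn converts_protocol_codes() {
        assert_eq!(SensorType::from(3), SensorType::Camera);
        assert_eq!(SensorType::from(99), SensorType::Unknown);
    }
}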