vexide_devices/smart/vision.rs

1//! Vision Sensor
2//!
3//! This module provides an interface for interacting with the VEX Vision Sensor.
4//!
5//! # Hardware Overview
6//!
7//! The VEX Vision Sensor is a device powered by an ARM Cortex M4 and Cortex M0 coprocessor
8//! with a color camera for the purpose of performing object recognition. The sensor can be
9//! trained to locate objects by color. The camera module itself is very similar internally
10//! to the Pixy2 camera, and performs its own onboard image processing. Manually processing
11//! raw image data from the sensor is not currently possible.
12//!
13//! Every 20 milliseconds, the camera provides a list of the objects found matching up
14//! to seven unique [`VisionSignature`]s. Each object's height, width, and location are provided.
15//! Multi-colored objects may also be programmed through the use of [`VisionCode`]s.
16//!
17//! The Vision Sensor has USB for a direct connection to a computer, where it can be configured
18//! using VEX's proprietary vision utility tool to generate color signatures. The Vision Sensor
19//! also has Wi-Fi Direct and can act as a web server, allowing a live video feed of the camera to be viewed
20//! from any computer equipped with a browser and Wi-Fi.
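//!
//! # Examples
//!
//! The example below is a minimal sketch of typical usage: storing a color signature and then
//! polling the sensor for matching objects. The signature values shown are placeholders in the
//! format produced by VEX's Vision Utility; real values must be generated for your own objects.
//!
//! ```
//! use vexide::prelude::*;
//!
//! #[vexide::main]
//! async fn main(peripherals: Peripherals) {
//!     let mut sensor = VisionSensor::new(peripherals.port_1);
//!
//!     // Store a color signature in slot 1.
//!     _ = sensor.set_signature(1, VisionSignature::new(
//!         (10049, 11513, 10781),
//!         (-425, 1, -212),
//!         4.1,
//!     ));
//!
//!     loop {
//!         // Print any objects matching the stored signature.
//!         if let Ok(objects) = sensor.objects() {
//!             for object in objects {
//!                 println!("{:?}", object);
//!             }
//!         }
//!
//!         sleep(VisionSensor::UPDATE_INTERVAL).await;
//!     }
//! }
//! ```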
21
22extern crate alloc;
23
24use alloc::vec::Vec;
25use core::time::Duration;
26
27use snafu::{ensure, Snafu};
28use vex_sdk::{
29    vexDeviceVisionBrightnessGet, vexDeviceVisionBrightnessSet, vexDeviceVisionLedColorGet,
30    vexDeviceVisionLedColorSet, vexDeviceVisionLedModeGet, vexDeviceVisionLedModeSet,
31    vexDeviceVisionModeGet, vexDeviceVisionModeSet, vexDeviceVisionObjectCountGet,
32    vexDeviceVisionObjectGet, vexDeviceVisionSignatureGet, vexDeviceVisionSignatureSet,
33    vexDeviceVisionWhiteBalanceGet, vexDeviceVisionWhiteBalanceModeGet,
34    vexDeviceVisionWhiteBalanceModeSet, vexDeviceVisionWhiteBalanceSet, vexDeviceVisionWifiModeGet,
35    vexDeviceVisionWifiModeSet, V5VisionBlockType, V5VisionLedMode, V5VisionMode, V5VisionWBMode,
36    V5VisionWifiMode, V5_DeviceT, V5_DeviceVisionObject, V5_DeviceVisionRgb,
37    V5_DeviceVisionSignature,
38};
39
40use super::{SmartDevice, SmartDeviceType, SmartPort};
41use crate::{math::Point2, rgb::Rgb, PortError};
42
43/// VEX Vision Sensor
44///
45/// This struct represents a vision sensor plugged into a Smart Port.
46#[derive(Debug, Eq, PartialEq)]
47pub struct VisionSensor {
48    port: SmartPort,
49    codes: Vec<VisionCode>,
50    device: V5_DeviceT,
51}
52
53// SAFETY: Required because we store a raw pointer to the device handle to avoid fetching it from the
54// SDK on every device function call. Simply sharing a raw pointer across threads is not inherently unsafe.
55unsafe impl Send for VisionSensor {}
56unsafe impl Sync for VisionSensor {}
57
58impl VisionSensor {
59    /// The horizontal resolution of the vision sensor.
60    ///
61    /// This value is based on the `VISION_FOV_WIDTH` macro constant in PROS.
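    ///
    /// # Examples
    ///
    /// As a rough sketch, the resolution constants can be used to normalize a detected object's
    /// position. The example below maps an object's center x-coordinate into `[-1.0, 1.0]`:
    ///
    /// ```
    /// use vexide::prelude::*;
    ///
    /// #[vexide::main]
    /// async fn main(peripherals: Peripherals) {
    ///     let sensor = VisionSensor::new(peripherals.port_1);
    ///
    ///     // NOTE: A signature must be stored with `set_signature` before any objects can be detected.
    ///     if let Ok(objects) = sensor.objects() {
    ///         if let Some(object) = objects.first() {
    ///             // Map the center x-coordinate from [0, 316] to [-1.0, 1.0].
    ///             let x = f64::from(object.center.x) / f64::from(VisionSensor::HORIZONTAL_RESOLUTION);
    ///             println!("Horizontal error: {}", x * 2.0 - 1.0);
    ///         }
    ///     }
    /// }
    /// ```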
62    pub const HORIZONTAL_RESOLUTION: u16 = 316;
63
64    /// The vertical resolution of the vision sensor.
65    ///
66    /// This value is based on the `VISION_FOV_HEIGHT` macro constant in PROS.
67    pub const VERTICAL_RESOLUTION: u16 = 212;
68
69    /// The horizontal FOV of the vision sensor in degrees.
70    pub const HORIZONTAL_FOV: f32 = 64.6;
71
72    /// The vertical FOV of the vision sensor in degrees.
73    pub const VERTICAL_FOV: f32 = 46.0;
74
75    /// The diagonal FOV of the vision sensor in degrees.
76    pub const DIAGONAL_FOV: f32 = 78.0;
77
78    /// The update rate of the vision sensor.
79    pub const UPDATE_INTERVAL: Duration = Duration::from_millis(50);
80
81    /// Creates a new vision sensor from a Smart Port.
82    ///
83    /// # Examples
84    ///
85    /// ```
86    /// use vexide::prelude::*;
87    ///
88    /// #[vexide::main]
89    /// async fn main(peripherals: Peripherals) {
90    ///     let sensor = VisionSensor::new(peripherals.port_1);
91    /// }
92    /// ```
93    #[must_use]
94    pub fn new(port: SmartPort) -> Self {
95        Self {
96            device: unsafe { port.device_handle() },
97            port,
98            codes: Vec::new(),
99        }
100    }
101
102    /// Adds a detection signature to the sensor's onboard memory.
103    ///
104    /// This signature will be used to identify objects when using [`VisionSensor::objects`].
105    ///
106    /// The sensor can store up to 7 unique signatures, with each signature slot denoted by the
107    /// `id` parameter. If a signature with an ID matching an existing signature
108    /// on the sensor is added, then the existing signature will be overwritten with the new one.
109    ///
110    /// # Volatile Memory
111    ///
112    /// The memory on the Vision Sensor is *volatile* and will therefore be wiped when the sensor
113    /// loses power. As a result, this function should be called every time the sensor is used on
114    /// program start.
115    ///
116    /// # Panics
117    ///
118    /// - Panics if the given signature ID is not in the interval [1, 7].
119    ///
120    /// # Errors
121    ///
122    /// - A [`VisionError::Port`] error is returned if a vision sensor is not currently connected to the Smart Port.
123    ///
124    /// # Examples
125    ///
126    /// ```
127    /// use vexide::prelude::*;
128    ///
129    /// #[vexide::main]
130    /// async fn main(peripherals: Peripherals) {
131    ///     let mut sensor = VisionSensor::new(peripherals.port_1);
132    ///
133    ///     // These signatures can be generated using VEX's vision utility.
134    ///     let example_signature = VisionSignature::new(
135    ///         (10049, 11513, 10781),
136    ///         (-425, 1, -212),
137    ///         4.1,
138    ///     );
139    ///
140    ///     // Set signature 1 on the sensor.
141    ///     _ = sensor.set_signature(1, example_signature);
142    /// }
143    /// ```
144    pub fn set_signature(&mut self, id: u8, signature: VisionSignature) -> Result<(), VisionError> {
145        assert!(
146            (1..=7).contains(&id),
147            "The given signature ID `{id}` is not in the expected interval [1, 7]."
148        );
149        self.validate_port()?;
150
151        let mut signature = V5_DeviceVisionSignature {
152            id,
153            uMin: signature.u_threshold.0,
154            uMax: signature.u_threshold.1,
155            uMean: signature.u_threshold.2,
156            vMin: signature.v_threshold.0,
157            vMax: signature.v_threshold.1,
158            vMean: signature.v_threshold.2,
159            range: signature.range,
160            mType: u32::from(
161                if self.codes.iter().any(|code| code.contains_signature(id)) {
162                    V5VisionBlockType::kVisionTypeColorCode
163                } else {
164                    V5VisionBlockType::kVisionTypeNormal
165                }
166                .0,
167            ),
168            ..Default::default()
169        };
170
171        unsafe { vexDeviceVisionSignatureSet(self.device, &mut signature) }
172
173        Ok(())
174    }
175
176    /// Reads a signature off the sensor's onboard memory, returning `Some(sig)` if the slot is filled
177    /// or `None` if no signature is stored with the given ID.
178    fn read_raw_signature(&self, id: u8) -> Result<Option<V5_DeviceVisionSignature>, VisionError> {
179        assert!(
180            (1..=7).contains(&id),
181            "The given signature ID `{id}` is not in the expected interval [1, 7]."
182        );
183
184        let mut raw_signature = V5_DeviceVisionSignature::default();
185        let read_operation =
186            unsafe { vexDeviceVisionSignatureGet(self.device, u32::from(id), &mut raw_signature) };
187
188        if !read_operation {
189            return Ok(None);
190        }
191
192        // pad[0] is actually an undocumented flags field on `V5_DeviceVisionSignature`. If the sensor returns
193        // no flags, then it has failed to send data back.
194        //
195        // TODO: Make sure this is correct and not the PROS docs being wrong here.
196        //
197        // We also check that the read operation succeeded from the return of `vexDeviceVisionSignatureGet`.
198        ensure!(raw_signature.pad[0] != 0, ReadingFailedSnafu);
199
200        Ok(Some(raw_signature))
201    }
202
203    /// Adjusts the type of a signature stored on the sensor.
204    ///
205    /// This is used when assigning certain stored signatures as belonging to color codes.
206    fn write_signature_type(&mut self, id: u8, sig_type: u32) -> Result<(), VisionError> {
207        if let Some(mut sig) = self.read_raw_signature(id)? {
208            sig.mType = sig_type;
209            unsafe { vexDeviceVisionSignatureSet(self.device, &mut sig) }
210        } else {
211            return ReadingFailedSnafu.fail();
212        }
213
214        Ok(())
215    }
216
217    /// Returns a signature from the sensor's onboard volatile memory.
218    ///
219    /// # Panics
220    ///
221    /// - Panics if the given signature ID is not in the interval [1, 7].
222    ///
223    /// # Errors
224    ///
225    /// - A [`VisionError::Port`] error is returned if a vision sensor is not currently connected to the Smart Port.
226    /// - A [`VisionError::ReadingFailed`] error is returned if the read operation failed.
227    ///
228    /// # Examples
229    ///
230    /// ```
231    /// use vexide::prelude::*;
232    ///
233    /// #[vexide::main]
234    /// async fn main(peripherals: Peripherals) {
235    ///     let mut sensor = VisionSensor::new(peripherals.port_1);
236    ///
237    ///     // Set an example signature in the sensor's first slot.
238    ///     _ = sensor.set_signature(1, VisionSignature::new(
239    ///         (10049, 11513, 10781),
240    ///         (-425, 1, -212),
241    ///         4.1,
242    ///     ));
243    ///
244    ///     // Read signature 1 off the sensor.
245    ///     // This should be the same as the one we just set.
246    ///     if let Ok(Some(sig)) = sensor.signature(1) {
247    ///         println!("{:?}", sig);
248    ///     }
249    /// }
250    /// ```
251    pub fn signature(&self, id: u8) -> Result<Option<VisionSignature>, VisionError> {
252        self.validate_port()?;
253
254        Ok(self.read_raw_signature(id)?.map(Into::into))
255    }
256
257    /// Returns all signatures currently stored on the sensor's onboard volatile memory.
258    ///
259    /// # Errors
260    ///
261    /// - A [`VisionError::Port`] error is returned if a vision sensor is not currently connected to the Smart Port.
262    /// - A [`VisionError::ReadingFailed`] error is returned if the read operation failed.
263    ///
264    /// # Examples
265    ///
266    /// ```
267    /// use vexide::prelude::*;
268    ///
269    /// #[vexide::main]
270    /// async fn main(peripherals: Peripherals) {
271    ///     let mut sensor = VisionSensor::new(peripherals.port_1);
272    ///
273    ///     // A bunch of random color signatures.
274    ///     let sig_1 = VisionSignature::new((10049, 11513, 10781), (-425, 1, -212), 4.1);
275    ///     let sig_2 = VisionSignature::new((8973, 11143, 10058), (-2119, -1053, -1586), 5.4);
276    ///     let sig_3 = VisionSignature::new((-3665, -2917, -3292), (4135, 10193, 7164), 2.0);
277    ///     let sig_4 = VisionSignature::new((-5845, -4809, -5328), (-5495, -4151, -4822), 3.1);
278    ///
279    ///     // Set signatures 1-4.
280    ///     _ = sensor.set_signature(1, sig_1);
281    ///     _ = sensor.set_signature(2, sig_2);
282    ///     _ = sensor.set_signature(3, sig_3);
283    ///     _ = sensor.set_signature(4, sig_4);
284    ///
285    ///     // Read back the signatures from the sensor's memory.
286    ///     // These should be the signatures that we just set.
287    ///     if let Ok(signatures) = sensor.signatures() {
288    ///         for sig in signatures.into_iter().flatten() {
289    ///             println!("Found sig saved on sensor: {:?}", sig);
290    ///         }
291    ///     }
292    /// }
293    /// ```
294    pub fn signatures(&self) -> Result<[Option<VisionSignature>; 7], VisionError> {
295        Ok([
296            self.signature(1)?,
297            self.signature(2)?,
298            self.signature(3)?,
299            self.signature(4)?,
300            self.signature(5)?,
301            self.signature(6)?,
302            self.signature(7)?,
303        ])
304    }
305
306    /// Registers a color code to the sensor's onboard memory. This code will be used to identify objects
307    /// when using [`VisionSensor::objects`].
308    ///
309    /// Color codes are effectively "signature groups" that the sensor will use to identify objects
310    /// whose signature colors are detected next to each other.
311    ///
312    /// # Volatile Memory
313    ///
314    /// The onboard memory of the Vision Sensor is *volatile* and will therefore be wiped when the
315    /// sensor loses its power source. As a result, this function should be called at the start
316    /// of every program that uses the sensor.
317    ///
318    /// # Panics
319    ///
320    /// - Panics if one or more of the given signature IDs are not in the interval [1, 7].
321    ///
322    /// # Errors
323    ///
324    /// - A [`VisionError::Port`] error is returned if a vision sensor is not currently connected to the Smart Port.
325    /// - A [`VisionError::ReadingFailed`] error is returned if a read operation failed or there was
326    ///   no signature previously set in the slot(s) specified in the [`VisionCode`].
327    ///
328    /// # Examples
329    ///
330    /// ```
331    /// use vexide::prelude::*;
332    /// use vexide::devices::smart::vision::DetectionSource;
333    ///
334    /// #[vexide::main]
335    /// async fn main(peripherals: Peripherals) {
336    ///     let mut sensor = VisionSensor::new(peripherals.port_1);
337    ///
338    ///     // Two color signatures.
339    ///     let sig_1 = VisionSignature::new((10049, 11513, 10781), (-425, 1, -212), 4.1);
340    ///     let sig_2 = VisionSignature::new((8973, 11143, 10058), (-2119, -1053, -1586), 5.4);
341    ///
342    ///     // Store the signatures on the sensor.
343    ///     _ = sensor.set_signature(1, sig_1);
344    ///     _ = sensor.set_signature(2, sig_2);
345    ///
346    ///     // Create a code associating signatures 1 and 2 together.
347    ///     let code = VisionCode::from((1, 2));
348    ///
349    ///     // Register our code on the sensor. When we call [`VisionSensor::objects`], the associated
350    ///     // signatures will be returned as a single object if their colors are detected next to each other.
351    ///     _ = sensor.add_code(code);
352    ///
353    ///     // Scan for objects. Filter only objects matching the code we just set.
354    ///     if let Ok(objects) = sensor.objects() {
355    ///         for object in objects.iter().filter(|obj| obj.source == DetectionSource::Code(code)) {
356    ///             println!("{:?}", object);
357    ///         }
358    ///     }
359    /// }
360    /// ```
361    pub fn add_code(&mut self, code: impl Into<VisionCode>) -> Result<(), VisionError> {
362        self.validate_port()?;
363
364        let code = code.into();
365
366        self.write_signature_type(code.0, u32::from(V5VisionBlockType::kVisionTypeColorCode.0))?;
367        self.write_signature_type(code.1, u32::from(V5VisionBlockType::kVisionTypeColorCode.0))?;
368        if let Some(sig_3) = code.2 {
369            self.write_signature_type(sig_3, u32::from(V5VisionBlockType::kVisionTypeColorCode.0))?;
370        }
371        if let Some(sig_4) = code.3 {
372            self.write_signature_type(sig_4, u32::from(V5VisionBlockType::kVisionTypeColorCode.0))?;
373        }
374        if let Some(sig_5) = code.4 {
375            self.write_signature_type(sig_5, u32::from(V5VisionBlockType::kVisionTypeColorCode.0))?;
376        }
377
378        self.codes.push(code);
379
380        Ok(())
381    }
382
383    /// Returns the current brightness setting of the vision sensor.
384    ///
385    /// The returned value ranges from `0.0` (0%) to `1.0` (100%).
386    ///
387    /// # Errors
388    ///
389    /// - A [`VisionError::Port`] error is returned if a vision sensor is not currently connected to the Smart Port.
390    ///
391    /// # Examples
392    ///
393    /// ```
394    /// use vexide::prelude::*;
395    ///
396    /// #[vexide::main]
397    /// async fn main(peripherals: Peripherals) {
398    ///     let mut sensor = VisionSensor::new(peripherals.port_1);
399    ///
400    ///     // Set brightness to 50%
401    ///     _ = sensor.set_brightness(0.5);
402    ///
403    ///     // Give the sensor time to update.
404    ///     sleep(VisionSensor::UPDATE_INTERVAL).await;
405    ///
406    ///     // Read brightness. Should be 50%, since we just set it.
407    ///     if let Ok(brightness) = sensor.brightness() {
408    ///         assert_eq!(brightness, 0.5);
409    ///     }
410    /// }
411    /// ```
412    pub fn brightness(&self) -> Result<f64, VisionError> {
413        self.validate_port()?;
414
415        // SDK function gives us brightness percentage 0-100.
416        Ok(f64::from(unsafe { vexDeviceVisionBrightnessGet(self.device) }) / 100.0)
417    }
418
419    /// Returns the current white balance of the vision sensor as an RGB color.
420    ///
421    /// # Errors
422    ///
423    /// - A [`VisionError::Port`] error is returned if a vision sensor is not currently connected to the Smart Port.
424    ///
425    /// # Examples
426    ///
427    /// ```
428    /// use vexide::prelude::*;
429    ///
430    /// #[vexide::main]
431    /// async fn main(peripherals: Peripherals) {
432    ///     let mut sensor = VisionSensor::new(peripherals.port_1);
433    ///
434    ///     // Set white balance to manual.
435    ///     _ = sensor.set_white_balance(WhiteBalance::Manual(Rgb {
436    ///         r: 255,
437    ///         g: 255,
438    ///         b: 255,
439    ///     }));
440    ///
441    ///     // Give the sensor time to update.
442    ///     sleep(VisionSensor::UPDATE_INTERVAL).await;
443    ///
444    ///     // Read the white balance. It should be the manual value we just set.
445    ///     if let Ok(white_balance) = sensor.white_balance() {
446    ///         assert_eq!(
447    ///             white_balance,
448    ///             WhiteBalance::Manual(Rgb {
449    ///                 r: 255,
450    ///                 g: 255,
451    ///                 b: 255,
452    ///             })
453    ///         );
454    ///     }
455    /// }
456    /// ```
457    pub fn white_balance(&self) -> Result<WhiteBalance, VisionError> {
458        self.validate_port()?;
459
460        Ok(
461            match unsafe { vexDeviceVisionWhiteBalanceModeGet(self.device) } {
462                V5VisionWBMode::kVisionWBNormal => WhiteBalance::Auto,
463                V5VisionWBMode::kVisionWBStart => WhiteBalance::StartupAuto,
464                V5VisionWBMode::kVisionWBManual => WhiteBalance::Manual({
465                    let raw = unsafe { vexDeviceVisionWhiteBalanceGet(self.device) };
466
467                    Rgb {
468                        r: raw.red,
469                        g: raw.green,
470                        b: raw.blue,
471                    }
472                }),
473                _ => unreachable!(),
474            },
475        )
476    }
477
478    /// Sets the brightness of the vision sensor as a value from `0.0` (0%) to `1.0` (100%).
479    ///
480    /// # Errors
481    ///
482    /// - A [`VisionError::Port`] error is returned if a vision sensor is not currently connected to the Smart Port.
483    ///
484    /// # Examples
485    ///
486    /// ```
487    /// use vexide::prelude::*;
488    ///
489    /// #[vexide::main]
490    /// async fn main(peripherals: Peripherals) {
491    ///     let mut sensor = VisionSensor::new(peripherals.port_1);
492    ///
493    ///     // Set brightness to 50%
494    ///     _ = sensor.set_brightness(0.5);
495    /// }
496    /// ```
497    pub fn set_brightness(&mut self, brightness: f64) -> Result<(), VisionError> {
498        self.validate_port()?;
499
500        unsafe { vexDeviceVisionBrightnessSet(self.device, (brightness * 100.0) as u8) }
501
502        Ok(())
503    }
504
505    /// Sets the white balance of the vision sensor.
506    ///
507    /// White balance can be either automatically set or manually set through an RGB color.
508    ///
509    /// # Errors
510    ///
511    /// - A [`VisionError::Port`] error is returned if a vision sensor is not currently connected to the Smart Port.
512    ///
513    /// # Examples
514    ///
515    /// ```
516    /// use vexide::prelude::*;
517    ///
518    /// #[vexide::main]
519    /// async fn main(peripherals: Peripherals) {
520    ///     let mut sensor = VisionSensor::new(peripherals.port_1);
521    ///
522    ///     // Set white balance to manual.
523    ///     _ = sensor.set_white_balance(WhiteBalance::Manual(Rgb {
524    ///         r: 255,
525    ///         g: 255,
526    ///         b: 255,
527    ///     }));
528    /// }
529    /// ```
530    pub fn set_white_balance(&mut self, white_balance: WhiteBalance) -> Result<(), VisionError> {
531        self.validate_port()?;
532
533        unsafe { vexDeviceVisionWhiteBalanceModeSet(self.device, white_balance.into()) }
534
535        if let WhiteBalance::Manual(rgb) = white_balance {
536            unsafe {
537                vexDeviceVisionWhiteBalanceSet(
538                    self.device,
539                    V5_DeviceVisionRgb {
540                        red: rgb.r,
541                        green: rgb.g,
542                        blue: rgb.b,
543
544                        // Pretty sure this field does nothing, but PROS sets it to this.
545                        //
546                        // TODO: Run some hardware tests to see if this value actually influences
547                        // white balance. Based on the Pixy2 API, I doubt it and bet this is just
548                        // here for the LED setter, which uses the same type.
549                        brightness: 255,
550                    },
551                );
552            }
553        }
554
555        Ok(())
556    }
557
558    /// Configures the behavior of the LED indicator on the sensor.
559    ///
560    /// The default behavior is represented by [`LedMode::Auto`], which will display the color of the most prominent
561    /// detected object's signature color. Alternatively, the LED can be configured to display a single RGB color.
562    ///
563    /// # Errors
564    ///
565    /// - A [`VisionError::Port`] error is returned if a vision sensor is not currently connected to the Smart Port.
566    ///
567    /// # Examples
568    ///
569    /// ```
570    /// use vexide::prelude::*;
571    ///
572    /// #[vexide::main]
573    /// async fn main(peripherals: Peripherals) {
574    ///     let mut sensor = VisionSensor::new(peripherals.port_1);
575    ///
576    ///     // Set the LED to red at 100% brightness.
577    ///     _ = sensor.set_led_mode(LedMode::Manual(Rgb { r: 255, g: 0, b: 0 }, 1.0));
578    /// }
579    /// ```
580    pub fn set_led_mode(&mut self, mode: LedMode) -> Result<(), VisionError> {
581        self.validate_port()?;
582
583        unsafe { vexDeviceVisionLedModeSet(self.device, mode.into()) }
584
585        if let LedMode::Manual(rgb, brightness) = mode {
586            unsafe {
587                vexDeviceVisionLedColorSet(
588                    self.device,
589                    V5_DeviceVisionRgb {
590                        red: rgb.r,
591                        green: rgb.g,
592                        blue: rgb.b,
593                        brightness: (brightness * 100.0) as u8,
594                    },
595                );
596            }
597        }
598
599        Ok(())
600    }
601
602    /// Returns the user-set behavior of the LED indicator on the sensor.
603    ///
604    /// # Errors
605    ///
606    /// - A [`VisionError::Port`] error is returned if a vision sensor is not currently connected to the Smart Port.
607    ///
608    /// # Examples
609    ///
610    /// ```
611    /// use vexide::prelude::*;
612    ///
613    /// #[vexide::main]
614    /// async fn main(peripherals: Peripherals) {
615    ///     let mut sensor = VisionSensor::new(peripherals.port_1);
616    ///
617    ///     // Set the LED to red at 100% brightness.
618    ///     _ = sensor.set_led_mode(LedMode::Manual(Rgb { r: 255, g: 0, b: 0 }, 1.0));
619    ///
620    ///     // Give the sensor time to update.
621    ///     sleep(VisionSensor::UPDATE_INTERVAL).await;
622    ///
623    ///     // Check the sensor's reported LED mode. Should be the same as what we just set
624    ///     if let Ok(led_mode) = sensor.led_mode() {
625    ///         assert_eq!(led_mode, LedMode::Manual(Rgb { r: 255, g: 0, b: 0 }, 1.0));
626    ///     }
627    /// }
628    /// ```
629    pub fn led_mode(&self) -> Result<LedMode, VisionError> {
630        self.validate_port()?;
631
632        Ok(match unsafe { vexDeviceVisionLedModeGet(self.device) } {
633            V5VisionLedMode::kVisionLedModeAuto => LedMode::Auto,
634            V5VisionLedMode::kVisionLedModeManual => {
635                let led_color = unsafe { vexDeviceVisionLedColorGet(self.device) };
636
637                LedMode::Manual(
638                    Rgb::new(led_color.red, led_color.green, led_color.blue),
639                    f64::from(led_color.brightness) / 100.0,
640                )
641            }
642            _ => unreachable!(),
643        })
644    }
645
646    /// Returns a [`Vec`] of objects detected by the sensor.
647    ///
648    /// # Errors
649    ///
650    /// - A [`VisionError::Port`] error is returned if a vision sensor is not currently connected to the Smart Port.
651    /// - A [`VisionError::WifiMode`] error is returned if the vision sensor is in Wi-Fi mode.
652    /// - A [`VisionError::ReadingFailed`] error is returned if the objects could not be read from the sensor.
653    ///
654    /// # Examples
655    ///
656    /// With one signature:
657    ///
658    /// ```
659    /// use vexide::prelude::*;
660    ///
661    /// #[vexide::main]
662    /// async fn main(peripherals: Peripherals) {
663    ///     let mut sensor = VisionSensor::new(peripherals.port_1);
664    ///
665    ///     // Set a color signature on the sensor's first slot.
666    ///     _ = sensor.set_signature(1, VisionSignature::new(
667    ///         (10049, 11513, 10781),
668    ///         (-425, 1, -212),
669    ///         4.1,
670    ///     ));
671    ///
672    ///     // Scan for detected objects.
673    ///     if let Ok(objects) = sensor.objects() {
674    ///         for object in objects {
675    ///             println!("{:?}", object);
676    ///         }
677    ///     }
678    /// }
679    /// ```
680    ///
681    /// With multiple signatures:
682    ///
683    /// ```
684    /// use vexide::prelude::*;
685    /// use vexide::devices::smart::vision::DetectionSource;
686    ///
687    /// #[vexide::main]
688    /// async fn main(peripherals: Peripherals) {
689    ///     let mut sensor = VisionSensor::new(peripherals.port_1);
690    ///
691    ///     // Two color signatures.
692    ///     let sig_1 = VisionSignature::new((10049, 11513, 10781), (-425, 1, -212), 4.1);
693    ///     let sig_2 = VisionSignature::new((8973, 11143, 10058), (-2119, -1053, -1586), 5.4);
694    ///
695    ///     // Store the signatures on the sensor.
696    ///     _ = sensor.set_signature(1, sig_1);
697    ///     _ = sensor.set_signature(2, sig_2);
698    ///
699    ///     // Scan for objects.
700    ///     if let Ok(objects) = sensor.objects() {
701    ///         for object in objects {
702    ///             // Identify which signature the detected object matches.
703    ///             match object.source {
704    ///                 DetectionSource::Signature(1) => println!("Detected object matching sig_1: {:?}", object),
705    ///                 DetectionSource::Signature(2) => println!("Detected object matching sig_2: {:?}", object),
706    ///                 _ => {},
707    ///             }
708    ///         }
709    ///     }
710    /// }
711    /// ```
712    pub fn objects(&self) -> Result<Vec<VisionObject>, VisionError> {
713        ensure!(self.mode()? != VisionMode::Wifi, WifiModeSnafu);
714
715        let object_count = unsafe { vexDeviceVisionObjectCountGet(self.device) } as usize;
716        let mut objects = Vec::with_capacity(object_count);
717
718        for i in 0..object_count {
719            let mut object = V5_DeviceVisionObject::default();
720
721            if unsafe { vexDeviceVisionObjectGet(self.device, i as u32, &mut object) } == 0 {
722                return ReadingFailedSnafu.fail();
723            }
724
725            let object: VisionObject = object.into();
726
727            match object.source {
728                DetectionSource::Signature(_) | DetectionSource::Line => {
729                    objects.push(object);
730                }
731                DetectionSource::Code(code) => {
732                    if self.codes.contains(&code) {
733                        objects.push(object);
734                    }
735                }
736            }
737        }
738
739        Ok(objects)
740    }
741
742    /// Returns the number of objects detected by the sensor.
743    ///
744    /// # Errors
745    ///
746    /// - A [`VisionError::Port`] error is returned if a vision sensor is not currently connected to the Smart Port.
747    /// - A [`VisionError::WifiMode`] error is returned if the vision sensor is in Wi-Fi mode.
748    /// - A [`VisionError::ReadingFailed`] error is returned if the objects could not be read from the sensor.
749    ///
750    /// # Examples
751    ///
752    /// ```
753    /// use vexide::prelude::*;
754    ///
755    /// #[vexide::main]
756    /// async fn main(peripherals: Peripherals) {
757    ///     let mut sensor = VisionSensor::new(peripherals.port_1);
758    ///
759    ///     // Set a color signature on the sensor's first slot.
760    ///     _ = sensor.set_signature(1, VisionSignature::new(
761    ///         (10049, 11513, 10781),
762    ///         (-425, 1, -212),
763    ///         4.1,
764    ///     ));
765    ///
766    ///     loop {
767    ///         if let Ok(n) = sensor.object_count() {
768    ///             println!("Sensor is currently detecting {n} objects.");
769    ///         }
770    ///
771    ///         sleep(VisionSensor::UPDATE_INTERVAL).await;
772    ///     }
773    /// }
774    /// ```
775    pub fn object_count(&self) -> Result<usize, VisionError> {
776        // NOTE: We actually can't rely on [`vexDeviceVisionObjectCountGet`], due to the way that
777        // vision codes are registered.
778        //
779        // When a code is registered, all this really does is set a bunch of normal signatures with
780        // an additional flag set (see: [`Self::write_signature_type`]). This means that if the user
781        // has multiple vision codes, we can't distinguish which objects were detected by a certain
782        // code until AFTER we get the full objects list, where we can then filter by
783        // [`VisionObject::source`].
784        Ok(self.objects()?.len())
785    }
786
787    /// Sets the vision sensor's detection mode. See [`VisionMode`] for more information on what
788    /// each mode does.
789    ///
790    /// # Errors
791    ///
792    /// - A [`VisionError::Port`] error is returned if a vision sensor is not currently connected to the Smart Port.
793    ///
794    /// # Examples
795    ///
796    /// ```
797    /// use vexide::prelude::*;
798    ///
799    /// #[vexide::main]
800    /// async fn main(peripherals: Peripherals) {
801    ///     let mut sensor = VisionSensor::new(peripherals.port_1);
802    ///
803    ///     // Place the sensor into "Wi-Fi mode", allowing you to connect to it via a hotspot
804    ///     // and receive a video stream of its camera from another device.
805    ///     _ = sensor.set_mode(VisionMode::Wifi);
806    /// }
807    /// ```
808    pub fn set_mode(&mut self, mode: VisionMode) -> Result<(), VisionError> {
809        self.validate_port()?;
810
811        unsafe {
812            vexDeviceVisionWifiModeSet(
813                self.device,
814                match mode {
815                    VisionMode::Wifi => V5VisionWifiMode::kVisionWifiModeOn,
816                    _ => V5VisionWifiMode::kVisionWifiModeOff,
817                },
818            );
819
820            vexDeviceVisionModeSet(
821                self.device,
822                match mode {
823                    VisionMode::ColorDetection => V5VisionMode::kVisionModeNormal,
824                    VisionMode::LineDetection => V5VisionMode::kVisionModeLineDetect,
825                    VisionMode::MixedDetection => V5VisionMode::kVisionModeMixed,
826                    // If the user requested Wi-Fi mode, then we already enabled it
827                    // above, so there's nothing left to do here.
828                    VisionMode::Wifi => return Ok(()),
829                    VisionMode::Test => V5VisionMode::kVisionTypeTest,
830                },
831            );
832        }
833
834        Ok(())
835    }
836
837    /// Returns the current detection mode that the sensor is using.
838    ///
839    /// # Errors
840    ///
841    /// - A [`VisionError::Port`] error is returned if a vision sensor is not currently connected to the Smart Port.
842    ///
843    /// # Examples
844    ///
845    /// ```
846    /// use vexide::prelude::*;
847    ///
848    /// #[vexide::main]
849    /// async fn main(peripherals: Peripherals) {
850    ///     let mut sensor = VisionSensor::new(peripherals.port_1);
851    ///
852    ///     // Place the sensor into "Wi-Fi mode", allowing you to connect to it via a hotspot
853    ///     // and receive a video stream of its camera from another device.
854    ///     _ = sensor.set_mode(VisionMode::Wifi);
855    ///
856    ///     sleep(VisionSensor::UPDATE_INTERVAL).await;
857    ///
858    ///     // Since we just set the mode, we can get the mode off the sensor to verify that it's
859    ///     // now in Wi-Fi mode.
860    ///     if let Ok(mode) = sensor.mode() {
861    ///         assert_eq!(mode, VisionMode::Wifi);
862    ///     }
863    /// }
864    /// ```
865    pub fn mode(&self) -> Result<VisionMode, VisionError> {
866        self.validate_port()?;
867
868        if unsafe { vexDeviceVisionWifiModeGet(self.device) } == V5VisionWifiMode::kVisionWifiModeOn
869        {
870            return Ok(VisionMode::Wifi);
871        }
872
873        Ok(unsafe { vexDeviceVisionModeGet(self.device) }.into())
874    }
875}
876
877impl SmartDevice for VisionSensor {
878    /// The frametime of the Vision Sensor.
879    const UPDATE_INTERVAL: Duration = Duration::from_millis(20);
880
881    fn port_number(&self) -> u8 {
882        self.port.number()
883    }
884
885    fn device_type(&self) -> SmartDeviceType {
886        SmartDeviceType::Vision
887    }
888}
889impl From<VisionSensor> for SmartPort {
890    fn from(device: VisionSensor) -> Self {
891        device.port
892    }
893}
894
895/// A vision detection color signature.
896///
897/// Vision signatures contain information used by the vision sensor to detect objects of a certain
898/// color. These signatures are typically generated through VEX's vision utility tool rather than
899/// written by hand. For creating signatures using the utility, see [`from_utility`].
900///
901/// [`from_utility`]: VisionSignature::from_utility
902///
903/// # Format & Detection Overview
904///
905/// Vision signatures operate in a version of the Y'UV color space, specifically using the "U" and "V"
906/// chroma components for edge detection purposes. This can be seen in the `u_threshold` and
907/// `v_threshold` fields of this struct. These fields place three "threshold" (min, max, mean)
908/// values on the u and v chroma values detected by the sensor. The values are then transformed to a
909/// 3D lookup table to detect actual colors.
910///
911/// There is additionally a `range` field, which works as a scale factor or threshold for how lenient
912/// edge detection should be.
913///
914/// Signatures can additionally be grouped together into [`VisionCode`]s, which narrow the filter for
915/// object detection by requiring two or more colors to be detected next to each other.
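///
/// # Examples
///
/// A brief sketch of the typical workflow (the threshold values below are placeholders in the
/// format exported by Vision Utility):
///
/// ```
/// use vexide::prelude::*;
///
/// #[vexide::main]
/// async fn main(peripherals: Peripherals) {
///     let mut sensor = VisionSensor::new(peripherals.port_1);
///
///     // `u_threshold` and `v_threshold` are (min, max, mean) tuples, while `range` controls
///     // how lenient brightness matching should be.
///     let signature = VisionSignature::new((10049, 11513, 10781), (-425, 1, -212), 4.1);
///
///     // Store the signature in slot 1 so that `objects` can match against it.
///     _ = sensor.set_signature(1, signature);
/// }
/// ```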
916#[derive(Debug, Copy, Clone, PartialEq)]
917pub struct VisionSignature {
918    /// The (min, max, mean) values on the "U" axis.
919    ///
920    /// This defines a threshold of values for the sensor to match against a certain chroma in the
921    /// Y'UV color space - specifically on the U component.
922    pub u_threshold: (i32, i32, i32),
923
924    /// The (min, max, mean) values on the V axis.
925    ///
926    /// This defines a threshold of values for the sensor to match against a certain chroma in the
927    /// Y'UV color space - specifically on the "V" component.
928    pub v_threshold: (i32, i32, i32),
929
930    /// The signature range scale factor.
931    ///
932    /// This value effectively serves as a threshold for how lenient the sensor should be
933    /// when detecting the edges of colors. This value ranges from 0-11 in Vision Utility.
934    ///
935    /// Higher values of `range` will increase the range of brightness that the sensor will
936    /// consider to be part of the signature. Lighter/Darker shades of the signature's color
937    /// will be detected more often.
938    pub range: f32,
939
940    /// The signature's flags.
941    pub flags: u8,
942}
943
944impl VisionSignature {
945    /// Creates a new [`VisionSignature`].
946    ///
947    /// # Examples
948    ///
949    /// ```
950    /// use vexide::devices::smart::vision::VisionSignature;
951    ///
952    /// let my_signature = VisionSignature::new(
953    ///     (10049, 11513, 10781),
954    ///     (-425, 1, -212),
955    ///     4.1,
956    /// );
957    /// ```
958    #[must_use]
959    pub const fn new(
960        u_threshold: (i32, i32, i32),
961        v_threshold: (i32, i32, i32),
962        range: f32,
963    ) -> Self {
964        Self {
965            flags: 0,
966            u_threshold,
967            v_threshold,
968            range,
969        }
970    }
971
972    /// Creates a [`VisionSignature`] using the same format as VEX's Vision Utility tool.
973    ///
974    /// # Examples
975    ///
976    /// ```
977    /// use vexide::devices::smart::vision::VisionSignature;
978    ///
979    /// // Register a signature for detecting red objects.
980    /// // The numbers in this signature were generated using VEX's Vision Utility app.
981    /// let my_signature =
982    ///     VisionSignature::from_utility(1, 10049, 11513, 10781, -425, 1, -212, 4.1, 0);
983    /// ```
984    #[allow(clippy::too_many_arguments)]
985    #[must_use]
986    pub const fn from_utility(
987        _id: u8, // We don't store IDs in our vision signatures.
988        u_min: i32,
989        u_max: i32,
990        u_mean: i32,
991        v_min: i32,
992        v_max: i32,
993        v_mean: i32,
994        range: f32,
995        _signature_type: u32, // This is handled automatically by [`VisionSensor::add_code`].
996    ) -> Self {
997        Self {
998            u_threshold: (u_min, u_max, u_mean),
999            v_threshold: (v_min, v_max, v_mean),
1000            range,
1001            flags: 0,
1002        }
1003    }
1004}
1005
1006impl From<V5_DeviceVisionSignature> for VisionSignature {
1007    fn from(value: V5_DeviceVisionSignature) -> Self {
1008        Self {
1009            u_threshold: (value.uMin, value.uMax, value.uMean),
1010            v_threshold: (value.vMin, value.vMax, value.vMean),
1011            range: value.range,
1012            flags: value.flags,
1013        }
1014    }
1015}
1016
1017/// A vision detection code.
1018///
1019/// Codes are a special type of detection signature that group multiple [`VisionSignature`]s
1020/// together. A [`VisionCode`] can associate 2-5 color signatures together, detecting the resulting object
1021/// when its color signatures are present close to each other.
1022///
1023/// These codes work very similarly to [Pixy2 Color Codes](https://docs.pixycam.com/wiki/doku.php?id=wiki:v2:using_color_codes).
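///
/// # Examples
///
/// A minimal sketch of registering a two-signature color code. It assumes that color signatures
/// have already been stored in slots 1 and 2 with [`VisionSensor::set_signature`]:
///
/// ```
/// use vexide::prelude::*;
///
/// #[vexide::main]
/// async fn main(peripherals: Peripherals) {
///     let mut sensor = VisionSensor::new(peripherals.port_1);
///
///     // Signatures 1 and 2 must already be stored on the sensor for this call to succeed.
///     let code = VisionCode::from((1, 2));
///     _ = sensor.add_code(code);
/// }
/// ```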
1024#[derive(Debug, Copy, Clone, Eq, PartialEq)]
1025pub struct VisionCode(
1026    pub u8,
1027    pub u8,
1028    pub Option<u8>,
1029    pub Option<u8>,
1030    pub Option<u8>,
1031);
1032
1033impl VisionCode {
1034    /// Creates a new vision code.
1035    ///
1036    /// Two signatures are required to create a vision code, with an additional three
1037    /// optional signatures.
1038    ///
1039    /// # Examples
1040    ///
1041    /// ```
1042    /// use vexide::devices::smart::vision::VisionCode;
1043    ///
1044    /// // Create a vision code associated with signatures 1, 2, and 3.
1045    /// let code = VisionCode::new(1, 2, Some(3), None, None);
1046    /// ```
1047    #[must_use]
1048    pub const fn new(
1049        sig_1: u8,
1050        sig_2: u8,
1051        sig_3: Option<u8>,
1052        sig_4: Option<u8>,
1053        sig_5: Option<u8>,
1054    ) -> Self {
1055        Self(sig_1, sig_2, sig_3, sig_4, sig_5)
1056    }
1057
1058    /// Creates a [`VisionCode`] from a bit representation of its signature IDs.
1059    ///
1060    /// # Examples
1061    ///
1062    /// ```
1063    /// use vexide::devices::smart::vision::VisionCode;
1064    ///
1065    /// let sig_1_id = 1;
1066    /// let sig_2_id = 2;
1067    ///
1068    /// let mut code_id: u16 = 0;
1069    ///
1070    /// // Pack the IDs of signatures 1 and 2 into the two highest 3-bit slots of the code ID.
1071    /// code_id = (code_id << 3) | u16::from(sig_1_id);
1072    /// code_id = ((code_id << 3) | u16::from(sig_2_id)) << 9; // Lower slots stay empty.
1073    ///
1074    /// // Create a [`VisionCode`] from signatures 1 and 2.
1075    /// let code = VisionCode::from_id(code_id);
1076    /// ```
1077    #[must_use]
1078    pub const fn from_id(id: u16) -> Self {
1079        const MASK: u16 = (1 << 3) - 1;
1080
1081        Self(
1082            ((id >> 12) & MASK) as u8,
1083            ((id >> 9) & MASK) as u8,
1084            match ((id >> 6) & MASK) as u8 {
1085                0 => None,
1086                sig => Some(sig),
1087            },
1088            match ((id >> 3) & MASK) as u8 {
1089                0 => None,
1090                sig => Some(sig),
1091            },
1092            match (id & MASK) as u8 {
1093                0 => None,
1094                sig => Some(sig),
1095            },
1096        )
1097    }
1098
1099    /// Returns `true` if a given signature ID is stored in this code.
1100    ///
1101    /// # Examples
1102    ///
1103    /// ```
1104    /// use vexide::devices::smart::vision::VisionCode;
1105    ///
1106    /// // Create a vision code associated with signatures 1, 2, and 3.
1107    /// let code = VisionCode::new(1, 2, Some(3), None, None);
1108    ///
1109    /// assert!(code.contains_signature(1));
1110    /// ```
1111    #[must_use]
1112    pub const fn contains_signature(&self, id: u8) -> bool {
1113        if self.0 == id || self.1 == id {
1114            return true;
1115        }
1116
1117        if let Some(sig_3) = self.2 {
1118            if sig_3 == id {
1119                return true;
1120            }
1121        }
1122        if let Some(sig_4) = self.3 {
1123            if sig_4 == id {
1124                return true;
1125            }
1126        }
1127        if let Some(sig_5) = self.4 {
1128            if sig_5 == id {
1129                return true;
1130            }
1131        }
1132
1133        false
1134    }
1135
1136    /// Returns the internal ID used by the sensor to determine which signatures
1137    /// belong to which code.
1138    ///
1139    /// # Examples
1140    ///
1141    /// ```
1142    /// use vexide::devices::smart::vision::VisionCode;
1143    ///
1144    /// let sig_1_id = 1;
1145    /// let sig_2_id = 2;
1146    ///
1147    /// let mut code_id: u16 = 0;
1148    ///
1149    /// // Pack the IDs of signatures 1 and 2 into the two highest 3-bit slots of the code ID.
1150    /// code_id = (code_id << 3) | u16::from(sig_1_id);
1151    /// code_id = ((code_id << 3) | u16::from(sig_2_id)) << 9; // Lower slots stay empty.
1152    ///
1153    /// // Create a [`VisionCode`] from signatures 1 and 2.
1154    /// let code = VisionCode::from_id(code_id);
1155    ///
1156    /// // The ID of the code we just created should be identical to the bit representation
1157    /// // containing each signature ID we created it from.
1158    /// assert_eq!(code.id(), code_id);
1159    /// ```
1160    #[must_use]
1161    pub fn id(&self) -> u16 {
1162        let mut id: u16 = 0;
1163
1164        id = (id << 3) | u16::from(self.0);
1165        id = (id << 3) | u16::from(self.1);
1166        id = (id << 3) | u16::from(self.2.unwrap_or_default());
1167        id = (id << 3) | u16::from(self.3.unwrap_or_default());
1168        id = (id << 3) | u16::from(self.4.unwrap_or_default());
1169
1170        id
1171    }
1172}
1173
1174impl From<(u8, u8)> for VisionCode {
1175    /// Converts a tuple of two signature IDs into a [`VisionCode`].
1176    fn from(signatures: (u8, u8)) -> Self {
1177        Self(signatures.0, signatures.1, None, None, None)
1178    }
1179}
1180
1181impl From<(u8, u8, u8)> for VisionCode {
1182    /// Converts a tuple of three signature IDs into a [`VisionCode`].
1183    fn from(signatures: (u8, u8, u8)) -> Self {
1184        Self(signatures.0, signatures.1, Some(signatures.2), None, None)
1185    }
1186}
1187
1188impl From<(u8, u8, u8, u8)> for VisionCode {
1189    /// Converts a tuple of four signature IDs into a [`VisionCode`].
1190    fn from(signatures: (u8, u8, u8, u8)) -> Self {
1191        Self(
1192            signatures.0,
1193            signatures.1,
1194            Some(signatures.2),
1195            Some(signatures.3),
1196            None,
1197        )
1198    }
1199}
1200
1201impl From<(u8, u8, u8, u8, u8)> for VisionCode {
1202    /// Converts a tuple of five signature IDs into a [`VisionCode`].
1203    fn from(signatures: (u8, u8, u8, u8, u8)) -> Self {
1204        Self(
1205            signatures.0,
1206            signatures.1,
1207            Some(signatures.2),
1208            Some(signatures.3),
1209            Some(signatures.4),
1210        )
1211    }
1212}
1213
1214/// A possible "detection mode" for the vision sensor.
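///
/// # Examples
///
/// A small sketch of switching the sensor from its default color detection mode to line detection:
///
/// ```
/// use vexide::prelude::*;
///
/// #[vexide::main]
/// async fn main(peripherals: Peripherals) {
///     let mut sensor = VisionSensor::new(peripherals.port_1);
///
///     // Detect lines rather than color signatures.
///     _ = sensor.set_mode(VisionMode::LineDetection);
/// }
/// ```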
1215#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
1216pub enum VisionMode {
1217    /// Uses color signatures and codes to identify objects.
1218    #[default]
1219    ColorDetection,
1220
1221    /// Uses line tracking to identify lines.
1222    LineDetection,
1223
1224    /// Both color signatures and lines will be detected as objects.
1225    MixedDetection,
1226
1227    /// Sets the sensor into "Wi-Fi mode", which disables all forms of object detection and
1228    /// enables the sensor's onboard Wi-Fi hotspot for streaming camera data over a web server.
1229    ///
1230    /// Once enabled, the sensor will create a wireless network with an SSID
1231    /// in the format of VISION_XXXX. The sensor's camera feed is available
1232    /// at `192.168.1.1`.
1233    ///
1234    /// This mode will be automatically disabled when connected to field control.
1235    Wifi,
1236
1237    /// Unknown use.
1238    Test,
1239}
1240
1241impl From<V5VisionMode> for VisionMode {
1242    fn from(value: V5VisionMode) -> Self {
1243        match value {
1244            V5VisionMode::kVisionModeNormal => Self::ColorDetection,
1245            V5VisionMode::kVisionModeLineDetect => Self::LineDetection,
1246            V5VisionMode::kVisionModeMixed => Self::MixedDetection,
1247            V5VisionMode::kVisionTypeTest => Self::Test,
1248            _ => unreachable!(),
1249        }
1250    }
1251}
1252
1253/// Defines a source for what method was used to detect a [`VisionObject`].
1254#[derive(Debug, Clone, Copy, PartialEq, Eq)]
1255pub enum DetectionSource {
1256    /// A normal vision signature not associated with a color code was used to detect this object.
1257    Signature(u8),
1258
1259    /// Multiple signatures joined in a color code were used to detect this object.
1260    Code(VisionCode),
1261
1262    /// Line detection was used to find this object.
1263    Line,
1264}
1265
1266/// A detected vision object.
1267///
1268/// This struct contains metadata about objects detected by the vision sensor. Objects are
1269/// detected by calling [`VisionSensor::objects`] after adding signatures and color codes
1270/// to the sensor.
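///
/// # Examples
///
/// A rough sketch of reading a detected object's fields. It assumes a signature has already been
/// stored in slot 1 with [`VisionSensor::set_signature`]:
///
/// ```
/// use vexide::prelude::*;
///
/// #[vexide::main]
/// async fn main(peripherals: Peripherals) {
///     let sensor = VisionSensor::new(peripherals.port_1);
///
///     if let Ok(objects) = sensor.objects() {
///         for object in objects {
///             println!(
///                 "{}x{} object centered at ({}, {})",
///                 object.width, object.height, object.center.x, object.center.y,
///             );
///         }
///     }
/// }
/// ```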
1271#[derive(Debug, Clone, Copy, PartialEq, Eq)]
1272pub struct VisionObject {
1273    /// The signature, color code, or line detection used to detect this object.
1274    pub source: DetectionSource,
1275
1276    /// The width of the detected object's bounding box in pixels.
1277    pub width: u16,
1278
1279    /// The height of the detected object's bounding box in pixels.
1280    pub height: u16,
1281
1282    /// The top-left coordinate of the detected object relative to the top-left
1283    /// of the camera's field of view.
1284    pub offset: Point2<u16>,
1285
1286    /// The center coordinate of the detected object relative to the top-left
1287    /// of the camera's field of view.
1288    pub center: Point2<u16>,
1289
1290    /// The approximate degrees of rotation of the detected object's bounding box.
1291    pub angle: u16,
1292}
1293
1294impl From<V5_DeviceVisionObject> for VisionObject {
1295    fn from(value: V5_DeviceVisionObject) -> Self {
1296        Self {
1297            source: match value.r#type {
1298                V5VisionBlockType::kVisionTypeColorCode => {
1299                    DetectionSource::Code(VisionCode::from_id(value.signature))
1300                }
1301                V5VisionBlockType::kVisionTypeNormal => {
1302                    DetectionSource::Signature(value.signature as u8)
1303                }
1304                V5VisionBlockType::kVisionTypeLineDetect => DetectionSource::Line,
1305                x => panic!("Unknown vision block type: {x:?}"),
1306            },
1307            width: value.width,
1308            height: value.height,
1309            offset: Point2 {
1310                x: value.xoffset,
1311                y: value.yoffset,
1312            },
1313            center: Point2 {
1314                x: value.xoffset + (value.width / 2),
1315                y: value.yoffset + (value.height / 2),
1316            },
1317            angle: value.angle * 10,
1318        }
1319    }
1320}
1321
1322/// Vision Sensor white balance mode.
1323///
1324/// Represents a white balance configuration for the vision sensor's camera.
1325#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
1326pub enum WhiteBalance {
1327    /// Automatic Mode
1328    ///
1329    /// The sensor will automatically adjust the camera's white balance, using the brightest
1330    /// part of the image as a white point.
1331    #[default]
1332    Auto,
1333
1334    /// "Startup" Automatic Mode
1335    ///
1336    /// The sensor will automatically adjust the camera's white balance, but will only perform
1337    /// this adjustment once on power-on.
1338    StartupAuto,
1339
1340    /// Manual Mode
1341    ///
1342    /// Allows for manual control over white balance using an RGB color.
1343    Manual(Rgb<u8>),
1344}
1345
1346impl From<WhiteBalance> for V5VisionWBMode {
1347    fn from(value: WhiteBalance) -> Self {
1348        match value {
1349            WhiteBalance::Auto => Self::kVisionWBNormal,
1350            WhiteBalance::StartupAuto => Self::kVisionWBStart,
1351            WhiteBalance::Manual(_) => Self::kVisionWBManual,
1352        }
1353    }
1354}
1355
1356/// Vision Sensor LED mode.
1357///
1358/// Represents the states that the integrated LED indicator on a vision sensor can be in.
1359#[derive(Default, Debug, Clone, Copy, PartialEq)]
1360pub enum LedMode {
1361    /// Automatic Mode
1362    ///
1363    /// When in automatic mode, the integrated LED will display the color of the most prominent
1364    /// detected object's signature color.
1365    #[default]
1366    Auto,
1367
1368    /// Manual Mode
1369    ///
1370    /// When in manual mode, the integrated LED will display a user-set RGB color code and brightness
1371    /// percentage from 0.0-1.0.
1372    Manual(Rgb<u8>, f64),
1373}
1374
1375impl From<LedMode> for V5VisionLedMode {
1376    fn from(value: LedMode) -> Self {
1377        match value {
1378            LedMode::Auto => Self::kVisionLedModeAuto,
1379            LedMode::Manual(_, _) => Self::kVisionLedModeManual,
1380        }
1381    }
1382}
1383
1384#[derive(Debug, Snafu)]
1385/// Errors that can occur when using a vision sensor.
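///
/// # Examples
///
/// A brief sketch of distinguishing the error cases returned by [`VisionSensor::objects`]:
///
/// ```
/// use vexide::prelude::*;
/// use vexide::devices::smart::vision::VisionError;
///
/// #[vexide::main]
/// async fn main(peripherals: Peripherals) {
///     let sensor = VisionSensor::new(peripherals.port_1);
///
///     match sensor.objects() {
///         Ok(objects) => println!("Detected {} objects.", objects.len()),
///         Err(VisionError::WifiMode) => println!("Disable Wi-Fi mode to detect objects."),
///         Err(VisionError::ReadingFailed) => println!("Failed to read objects from the sensor."),
///         Err(VisionError::Port { .. }) => println!("No vision sensor detected on this port."),
///     }
/// }
/// ```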
1386pub enum VisionError {
1387    /// Objects cannot be detected while Wi-Fi mode is enabled.
1388    WifiMode,
1389
1390    /// The camera could not be read.
1391    ReadingFailed,
1392
1393    /// Generic port related error.
1394    #[snafu(transparent)]
1395    Port {
1396        /// The source of the error.
1397        source: PortError,
1398    },
1399}