vexide_devices/smart/
ai_vision.rs

//! AI Vision sensor device.
//!
//! This module provides an API for interacting with the AI Vision sensor.
//! The AI Vision sensor is meant to be a direct upgrade from the [Vision Sensor](super::vision),
//! with a wider camera range and AI model capabilities.
//!
//! # Hardware overview
//!
//! The AI Vision sensor has three detection modes that can all be enabled at the same time:
//!
//! - [Color detection](AiVisionDetectionMode::COLOR)
//! - [Custom model detection](AiVisionDetectionMode::MODEL)
//! - [AprilTag detection](AiVisionDetectionMode::APRILTAG) (requires color detection to be enabled)
//!
//! Currently there is no known way to upload custom models to the sensor, and fields do not have AprilTags.
//! However, there are built-in models that can be used for detection.
//! See [VEX's documentation](https://kb.vex.com/hc/en-us/articles/30326315023892-Using-AI-Classifications-with-the-AI-Vision-Sensor) for more information.
//!
//! The resolution of the AI Vision sensor is 320x240 pixels.
//! It has a horizontal FOV of 74 degrees and a vertical FOV of 63 degrees.
//! Both of these values are a slight upgrade over the Vision Sensor.
//!
//! Unlike the Vision Sensor, the AI Vision sensor uses more readable color signatures
//! that may be created without the AI Vision utility.
//! It still has a USB port that can be used to create these signatures with VEX's utility.
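//!
//! # Examples
//!
//! A minimal usage sketch (assuming the sensor is plugged into port 1): enable color and
//! AprilTag detection, then poll the sensor for detected objects.
//!
//! ```
//! use vexide::prelude::*;
//!
//! #[vexide::main]
//! async fn main(peripherals: Peripherals) {
//!     let mut ai_vision = AiVisionSensor::new(peripherals.port_1);
//!     _ = ai_vision.set_detection_mode(AiVisionDetectionMode::COLOR | AiVisionDetectionMode::APRILTAG);
//!
//!     loop {
//!         if let Ok(objects) = ai_vision.objects() {
//!             println!("Detected {} objects", objects.len());
//!         }
//!         sleep(AiVisionSensor::UPDATE_INTERVAL).await;
//!     }
//! }
//! ```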

use alloc::{
    ffi::{CString, IntoStringError},
    string::String,
    vec::Vec,
};

use bitflags::bitflags;
use mint::Point2;
use rgb::Rgb;
use snafu::Snafu;
use vex_sdk::{
    vexDeviceAiVisionClassNameGet, vexDeviceAiVisionCodeGet, vexDeviceAiVisionCodeSet,
    vexDeviceAiVisionColorGet, vexDeviceAiVisionColorSet, vexDeviceAiVisionModeSet,
    vexDeviceAiVisionObjectCountGet, vexDeviceAiVisionObjectGet, vexDeviceAiVisionStatusGet,
    vexDeviceAiVisionTemperatureGet, V5_DeviceAiVisionCode, V5_DeviceAiVisionColor,
    V5_DeviceAiVisionObject, V5_DeviceT,
};

use super::{SmartDevice, SmartDeviceType, SmartPort};
use crate::PortError;

type Result<T, E = AiVisionError> = core::result::Result<T, E>;

#[repr(u8)]
enum ObjectType {
    Unknown = 0,
    Color = (1 << 0),
    Code = (1 << 1),
    Model = (1 << 2),
    AprilTag = (1 << 3),
    All = 0x3F,
}
impl From<u8> for ObjectType {
    fn from(value: u8) -> Self {
        #[allow(clippy::match_same_arms)]
        match value {
            1 => ObjectType::Color,
            2 => ObjectType::Code,
            4 => ObjectType::Model,
            8 => ObjectType::AprilTag,
            63 => ObjectType::All,
            _ => ObjectType::Unknown,
        }
    }
}

/// The data associated with an AI Vision object.
/// The data is different depending on the type of object detected.
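///
/// # Examples
///
/// A brief sketch of checking whether a detected object is an AprilTag (here `object` is
/// assumed to be one element returned by [`AiVisionSensor::objects`]):
///
/// ```
/// use vexide::prelude::*;
///
/// fn print_tag(object: &AiVisionObject) {
///     if let AiVisionObject::AprilTag { id, top_left, .. } = object {
///         println!("Tag {} has its top-left corner at ({}, {})", id, top_left.x, top_left.y);
///     }
/// }
/// ```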
#[derive(Debug, Clone, PartialEq)]
pub enum AiVisionObject {
    /// An object detected by color blob detection.
    Color {
        /// ID of the signature used to detect this object.
        id: u8,
        /// The top-left corner of the object.
        position: Point2<u16>,
        /// The width of the object.
        width: u16,
        /// The height of the object.
        height: u16,
    },

    /// An object detected by color code detection.
    Code {
        /// ID of the code used to detect this object.
        id: u8,
        /// The position of the object.
        position: Point2<u16>,
        /// The width of the object.
        width: u16,
        /// The height of the object.
        height: u16,
        /// The angle of the object's associated colors. Not always reliably available.
        angle: f64,
    },

    /// An object detected by AprilTag detection.
    AprilTag {
        /// The ID number of the detected AprilTag.
        id: u8,
        /// Position of the top-left corner of the tag.
        top_left: mint::Point2<i16>,
        /// Position of the top-right corner of the tag.
        top_right: mint::Point2<i16>,
        /// Position of the bottom-right corner of the tag.
        bottom_right: mint::Point2<i16>,
        /// Position of the bottom-left corner of the tag.
        bottom_left: mint::Point2<i16>,
    },

    /// An object detected by an onboard model.
    Model {
        /// ID of the detected object.
        id: u8,
        /// A string describing the specific onboard model used to detect this object.
        classification: String,
        /// The position of the object.
        position: Point2<u16>,
        /// The width of the object.
        width: u16,
        /// The height of the object.
        height: u16,
        /// The confidence reported by the model.
        confidence: u16,
    },
}

/// Possible AprilTag families to be detected by the sensor.
#[derive(Default, Debug, Copy, Clone, Eq, PartialEq)]
#[repr(u8)]
pub enum AprilTagFamily {
    /// Circle21h7 family
    #[default]
    Circle21h7 = 0,
    /// 16h5 family
    Tag16h5 = 1,
    /// 25h9 family
    Tag25h9 = 2,
    /// 36h11 family
    Tag36h11 = 3,
}

bitflags! {
    /// Represents the mode of the AI Vision sensor.
    #[derive(Debug, Copy, Clone, Eq, PartialEq)]
    pub struct AiVisionFlags: u8 {
        /// Disable AprilTag detection
        const DISABLE_APRILTAG = 1 << 0;
        /// Disable color detection
        const DISABLE_COLOR = 1 << 1;
        /// Disable model detection
        const DISABLE_MODEL = 1 << 2;
        /// Merge color blobs?
        const COLOR_MERGE = 1 << 4;
        /// Disable status overlay
        const DISABLE_STATUS_OVERLAY = 1 << 5;
        /// Disable USB overlay
        const DISABLE_USB_OVERLAY = 1 << 7;
    }

    /// Flags relating to the sensor's detection mode.
    #[derive(Debug, Copy, Clone, Eq, PartialEq)]
    pub struct AiVisionDetectionMode: u8 {
        /// Enable AprilTag detection
        const APRILTAG = 1 << 0;
        /// Enable color detection
        const COLOR = 1 << 1;
        /// Enable model detection
        const MODEL = 1 << 2;
        /// Merge color blobs?
        const COLOR_MERGE = 1 << 4;
    }
}

impl Default for AiVisionFlags {
    fn default() -> Self {
        Self::DISABLE_USB_OVERLAY
    }
}

impl From<AiVisionDetectionMode> for AiVisionFlags {
    fn from(value: AiVisionDetectionMode) -> Self {
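        // Convert "enable" detection bits into the sensor's "disable"-style flags: flip
        // the COLOR_MERGE bit first (it is a positive flag in both types), invert the
        // remaining defined bits, then mask off the overlay flags so they stay cleared.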
        !Self::from_bits((value ^ AiVisionDetectionMode::COLOR_MERGE).bits()).unwrap_or_default()
            & !(Self::DISABLE_STATUS_OVERLAY | Self::DISABLE_USB_OVERLAY)
    }
}

/// A color signature used by an AI Vision Sensor to detect color blobs.
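///
/// # Examples
///
/// A minimal sketch of a signature for bright red with a moderate hue and saturation tolerance:
///
/// ```
/// use vexide::prelude::*;
///
/// let red = AiVisionColor {
///     rgb: Rgb::new(255, 0, 0),
///     hue_range: 10.0,
///     saturation_range: 1.0,
/// };
/// ```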
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct AiVisionColor {
    /// The RGB color value.
    pub rgb: Rgb<u8>,
    /// The accepted hue range of the color. VEXcode limits this value to [0, 20].
    pub hue_range: f32,
    /// The accepted saturation range of the color.
    pub saturation_range: f32,
}

/// A color code used by an AI Vision Sensor to detect groups of color blobs.
///
/// The color code can have up to 7 color signatures.
/// When the colors in a color code are detected next to each other, the sensor will detect the color code.
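///
/// # Examples
///
/// A brief sketch of grouping two color signature IDs into a code:
///
/// ```
/// use vexide::prelude::*;
///
/// // Group color signatures 1 and 2 (created with `AiVisionSensor::set_color`) into one code.
/// let code = AiVisionColorCode::from((1, 2));
/// ```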
pub struct AiVisionColorCode([Option<u8>; 7]);
impl AiVisionColorCode {
    /// Creates a new color code with the given color signature IDs.
    #[must_use]
    pub const fn new(code: [Option<u8>; 7]) -> Self {
        Self(code)
    }

    /// Returns the color signature IDs in the color code.
    #[must_use]
    pub fn colors(&self) -> Vec<u8> {
        self.0.iter().flatten().copied().collect()
    }
}
impl From<(u8,)> for AiVisionColorCode {
    fn from(value: (u8,)) -> Self {
        Self([Some(value.0), None, None, None, None, None, None])
    }
}
impl From<(u8, u8)> for AiVisionColorCode {
    fn from(value: (u8, u8)) -> Self {
        Self([Some(value.0), Some(value.1), None, None, None, None, None])
    }
}
impl From<(u8, u8, u8)> for AiVisionColorCode {
    fn from(value: (u8, u8, u8)) -> Self {
        Self([
            Some(value.0),
            Some(value.1),
            Some(value.2),
            None,
            None,
            None,
            None,
        ])
    }
}
impl From<(u8, u8, u8, u8)> for AiVisionColorCode {
    fn from(value: (u8, u8, u8, u8)) -> Self {
        Self([
            Some(value.0),
            Some(value.1),
            Some(value.2),
            Some(value.3),
            None,
            None,
            None,
        ])
    }
}
impl From<(u8, u8, u8, u8, u8)> for AiVisionColorCode {
    fn from(value: (u8, u8, u8, u8, u8)) -> Self {
        Self([
            Some(value.0),
            Some(value.1),
            Some(value.2),
            Some(value.3),
            Some(value.4),
            None,
            None,
        ])
    }
}
impl From<(u8, u8, u8, u8, u8, u8)> for AiVisionColorCode {
    fn from(value: (u8, u8, u8, u8, u8, u8)) -> Self {
        Self([
            Some(value.0),
            Some(value.1),
            Some(value.2),
            Some(value.3),
            Some(value.4),
            Some(value.5),
            None,
        ])
    }
}
impl From<(u8, u8, u8, u8, u8, u8, u8)> for AiVisionColorCode {
    fn from(value: (u8, u8, u8, u8, u8, u8, u8)) -> Self {
        Self([
            Some(value.0),
            Some(value.1),
            Some(value.2),
            Some(value.3),
            Some(value.4),
            Some(value.5),
            Some(value.6),
        ])
    }
}
macro_rules! impl_code_from_array {
    ($($size:literal),*) => {
        $(
            impl From<[Option<u8>; $size]> for AiVisionColorCode {
                fn from(value: [Option<u8>; $size]) -> Self {
                    let mut code = [None; 7];
                    code[..$size].copy_from_slice(&value[..]);
                    Self(code)
                }
            }
            impl From<[u8; $size]> for AiVisionColorCode {
                fn from(value: [u8; $size]) -> Self {
                    let mut code = [None; 7];
                    for (i, id) in value.iter().enumerate() {
                        code[i] = Some(*id);
                    }
                    Self(code)
                }
            }
        )*
    };
}
impl_code_from_array!(1, 2, 3, 4, 5, 6, 7);

/// An AI Vision sensor.
pub struct AiVisionSensor {
    port: SmartPort,
    device: V5_DeviceT,
}

// SAFETY: Required because we store a raw pointer to the device handle to avoid fetching it
// from the SDK on each device function call. Simply sharing a raw pointer across threads is
// not inherently unsafe.
unsafe impl Send for AiVisionSensor {}
unsafe impl Sync for AiVisionSensor {}

impl AiVisionSensor {
    /// Maximum number of objects that can be detected at once.
    pub const MAX_OBJECTS: usize = 24;

    /// The horizontal resolution of the AI Vision sensor.
    pub const HORIZONTAL_RESOLUTION: u16 = 320;

    /// The vertical resolution of the AI Vision sensor.
    pub const VERTICAL_RESOLUTION: u16 = 240;

    /// The horizontal FOV of the AI Vision sensor in degrees.
    pub const HORIZONTAL_FOV: f32 = 74.0;

    /// The vertical FOV of the AI Vision sensor in degrees.
    pub const VERTICAL_FOV: f32 = 63.0;

    /// The diagonal FOV of the AI Vision sensor in degrees.
    pub const DIAGONAL_FOV: f32 = 87.0;

    const RESET_FLAG: u32 = (1 << 30);
    const TAG_SET_FLAG: u32 = (1 << 29);
    const MODE_SET_FLAG: u32 = (1 << 25);
    const TEST_MODE_FLAG: u32 = (1 << 26);
    const AWB_START_FLAG: u32 = (1 << 27);
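    // The value passed to `vexDeviceAiVisionModeSet` packs several fields into one u32:
    // byte 2 (bits 8..=15) holds the AiVisionFlags bits, byte 3 (bits 16..=23) holds the
    // AprilTag family, test mode, or AWB request, and the flag constants above occupy the
    // high byte to tell the sensor which of those fields to apply.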

    // const AWB_START_VALUE: u32 = 4;

    /// Creates a new AI Vision sensor from a Smart Port.
    ///
    /// # Examples
    ///
    /// ```
    /// use vexide::prelude::*;
    ///
    /// #[vexide::main]
    /// async fn main(peripherals: Peripherals) {
    ///     let mut ai_vision = AiVisionSensor::new(peripherals.port_1);
    ///     // Do something with the AI Vision sensor
    /// }
    /// ```
    #[must_use]
    pub fn new(port: SmartPort) -> Self {
        let device = unsafe { port.device_handle() };

        unsafe {
            vexDeviceAiVisionModeSet(device, Self::RESET_FLAG);
        }

        Self { port, device }
    }

    /// Returns the current temperature of the AI Vision sensor.
    ///
    /// # Errors
    ///
    /// - A [`PortError`] is returned if an AI Vision is not connected to the Smart Port.
    ///
    /// # Examples
    ///
    /// ```
    /// use vexide::prelude::*;
    ///
    /// #[vexide::main]
    /// async fn main(peripherals: Peripherals) {
    ///     let ai_vision = AiVisionSensor::new(peripherals.port_1);
    ///     loop {
    ///         println!("{:?}", ai_vision.temperature());
    ///         sleep(AiVisionSensor::UPDATE_INTERVAL).await;
    ///     }
    /// }
    /// ```
    pub fn temperature(&self) -> Result<f64> {
        self.validate_port()?;
        Ok(unsafe { vexDeviceAiVisionTemperatureGet(self.device) })
    }

    /// Sets a color code used to detect groups of colors.
    ///
    /// # Panics
    ///
    /// - Panics if the given color code contains an ID that is not in the interval [1, 7].
    /// - Panics if the given ID is not in the interval [1, 8].
    ///
    /// # Errors
    ///
    /// - A [`PortError`] is returned if an AI Vision is not connected to the Smart Port.
    ///
    /// # Examples
    ///
    /// ```
    /// use vexide::prelude::*;
    ///
    /// #[vexide::main]
    /// async fn main(peripherals: Peripherals) {
    ///     let mut ai_vision = AiVisionSensor::new(peripherals.port_1);
    ///     let color = AiVisionColor {
    ///         rgb: Rgb::new(255, 0, 0),
    ///         hue_range: 10.0,
    ///         saturation_range: 1.0,
    ///     };
    ///     _ = ai_vision.set_color(1, color);
    ///     let code = AiVisionColorCode::from([1]);
    ///     _ = ai_vision.set_color_code(1, &code);
    /// }
    /// ```
    pub fn set_color_code(&mut self, id: u8, code: &AiVisionColorCode) -> Result<()> {
        assert!(
            (1..=8).contains(&id),
            "The given ID ({id}) is out of the interval [1, 8]."
        );
        self.validate_port()?;

        // Copy the color code into the V5_DeviceAiVisionCode struct
        let mut ids = [0u8; 7];
        for (i, id) in code.0.iter().flatten().enumerate() {
            assert!(
                (1..=7).contains(id),
                "The given color code contains an ID ({id}) that is out of the interval [1, 7]."
            );
            ids[i] = *id;
        }

        // Calculate the number of color IDs in the color code
        let mut len = 0;
        for id in &ids {
            if *id != 0 {
                len += 1;
            } else {
                break;
            }
        }

        let mut code = V5_DeviceAiVisionCode {
            id,
            len,
            c1: i16::from(ids[0]),
            c2: i16::from(ids[1]),
            c3: i16::from(ids[2]),
            c4: i16::from(ids[3]),
            c5: i16::from(ids[4]),
            c6: i16::from(ids[5]),
            c7: i16::from(ids[6]),
        };
        unsafe {
            vexDeviceAiVisionCodeSet(self.device, core::ptr::from_mut(&mut code));
        }

        Ok(())
    }

    /// Returns the color code set on the AI Vision sensor with the given ID if it exists.
    ///
    /// # Panics
    ///
    /// - Panics if the given ID is not in the interval [1, 8].
    ///
    /// # Errors
    ///
    /// - A [`PortError`] is returned if an AI Vision is not connected to the Smart Port.
    ///
    /// # Examples
    ///
    /// ```
    /// use vexide::prelude::*;
    ///
    /// #[vexide::main]
    /// async fn main(peripherals: Peripherals) {
    ///     let mut ai_vision = AiVisionSensor::new(peripherals.port_1);
    ///     let code = AiVisionColorCode::from([1]);
    ///     _ = ai_vision.set_color_code(1, &code);
    ///     if let Ok(Some(code)) = ai_vision.color_code(1) {
    ///         println!("{:?}", code);
    ///     } else {
    ///         println!("Something went wrong!");
    ///     }
    /// }
    /// ```
    pub fn color_code(&self, id: u8) -> Result<Option<AiVisionColorCode>> {
        assert!(
            (1..=8).contains(&id),
            "The given ID ({id}) is out of the interval [1, 8]."
        );
        self.validate_port()?;

        // Get the color code from the sensor
        let mut code: V5_DeviceAiVisionCode = unsafe { core::mem::zeroed() };
        let read = unsafe {
            vexDeviceAiVisionCodeGet(self.device, id.into(), core::ptr::from_mut(&mut code))
        };
        if !read {
            return Ok(None);
        }

        // Get the valid (hopefully) color ids from the color code
        let ids = [
            code.c1, code.c2, code.c3, code.c4, code.c5, code.c6, code.c7,
        ];
        let mut color_ids = [None; 7];
        for i in 0..code.len as usize {
            color_ids[i] = Some(ids[i] as u8);
        }

        let signature = AiVisionColorCode::from(color_ids);

        Ok(Some(signature))
    }

    /// Returns all color codes set on the AI Vision sensor.
    ///
    /// # Errors
    ///
    /// - A [`PortError`] is returned if an AI Vision is not connected to the Smart Port.
    ///
    /// # Examples
    ///
    /// ```
    /// use vexide::prelude::*;
    ///
    /// #[vexide::main]
    /// async fn main(peripherals: Peripherals) {
    ///     let mut ai_vision = AiVisionSensor::new(peripherals.port_1);
    ///     _ = ai_vision.set_color_code(1, &AiVisionColorCode::from([1]));
    ///     _ = ai_vision.set_color_code(2, &AiVisionColorCode::from([1, 2]));
    ///     println!("{:?}", ai_vision.color_codes());
    /// }
    /// ```
    pub fn color_codes(&self) -> Result<[Option<AiVisionColorCode>; 8]> {
        Ok([
            self.color_code(1)?,
            self.color_code(2)?,
            self.color_code(3)?,
            self.color_code(4)?,
            self.color_code(5)?,
            self.color_code(6)?,
            self.color_code(7)?,
            self.color_code(8)?,
        ])
    }

    /// Sets a color signature for the AI Vision sensor.
    ///
    /// # Panics
    ///
    /// - Panics if the given ID is not in the interval [1, 7].
    ///
    /// # Errors
    ///
    /// - A [`PortError`] is returned if an AI Vision is not connected to the Smart Port.
    ///
    /// # Examples
    ///
    /// ```
    /// use vexide::prelude::*;
    ///
    /// #[vexide::main]
    /// async fn main(peripherals: Peripherals) {
    ///     let mut ai_vision = AiVisionSensor::new(peripherals.port_1);
    ///     let color = AiVisionColor {
    ///         rgb: Rgb::new(255, 0, 0),
    ///         hue_range: 10.0,
    ///         saturation_range: 1.0,
    ///     };
    ///     _ = ai_vision.set_color(1, color);
    ///     _ = ai_vision.set_color(2, color);
    /// }
    /// ```
    pub fn set_color(&mut self, id: u8, color: AiVisionColor) -> Result<()> {
        assert!(
            (1..=7).contains(&id),
            "The given ID ({id}) is out of the interval [1, 7]."
        );
        self.validate_port()?;

        let mut color = V5_DeviceAiVisionColor {
            id,
            red: color.rgb.r,
            grn: color.rgb.g,
            blu: color.rgb.b,
            hangle: color.hue_range,
            hdsat: color.saturation_range,
            reserved: 0,
        };

        // TODO: Make sure that the color is not modified by this function
        unsafe { vexDeviceAiVisionColorSet(self.device, core::ptr::from_mut(&mut color)) }

        Ok(())
    }

    /// Returns the color signature set on the AI Vision sensor with the given ID if it exists.
    ///
    /// # Panics
    ///
    /// - Panics if the given ID is not in the interval [1, 7].
    ///
    /// # Errors
    ///
    /// - A [`PortError`] is returned if an AI Vision is not connected to the Smart Port.
    ///
    /// # Examples
    ///
    /// ```
    /// use vexide::prelude::*;
    ///
    /// #[vexide::main]
    /// async fn main(peripherals: Peripherals) {
    ///     let mut ai_vision = AiVisionSensor::new(peripherals.port_1);
    ///     let color = AiVisionColor {
    ///         rgb: Rgb::new(255, 0, 0),
    ///         hue_range: 10.0,
    ///         saturation_range: 1.0,
    ///     };
    ///     _ = ai_vision.set_color(1, color);
    ///     if let Ok(Some(color)) = ai_vision.color(1) {
    ///         println!("{:?}", color);
    ///     } else {
    ///         println!("Something went wrong!");
    ///     }
    /// }
    /// ```
    pub fn color(&self, id: u8) -> Result<Option<AiVisionColor>> {
        assert!(
            (1..=7).contains(&id),
            "The given ID ({id}) is out of the interval [1, 7]."
        );
        self.validate_port()?;

        let mut color: V5_DeviceAiVisionColor = unsafe { core::mem::zeroed() };

        let read = unsafe {
            vexDeviceAiVisionColorGet(self.device, u32::from(id), core::ptr::from_mut(&mut color))
        };
        if !read {
            return Ok(None);
        }

        Ok(Some(AiVisionColor {
            rgb: Rgb::new(color.red, color.grn, color.blu),
            hue_range: color.hangle,
            saturation_range: color.hdsat,
        }))
    }

    /// Returns all color signatures set on the AI Vision sensor.
    ///
    /// # Errors
    ///
    /// - A [`PortError`] is returned if an AI Vision is not connected to the Smart Port.
    ///
    /// # Examples
    ///
    /// ```
    /// use vexide::prelude::*;
    ///
    /// #[vexide::main]
    /// async fn main(peripherals: Peripherals) {
    ///     let mut ai_vision = AiVisionSensor::new(peripherals.port_1);
    ///     let color = AiVisionColor {
    ///         rgb: Rgb::new(255, 0, 0),
    ///         hue_range: 10.0,
    ///         saturation_range: 1.0,
    ///     };
    ///     _ = ai_vision.set_color(1, color);
    ///     let colors = ai_vision.colors().unwrap();
    ///     println!("{:?}", colors);
    /// }
    /// ```
    pub fn colors(&self) -> Result<[Option<AiVisionColor>; 7]> {
        Ok([
            self.color(1)?,
            self.color(2)?,
            self.color(3)?,
            self.color(4)?,
            self.color(5)?,
            self.color(6)?,
            self.color(7)?,
        ])
    }

    /// Sets the detection mode of the AI Vision sensor.
    ///
    /// # Errors
    ///
    /// - A [`PortError`] is returned if an AI Vision is not connected to the Smart Port.
    ///
    /// # Examples
    ///
    /// ```
    /// use vexide::prelude::*;
    ///
    /// #[vexide::main]
    /// async fn main(peripherals: Peripherals) {
    ///     let mut ai_vision = AiVisionSensor::new(peripherals.port_1);
    ///     _ = ai_vision.set_detection_mode(AiVisionDetectionMode::COLOR | AiVisionDetectionMode::COLOR_MERGE);
    /// }
    /// ```
    pub fn set_detection_mode(&mut self, mode: AiVisionDetectionMode) -> Result<()> {
        let flags = (self.flags()?
            & (AiVisionFlags::DISABLE_USB_OVERLAY | AiVisionFlags::DISABLE_STATUS_OVERLAY))
            | AiVisionFlags::from(mode);
        self.set_flags(flags)
    }

    fn raw_status(&self) -> Result<u32> {
        self.validate_port()?;
        let status = unsafe { vexDeviceAiVisionStatusGet(self.device) };
        Ok(status)
    }

    /// Returns the current flags of the AI Vision sensor, including the detection mode
    /// flags set by [`Self::set_detection_mode`].
    ///
    /// # Errors
    ///
    /// - A [`PortError`] is returned if an AI Vision is not connected to the Smart Port.
    ///
    /// # Examples
    ///
    /// ```
    /// use vexide::prelude::*;
    ///
    /// #[vexide::main]
    /// async fn main(peripherals: Peripherals) {
    ///     let ai_vision = AiVisionSensor::new(peripherals.port_1);
    ///     println!("{:?}", ai_vision.flags());
    /// }
    /// ```
    pub fn flags(&self) -> Result<AiVisionFlags> {
        // Only care about the first byte of status.
        // See https://play.rust-lang.org/?version=stable&mode=debug&edition=2021&gist=c988c99e1f9b3a6d3c3fd91591b6dac1
        Ok(AiVisionFlags::from_bits_retain(
            (self.raw_status()? & 0xff) as u8,
        ))
    }

    /// Sets the full flags of the AI Vision sensor, including the detection mode.
    ///
    /// # Errors
    ///
    /// - A [`PortError`] is returned if an AI Vision is not connected to the Smart Port.
    ///
    /// # Examples
    ///
    /// ```
    /// use vexide::prelude::*;
    ///
    /// #[vexide::main]
    /// async fn main(peripherals: Peripherals) {
    ///     let mut ai_vision = AiVisionSensor::new(peripherals.port_1);
    ///     // Enable all detection modes except for custom model and disable USB overlay
    ///     let flags = AiVisionFlags::DISABLE_USB_OVERLAY | AiVisionFlags::DISABLE_MODEL;
    ///     _ = ai_vision.set_flags(flags);
    /// }
    /// ```
    pub fn set_flags(&mut self, mode: AiVisionFlags) -> Result<()> {
        // Status is shifted to the right from mode. Least-significant byte is missing.
        // See https://play.rust-lang.org/?version=stable&mode=debug&edition=2021&gist=c988c99e1f9b3a6d3c3fd91591b6dac1
        let mut new_mode = self.raw_status()? << 8;

        // Clear the mode bits.
        new_mode &= !(0xff << 8);
        // Set the mode bits and set the update flag in byte 4.
        new_mode |= (u32::from(mode.bits()) << 8) | Self::MODE_SET_FLAG;

        // Update mode
        unsafe { vexDeviceAiVisionModeSet(self.device, new_mode) }

        Ok(())
    }

    /// Restarts the automatic white balance process.
    ///
    /// # Errors
    ///
    /// - A [`PortError`] is returned if an AI Vision is not connected to the Smart Port.
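    ///
    /// # Examples
    ///
    /// A minimal usage sketch (assuming the sensor is plugged into port 1):
    ///
    /// ```
    /// use vexide::prelude::*;
    ///
    /// #[vexide::main]
    /// async fn main(peripherals: Peripherals) {
    ///     let mut ai_vision = AiVisionSensor::new(peripherals.port_1);
    ///     // Re-run automatic white balance, e.g. after lighting conditions change.
    ///     _ = ai_vision.start_awb();
    /// }
    /// ```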
    pub fn start_awb(&mut self) -> Result<()> {
        // Status is shifted to the right from mode. Least-significant byte is missing.
        // See https://play.rust-lang.org/?version=stable&mode=debug&edition=2021&gist=c988c99e1f9b3a6d3c3fd91591b6dac1
        let mut new_mode = self.raw_status()? << 8;

        new_mode &= !(0xff << 16); // Clear byte 3
        new_mode |= (1 << 18) | Self::AWB_START_FLAG;

        // Update mode
        unsafe { vexDeviceAiVisionModeSet(self.device, new_mode) }

        Ok(())
    }

    /// Enables an undocumented test mode on the sensor. Its exact purpose is unknown.
    ///
    /// # Errors
    ///
    /// - A [`PortError`] is returned if an AI Vision is not connected to the Smart Port.
    pub fn enable_test(&mut self, test: u8) -> Result<()> {
        // Status is shifted to the right from mode. Least-significant byte is missing.
        // See https://play.rust-lang.org/?version=stable&mode=debug&edition=2021&gist=c988c99e1f9b3a6d3c3fd91591b6dac1
        let mut new_mode = self.raw_status()? << 8;

        new_mode &= !(0xff << 16); // Clear byte 3
        new_mode |= (u32::from(test) << 16) | Self::TEST_MODE_FLAG;

        // Update mode
        unsafe { vexDeviceAiVisionModeSet(self.device, new_mode) }

        Ok(())
    }
    /// Sets the family of AprilTag that will be detected.
    ///
    /// # Errors
    ///
    /// - A [`PortError`] is returned if an AI Vision is not connected to the Smart Port.
    ///
    /// # Examples
    ///
    /// ```
    /// use vexide::prelude::*;
    ///
    /// #[vexide::main]
    /// async fn main(peripherals: Peripherals) {
    ///     let mut ai_vision = AiVisionSensor::new(peripherals.port_1);
    ///     _ = ai_vision.set_apriltag_family(AprilTagFamily::Tag16h5);
    /// }
    /// ```
    pub fn set_apriltag_family(&mut self, family: AprilTagFamily) -> Result<()> {
        // Status is shifted to the right from mode. Least-significant byte is missing.
        // See https://play.rust-lang.org/?version=stable&mode=debug&edition=2021&gist=c988c99e1f9b3a6d3c3fd91591b6dac1
        let mut new_mode = self.raw_status()? << 8;

        new_mode &= !(0xff << 16); // Clear the existing AprilTag family bits.
        new_mode |= (u32::from(family as u8) << 16) | Self::TAG_SET_FLAG; // Set family bits

        // Update mode
        unsafe { vexDeviceAiVisionModeSet(self.device, new_mode) }

        Ok(())
    }

    /// Returns all objects detected by the AI Vision sensor.
    ///
    /// # Errors
    ///
    /// - A [`PortError`] is returned if an AI Vision is not connected to the Smart Port.
    ///
    /// # Examples
    ///
    /// Loop through all objects of a specific type:
    ///
    /// ```
    /// use vexide::prelude::*;
    ///
    /// #[vexide::main]
    /// async fn main(peripherals: Peripherals) {
    ///     let mut ai_vision = AiVisionSensor::new(peripherals.port_1);
    ///     loop {
    ///         let objects = ai_vision.objects().unwrap();
    ///         for object in objects {
    ///             if let AiVisionObject::Color { position, .. } = object {
    ///                 println!("{:?}", position);
    ///             }
    ///         }
    ///         sleep(AiVisionSensor::UPDATE_INTERVAL).await;
    ///     }
    /// }
    /// ```
    pub fn objects(&self) -> Result<Vec<AiVisionObject>> {
        let num_objects = self.object_count()?;

        let mut objects = Vec::new();
        for i in 0..num_objects {
            unsafe {
                let mut raw: V5_DeviceAiVisionObject = core::mem::zeroed();
                vexDeviceAiVisionObjectGet(self.device, i, core::ptr::from_mut(&mut raw));

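                // Which field of the `raw.object` union is valid depends on the reported
                // object type, so match on it before reading any of the union's fields.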
                let object = match raw.r#type.into() {
                    ObjectType::Color => AiVisionObject::Color {
                        id: raw.id,
                        position: Point2 {
                            x: raw.object.color.xoffset,
                            y: raw.object.color.yoffset,
                        },
                        width: raw.object.color.width,
                        height: raw.object.color.height,
                    },
                    ObjectType::Code => AiVisionObject::Code {
                        id: raw.id,
                        position: Point2 {
                            x: raw.object.color.xoffset,
                            y: raw.object.color.yoffset,
                        },
                        width: raw.object.color.width,
                        height: raw.object.color.height,
                        angle: f64::from(raw.object.color.angle) / 10.0,
                    },
                    ObjectType::Model => AiVisionObject::Model {
                        id: raw.id,
                        classification: {
                            let ptr = CString::default().into_raw();

                            vexDeviceAiVisionClassNameGet(
                                self.device,
                                i32::from(raw.id),
                                ptr.cast(),
                            );

                            CString::from_raw(ptr).into_string()?
                        },
                        position: Point2 {
                            x: raw.object.model.xoffset,
                            y: raw.object.model.yoffset,
                        },
                        width: raw.object.model.width,
                        height: raw.object.model.height,
                        confidence: raw.object.model.score,
                    },
                    ObjectType::AprilTag => AiVisionObject::AprilTag {
                        id: raw.id,
                        top_left: mint::Point2 {
                            x: raw.object.tag.x0,
                            y: raw.object.tag.y0,
                        },
                        top_right: mint::Point2 {
                            x: raw.object.tag.x1,
                            y: raw.object.tag.y1,
                        },
                        bottom_right: mint::Point2 {
                            x: raw.object.tag.x2,
                            y: raw.object.tag.y2,
                        },
                        bottom_left: mint::Point2 {
                            x: raw.object.tag.x3,
                            y: raw.object.tag.y3,
                        },
                    },
                    _ => return Err(AiVisionError::InvalidObject),
                };

                objects.push(object);
            }
        }

        Ok(objects)
    }

    /// Returns the number of objects currently detected by the AI Vision sensor.
    ///
    /// # Errors
    ///
    /// - A [`PortError`] is returned if an AI Vision is not connected to the Smart Port.
    ///
    /// # Examples
    ///
    /// ```
    /// use vexide::prelude::*;
    ///
    /// #[vexide::main]
    /// async fn main(peripherals: Peripherals) {
    ///     let ai_vision = AiVisionSensor::new(peripherals.port_1);
    ///     loop {
    ///         println!("AI Vision sensor currently detects {:?} objects", ai_vision.object_count());
    ///         sleep(AiVisionSensor::UPDATE_INTERVAL).await;
    ///     }
    /// }
    /// ```
    pub fn object_count(&self) -> Result<u32> {
        self.validate_port()?;
        Ok(unsafe { vexDeviceAiVisionObjectCountGet(self.device) as _ })
    }
}

impl SmartDevice for AiVisionSensor {
    fn port_number(&self) -> u8 {
        self.port.number()
    }

    fn device_type(&self) -> SmartDeviceType {
        SmartDeviceType::AiVision
    }
}
impl From<AiVisionSensor> for SmartPort {
    fn from(val: AiVisionSensor) -> Self {
        val.port
    }
}

/// Errors that can occur when using an AI Vision sensor.
#[derive(Debug, Snafu)]
pub enum AiVisionError {
    /// An object created by VEXos failed to be converted.
    InvalidObject,
    /// Failed to fetch the class name of a model-detected object due to it having an invalid
    /// string representation.
    #[snafu(transparent)]
    InvalidClassName {
        /// The source of the error.
        source: IntoStringError,
    },
    /// Generic port related error.
    #[snafu(transparent)]
    Port {
        /// The source of the error.
        source: PortError,
    },
}