objc2_core_image/generated/
CIDetector.rs

1//! This file has been automatically generated by `objc2`'s `header-translator`.
2//! DO NOT EDIT
3use core::ptr::NonNull;
4use objc2::__framework_prelude::*;
5use objc2_foundation::*;
6
7use crate::*;
8
extern_class!(
    /// Detects features in images.
    ///
    /// This class potentially holds onto a lot of state. Hence it may be
    /// beneficial from a performance perspective to re-use the same CIDetector
    /// instance. Specifying a CIContext when creating a detector may have an
    /// impact on performance since this context may be used when analyzing an
    /// image.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetector?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct CIDetector;
);
19
// Declares the Objective-C class's conformance to `NSObjectProtocol`,
// making the protocol's methods available on `CIDetector`.
extern_conformance!(
    unsafe impl NSObjectProtocol for CIDetector {}
);
23
impl CIDetector {
    extern_methods!(
        #[cfg(feature = "CIContext")]
        /// Returns a new detector instance of the given type.
        ///
        /// The type is used to specify the detection intent.
        /// This will return nil (`None`) if the detector type is not supported.
        ///
        /// The context argument specifies the CIContext to be used to operate on the image. May be nil.
        ///
        /// If the input image to -featuresInImage: is the output of a CoreImage operation, it may improve performance to specify the same context that was used to operate on that image.
        ///
        /// The detector may do image processing in this context and if the image is on the GPU and the specified context is a GPU context this may avoid additional upload to / download from the GPU. If the input image is on the CPU (or the output from a CPU based context) specifying a GPU based context (or vice versa) may reduce performance.
        ///
        /// The options parameter lets you optionally specify an accuracy / performance tradeoff. Can be nil or an empty dictionary.
        ///
        /// # Safety
        ///
        /// `options` generic should be of the correct type.
        #[unsafe(method(detectorOfType:context:options:))]
        #[unsafe(method_family = none)]
        pub unsafe fn detectorOfType_context_options(
            r#type: &NSString,
            context: Option<&CIContext>,
            options: Option<&NSDictionary<NSString, AnyObject>>,
        ) -> Option<Retained<CIDetector>>;

        #[cfg(all(feature = "CIFeature", feature = "CIImage"))]
        /// Returns an array of CIFeature instances in the given image.
        /// The array is sorted by confidence, highest confidence first.
        #[unsafe(method(featuresInImage:))]
        #[unsafe(method_family = none)]
        pub unsafe fn featuresInImage(&self, image: &CIImage) -> Retained<NSArray<CIFeature>>;

        #[cfg(all(feature = "CIFeature", feature = "CIImage"))]
        /// Returns an array of CIFeature instances in the given image.
        /// The array is sorted by confidence, highest confidence first.
        /// The options dictionary can contain a CIDetectorImageOrientation key value.
        ///
        /// # Safety
        ///
        /// `options` generic should be of the correct type.
        #[unsafe(method(featuresInImage:options:))]
        #[unsafe(method_family = none)]
        pub unsafe fn featuresInImage_options(
            &self,
            image: &CIImage,
            options: Option<&NSDictionary<NSString, AnyObject>>,
        ) -> Retained<NSArray<CIFeature>>;
    );
}
75
/// Methods declared on superclass `NSObject`.
impl CIDetector {
    extern_methods!(
        /// Plain `-init` inherited from `NSObject`.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Convenience `+new` (alloc + init) inherited from `NSObject`.
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
88
extern "C" {
    /// The detector type for face detection.
    ///
    /// [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectortypeface?language=objc)
    pub static CIDetectorTypeFace: &'static NSString;
}

extern "C" {
    /// The detector type for rectangle detection.
    ///
    /// [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectortyperectangle?language=objc)
    pub static CIDetectorTypeRectangle: &'static NSString;
}

extern "C" {
    /// The detector type for QR code detection.
    ///
    /// [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectortypeqrcode?language=objc)
    pub static CIDetectorTypeQRCode: &'static NSString;
}

extern "C" {
    /// The detector type for text detection.
    ///
    /// [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectortypetext?language=objc)
    pub static CIDetectorTypeText: &'static NSString;
}
108
extern "C" {
    /// An option key for specifying the accuracy / performance tradeoff of the
    /// detector (see ``CIDetectorAccuracyLow`` and ``CIDetectorAccuracyHigh``).
    ///
    /// [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectoraccuracy?language=objc)
    pub static CIDetectorAccuracy: &'static NSString;
}

extern "C" {
    /// A value for the ``CIDetectorAccuracy`` key favoring performance over accuracy.
    ///
    /// [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectoraccuracylow?language=objc)
    pub static CIDetectorAccuracyLow: &'static NSString;
}

extern "C" {
    /// A value for the ``CIDetectorAccuracy`` key favoring accuracy over performance.
    ///
    /// [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectoraccuracyhigh?language=objc)
    pub static CIDetectorAccuracyHigh: &'static NSString;
}
123
extern "C" {
    /// An option key related to feature tracking — see Apple's documentation
    /// for the exact semantics.
    ///
    /// [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectortracking?language=objc)
    pub static CIDetectorTracking: &'static NSString;
}

extern "C" {
    /// An option key for the minimum feature size the detector will report —
    /// see Apple's documentation for the value's units and range.
    ///
    /// [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectorminfeaturesize?language=objc)
    pub static CIDetectorMinFeatureSize: &'static NSString;
}

extern "C" {
    /// An option key for the maximum number of features the detector will report.
    ///
    /// [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectormaxfeaturecount?language=objc)
    pub static CIDetectorMaxFeatureCount: &'static NSString;
}

extern "C" {
    /// An option key for the number of angles — see Apple's documentation
    /// for the exact semantics.
    ///
    /// [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectornumberofangles?language=objc)
    pub static CIDetectorNumberOfAngles: &'static NSString;
}
143
extern "C" {
    /// A dictionary key that configures a Core Image feature detection operation
    /// to account for the orientation of the image.
    ///
    /// This option is used with ``/CIDetector/featuresInImage:options:``
    ///
    /// The value of this key is an NSNumber object whose value is an integer between 1 and 8.
    /// The TIFF and EXIF specifications define the orientation values that describe how the image should be displayed.
    /// The default value is 1. For further details, see `CGImagePropertyOrientation`.
    ///
    /// The ``CIDetectorTypeFace`` and ``CIDetectorTypeText`` detectors can use this option to correctly find faces or text.
    ///
    /// Regardless of the orientation value, the ``/CIFeature/bounds-property`` is always measured in
    /// the cartesian coordinate system of the image that you pass to the detector.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectorimageorientation?language=objc)
    pub static CIDetectorImageOrientation: &'static NSString;
}
162
extern "C" {
    /// A dictionary key that configures a Core Image face feature detection operation
    /// to perform additional processing to recognize closed eyes in detected faces.
    ///
    /// This option is used with ``/CIDetector/featuresInImage:options:``
    ///
    /// If the value of the key is true, then facial expressions such as blinking and smiles are extracted.
    /// This is needed for the ``/CIFaceFeature/leftEyeClosed-property`` and ``/CIFaceFeature/rightEyeClosed-property`` to function.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectoreyeblink?language=objc)
    pub static CIDetectorEyeBlink: &'static NSString;
}

extern "C" {
    /// A dictionary key that configures a Core Image face feature detection operation
    /// to perform additional processing to recognize smiles in detected faces.
    ///
    /// This option is used with ``/CIDetector/featuresInImage:options:``
    ///
    /// If the value of the key is true, then facial expressions such as blinking and smiles are extracted.
    /// This is needed for the ``/CIFaceFeature/hasSmile-property`` to function.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectorsmile?language=objc)
    pub static CIDetectorSmile: &'static NSString;
}
188
extern "C" {
    /// A dictionary key that configures a Core Image rectangle feature detection operation
    /// to account for the focal length of the camera used for the image.
    ///
    /// This option is used with ``/CIDetector/featuresInImage:options:``
    ///
    /// The value of this key is an NSNumber object whose value is a floating-point number. Use this option with the CIDetectorTypeRectangle
    /// detector type to control the effect of the CIDetectorAspectRatio option on feature detection.
    ///
    /// This option’s value can be 0.0, -1.0, or any positive value:
    /// * The special value of -1.0 (the default) disables the aspect ratio test for the returned rectangle.
    /// * The special value of 0.0 enables a less precise test of aspect ratio that approximates an orthographic (non-perspective) projection.
    /// Use this value if you want to specify the aspect ratio of the rectangle via the CIDetectorAspectRatio option, but have no means of
    /// determining the value for the focal length in pixels. See below for a method to compute an approximate value for the focal length in pixels.
    /// * Any other value specifies the camera focal length, in pixels, allowing the aspect ratio specification to account for perspective distortion
    /// of rectangles in the input image.
    ///
    /// If you know the diagonal field of view of the camera (the scene angle subtended by the diagonal corners of an image), you can use the
    /// following formula to compute an approximate focal length in pixels:
    ///
    /// `focal_length_pixels = (image_diagonal_pixels/2)/tan(FOV/2)`
    ///
    /// In this formula, `image_diagonal_pixels` is the length (in pixels) of the image diagonal of the maximum resolution of the camera sensor.
    /// For example, this value is:
    /// * `4080` pixels for a `3264 x 2448` (8 megapixel) sensor
    /// * `5000` pixels for a `4096 x 3024` (12 megapixel) sensor.
    ///
    /// To measure diagonal field of view, put the camera on a tripod so that it is perpendicular to a surface and the center of the image is
    /// oriented on a mark on the surface. Measure the distance from the mark to one of the corner points of the image (Y). Measure the distance
    /// from the camera to the surface (Z). The field of view is then `2*arctan(Y/Z)`.
    ///
    /// You must specify this value in terms of the maximum sensor resolution. If the supplied CIImage has been scaled relative to the
    /// maximum sensor resolution, the supplied focal length must also be similarly scaled.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectorfocallength?language=objc)
    pub static CIDetectorFocalLength: &'static NSString;
}
226
extern "C" {
    /// A dictionary key that configures a Core Image rectangle feature detection operation
    /// to search for a rectangle of a desired aspect ratio (width divided by height).
    ///
    /// This option is used with ``/CIDetector/featuresInImage:options:``
    ///
    /// The value for this key needs to be a positive float number.
    /// Use this option with a ``CIDetectorTypeRectangle`` detector to fine-tune the accuracy of the detector.
    ///
    /// For example, to more accurately find a business card (3.5 x 2 inches) in an image, specify an aspect ratio of 1.75.
    ///
    /// If this key is not specified, a default value of 1.6 is used.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectoraspectratio?language=objc)
    pub static CIDetectorAspectRatio: &'static NSString;
}
243
extern "C" {
    /// A dictionary key that configures a Core Image text feature detection operation
    /// to return feature information for components of detected features.
    ///
    /// This option is used with ``/CIDetector/featuresInImage:options:``
    ///
    /// The value for this option configures the ``CIDetectorTypeText`` detector as follows:
    /// * False: detect only in regions likely to contain text.
    /// * True: detect in regions likely to contain individual characters.
    ///
    /// If this key is not specified, the default is False.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/coreimage/cidetectorreturnsubfeatures?language=objc)
    pub static CIDetectorReturnSubFeatures: &'static NSString;
}