// objc2_av_foundation/generated/AVCaptureDevice.rs

1//! This file has been automatically generated by `objc2`'s `header-translator`.
2//! DO NOT EDIT
3use core::ffi::*;
4use core::ptr::NonNull;
5use objc2::__framework_prelude::*;
6#[cfg(feature = "objc2-core-foundation")]
7use objc2_core_foundation::*;
8#[cfg(feature = "objc2-core-media")]
9use objc2_core_media::*;
10use objc2_foundation::*;
11#[cfg(feature = "objc2-quartz-core")]
12#[cfg(not(target_os = "watchos"))]
13use objc2_quartz_core::*;
14
15use crate::*;
16
extern "C" {
    /// Posted when a device becomes available on the system.
    ///
    ///
    /// The notification object is an AVCaptureDevice instance representing the device that became available.
    ///
    /// This is an [`NSNotificationName`] (a static `NSString`); the symbol is resolved against the AVFoundation framework at link/load time.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicewasconnectednotification?language=objc)
    pub static AVCaptureDeviceWasConnectedNotification: &'static NSNotificationName;
}
26
extern "C" {
    /// Posted when a device becomes unavailable on the system.
    ///
    ///
    /// The notification object is an AVCaptureDevice instance representing the device that became unavailable.
    ///
    /// This is an [`NSNotificationName`] (a static `NSString`); the symbol is resolved against the AVFoundation framework at link/load time.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicewasdisconnectednotification?language=objc)
    pub static AVCaptureDeviceWasDisconnectedNotification: &'static NSNotificationName;
}
36
extern "C" {
    /// Posted when the instance of AVCaptureDevice has detected a substantial change to the video subject area.
    ///
    ///
    /// Clients may observe the AVCaptureDeviceSubjectAreaDidChangeNotification to know when an instance of AVCaptureDevice has detected a substantial change to the video subject area. This notification is only sent if you first set subjectAreaChangeMonitoringEnabled to YES.
    ///
    /// This is an [`NSNotificationName`] (a static `NSString`); the symbol is resolved against the AVFoundation framework at link/load time.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicesubjectareadidchangenotification?language=objc)
    pub static AVCaptureDeviceSubjectAreaDidChangeNotification: &'static NSNotificationName;
}
46
extern_class!(
    /// An AVCaptureDevice represents a physical device that provides realtime input media data, such as video and audio.
    ///
    ///
    /// Each instance of AVCaptureDevice corresponds to a device, such as a camera or microphone. Instances of AVCaptureDevice cannot be created directly. An array of all currently available devices can also be obtained using the AVCaptureDeviceDiscoverySession. Devices can provide one or more streams of a given media type. Applications can search for devices matching desired criteria by using AVCaptureDeviceDiscoverySession, or may obtain a reference to the default device matching desired criteria by using +[AVCaptureDevice defaultDeviceWithDeviceType:mediaType:position:].
    ///
    /// Instances of AVCaptureDevice can be used to provide media data to an AVCaptureSession by creating an AVCaptureDeviceInput with the device and adding that to the capture session.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevice?language=objc)
    // `super(NSObject)` declares the Objective-C superclass for this binding.
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCaptureDevice;
);
60
extern_conformance!(
    // Mirrors the protocol conformance declared in the Objective-C header;
    // `unsafe` because the conformance is asserted rather than checked by Rust.
    unsafe impl NSObjectProtocol for AVCaptureDevice {}
);
64
impl AVCaptureDevice {
    // NOTE(review): the declarations below are emitted by objc2's `extern_methods!`
    // macro. Each `#[unsafe(method(...))]` attribute names the Objective-C selector
    // the Rust fn forwards to, and `method_family` controls retain/release semantics.
    // All methods are `unsafe` to call because they cross the Rust/Objective-C FFI
    // boundary; the generated signatures themselves carry the nullability and
    // ownership contracts from the framework headers.
    extern_methods!(
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        /// Returns an array of devices currently available for use as media input sources.
        ///
        ///
        /// Returns: An NSArray of AVCaptureDevice instances for each available device.
        ///
        ///
        /// This method returns an array of AVCaptureDevice instances for input devices currently connected and available for capture. The returned array contains all devices that are available at the time the method is called. Applications should observe AVCaptureDeviceWasConnectedNotification and AVCaptureDeviceWasDisconnectedNotification to be notified when the list of available devices has changed.
        #[deprecated = "Use AVCaptureDeviceDiscoverySession instead."]
        #[unsafe(method(devices))]
        #[unsafe(method_family = none)]
        pub unsafe fn devices() -> Retained<NSArray<AVCaptureDevice>>;

        #[cfg(feature = "AVMediaFormat")]
        /// Returns an array of devices currently available for use as sources of media with the given media type.
        ///
        ///
        /// Parameter `mediaType`: The media type, such as AVMediaTypeVideo, AVMediaTypeAudio, or AVMediaTypeMuxed, supported by each returned device.
        ///
        /// Returns: An NSArray of AVCaptureDevice instances for each available device.
        ///
        ///
        /// This method returns an array of AVCaptureDevice instances for input devices currently connected and available for capture that provide media of the given type. Media type constants are defined in AVMediaFormat.h. The returned array contains all devices that are available at the time the method is called. Applications should observe AVCaptureDeviceWasConnectedNotification and AVCaptureDeviceWasDisconnectedNotification to be notified when the list of available devices has changed.
        #[deprecated = "Use AVCaptureDeviceDiscoverySession instead."]
        #[unsafe(method(devicesWithMediaType:))]
        #[unsafe(method_family = none)]
        pub unsafe fn devicesWithMediaType(
            media_type: &AVMediaType,
        ) -> Retained<NSArray<AVCaptureDevice>>;

        #[cfg(feature = "AVMediaFormat")]
        /// Returns an AVCaptureDevice instance for the default device of the given media type.
        ///
        ///
        /// Parameter `mediaType`: The media type, such as AVMediaTypeVideo, AVMediaTypeAudio, or AVMediaTypeMuxed, supported by the returned device.
        ///
        /// Returns: The default device with the given media type, or nil if no device with that media type exists.
        ///
        ///
        /// This method returns the default device of the given media type currently available on the system. For example, for AVMediaTypeVideo, this method will return the built in camera that is primarily used for capture and recording. Media type constants are defined in AVMediaFormat.h.
        #[unsafe(method(defaultDeviceWithMediaType:))]
        #[unsafe(method_family = none)]
        pub unsafe fn defaultDeviceWithMediaType(
            media_type: &AVMediaType,
        ) -> Option<Retained<AVCaptureDevice>>;

        /// Returns an AVCaptureDevice instance with the given unique ID.
        ///
        ///
        /// Parameter `deviceUniqueID`: The unique ID of the device instance to be returned.
        ///
        /// Returns: An AVCaptureDevice instance with the given unique ID, or nil if no device with that unique ID is available.
        ///
        ///
        /// Every available capture device has a unique ID that persists on one system across device connections and disconnections, application restarts, and reboots of the system itself. This method can be used to recall or track the status of a specific device whose unique ID has previously been saved.
        #[unsafe(method(deviceWithUniqueID:))]
        #[unsafe(method_family = none)]
        pub unsafe fn deviceWithUniqueID(
            device_unique_id: &NSString,
        ) -> Option<Retained<AVCaptureDevice>>;

        /// An ID unique to the model of device corresponding to the receiver.
        ///
        ///
        /// Every available capture device has a unique ID that persists on one system across device connections and disconnections, application restarts, and reboots of the system itself. Applications can store the value returned by this property to recall or track the status of a specific device in the future.
        #[unsafe(method(uniqueID))]
        #[unsafe(method_family = none)]
        pub unsafe fn uniqueID(&self) -> Retained<NSString>;

        /// The model ID of the receiver.
        ///
        ///
        /// The value of this property is an identifier unique to all devices of the same model. The value is persistent across device connections and disconnections, and across different systems. For example, the model ID of the camera built in to two identical iPhone models will be the same even though they are different physical devices.
        #[unsafe(method(modelID))]
        #[unsafe(method_family = none)]
        pub unsafe fn modelID(&self) -> Retained<NSString>;

        /// A localized human-readable name for the receiver.
        ///
        ///
        /// This property can be used for displaying the name of a capture device in a user interface.
        #[unsafe(method(localizedName))]
        #[unsafe(method_family = none)]
        pub unsafe fn localizedName(&self) -> Retained<NSString>;

        /// The human-readable manufacturer name for the receiver.
        ///
        ///
        /// This property can be used to identify capture devices from a particular manufacturer. All Apple devices return "Apple Inc.". Devices from third party manufacturers may return an empty string.
        #[unsafe(method(manufacturer))]
        #[unsafe(method_family = none)]
        pub unsafe fn manufacturer(&self) -> Retained<NSString>;

        /// The transport type of the receiver (e.g. USB, PCI, etc).
        ///
        ///
        /// This property can be used to discover the transport type of a capture device. Transport types are defined in
        /// `<IOKit/audio/IOAudioTypes.h>` as kIOAudioDeviceTransportType*.
        #[unsafe(method(transportType))]
        #[unsafe(method_family = none)]
        pub unsafe fn transportType(&self) -> i32;

        #[cfg(feature = "AVMediaFormat")]
        /// Returns whether the receiver provides media with the given media type.
        ///
        ///
        /// Parameter `mediaType`: A media type, such as AVMediaTypeVideo, AVMediaTypeAudio, or AVMediaTypeMuxed.
        ///
        /// Returns: YES if the device outputs the given media type, NO otherwise.
        ///
        ///
        /// Media type constants are defined in AVMediaFormat.h.
        #[unsafe(method(hasMediaType:))]
        #[unsafe(method_family = none)]
        pub unsafe fn hasMediaType(&self, media_type: &AVMediaType) -> bool;

        /// Requests exclusive access to configure device hardware properties.
        ///
        ///
        /// Parameter `outError`: On return, if the device could not be locked, points to an NSError describing why the failure occurred.
        ///
        /// Returns: A BOOL indicating whether the device was successfully locked for configuration.
        ///
        ///
        /// In order to set hardware properties on an AVCaptureDevice, such as focusMode and exposureMode, clients must first acquire a lock on the device. Clients should only hold the device lock if they require settable device properties to remain unchanged. Holding the device lock unnecessarily may degrade capture quality in other applications sharing the device.
        // The trailing `_` in the selector marks the `NSError**` out-parameter,
        // which objc2 surfaces as the `Result` return value.
        #[unsafe(method(lockForConfiguration:_))]
        #[unsafe(method_family = none)]
        pub unsafe fn lockForConfiguration(&self) -> Result<(), Retained<NSError>>;

        /// Release exclusive control over device hardware properties.
        ///
        ///
        /// This method should be called to match an invocation of lockForConfiguration: when an application no longer needs to keep device hardware properties from changing automatically.
        #[unsafe(method(unlockForConfiguration))]
        #[unsafe(method_family = none)]
        pub unsafe fn unlockForConfiguration(&self);

        #[cfg(feature = "AVCaptureSessionPreset")]
        /// Returns whether the receiver can be used in an AVCaptureSession configured with the given preset.
        ///
        ///
        /// Parameter `preset`: An AVCaptureSession preset.
        ///
        /// Returns: YES if the receiver can be used with the given preset, NO otherwise.
        ///
        ///
        /// An AVCaptureSession instance can be associated with a preset that configures its inputs and outputs to fulfill common use cases. This method can be used to determine if the receiver can be used in a capture session with the given preset. Presets are defined in AVCaptureSession.h.
        #[unsafe(method(supportsAVCaptureSessionPreset:))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsAVCaptureSessionPreset(
            &self,
            preset: &AVCaptureSessionPreset,
        ) -> bool;

        /// Indicates whether the device is connected and available to the system.
        ///
        ///
        /// The value of this property is a BOOL indicating whether the device represented by the receiver is connected and available for use as a capture device. Clients can key value observe the value of this property to be notified when a device is no longer available. When the value of this property becomes NO for a given instance, it will not become YES again. If the same physical device again becomes available to the system, it will be represented using a new instance of AVCaptureDevice.
        #[unsafe(method(isConnected))]
        #[unsafe(method_family = none)]
        pub unsafe fn isConnected(&self) -> bool;

        /// Indicates whether the device is in use by another application.
        ///
        ///
        /// The value of this property is a BOOL indicating whether the device represented by the receiver is in use by another application. Clients can key value observe the value of this property to be notified when another app starts or stops using this device.
        #[unsafe(method(isInUseByAnotherApplication))]
        #[unsafe(method_family = none)]
        pub unsafe fn isInUseByAnotherApplication(&self) -> bool;

        /// Indicates whether the device is suspended.
        ///
        ///
        /// The value of this property is a BOOL indicating whether the device represented by the receiver is currently suspended. Some devices disallow data capture due to a feature on the device. For example, isSuspended returns YES for the external iSight when its privacy iris is closed, or for the internal iSight on a notebook when the notebook's display is closed. Clients can key value observe the value of this property to be notified when the device becomes suspended or unsuspended.
        #[unsafe(method(isSuspended))]
        #[unsafe(method_family = none)]
        pub unsafe fn isSuspended(&self) -> bool;

        /// An array of AVCaptureDevice objects physically linked to the receiver.
        ///
        ///
        /// The value of this property is an array of AVCaptureDevice objects that are a part of the same physical device as the receiver. For example, for the external iSight camera, linkedDevices returns an array containing an AVCaptureDevice for the external iSight microphone.
        #[unsafe(method(linkedDevices))]
        #[unsafe(method_family = none)]
        pub unsafe fn linkedDevices(&self) -> Retained<NSArray<AVCaptureDevice>>;

        /// An array of AVCaptureDeviceFormat objects supported by the receiver.
        ///
        ///
        /// This property can be used to enumerate the formats natively supported by the receiver. The capture device's activeFormat property may be set to one of the formats in this array. Clients can observe automatic changes to the receiver's formats by key value observing this property.
        #[unsafe(method(formats))]
        #[unsafe(method_family = none)]
        pub unsafe fn formats(&self) -> Retained<NSArray<AVCaptureDeviceFormat>>;

        /// The currently active format of the receiver.
        ///
        ///
        /// This property can be used to get or set the currently active device format.
        ///
        /// -setActiveFormat: throws an NSInvalidArgumentException if set to a format not present in the formats array.
        ///
        /// -setActiveFormat: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
        ///
        /// Clients can observe automatic changes to the receiver's activeFormat by key value observing this property.
        ///
        /// On iOS, use of AVCaptureDevice's setActiveFormat: and AVCaptureSession's setSessionPreset: are mutually exclusive. If you set a capture device's active format, the session to which it is attached changes its preset to AVCaptureSessionPresetInputPriority. Likewise if you set the AVCaptureSession's sessionPreset property, the session assumes control of its input devices, and configures their activeFormat appropriately. Note that audio devices do not expose any user-configurable formats on iOS. To configure audio input on iOS, you should use the AVAudioSession APIs instead (see AVAudioSession.h).
        ///
        /// The activeFormat, activeVideoMinFrameDuration, and activeVideoMaxFrameDuration properties may be set simultaneously by using AVCaptureSession's begin/commitConfiguration methods:
        ///
        /// ```text
        /// [session beginConfiguration]; // the session to which the receiver's AVCaptureDeviceInput is added.
        /// if ( [device lockForConfiguration:&error] ) {
        ///     [device setActiveFormat:newFormat];
        ///     [device setActiveVideoMinFrameDuration:newMinDuration];
        ///     [device setActiveVideoMaxFrameDuration:newMaxDuration];
        ///     [device unlockForConfiguration];
        /// }
        /// [session commitConfiguration]; // The new format and frame rates are applied together in commitConfiguration
        /// ```
        ///
        /// Note that when configuring a session to use an active format intended for high resolution still photography and applying one or more of the following operations to an AVCaptureVideoDataOutput, the system may not meet the target framerate: zoom, orientation changes, format conversion.
        #[unsafe(method(activeFormat))]
        #[unsafe(method_family = none)]
        pub unsafe fn activeFormat(&self) -> Retained<AVCaptureDeviceFormat>;

        /// Setter for [`activeFormat`][Self::activeFormat].
        #[unsafe(method(setActiveFormat:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setActiveFormat(&self, active_format: &AVCaptureDeviceFormat);

        #[cfg(feature = "objc2-core-media")]
        /// A property indicating the receiver's current active minimum frame duration (the reciprocal of its max frame rate).
        ///
        ///
        /// An AVCaptureDevice's activeVideoMinFrameDuration property is the reciprocal of its active maximum frame rate. To limit the max frame rate of the capture device, clients may set this property to a value supported by the receiver's activeFormat (see AVCaptureDeviceFormat's videoSupportedFrameRateRanges property). Clients may set this property's value to kCMTimeInvalid to return activeVideoMinFrameDuration to its default value for the given activeFormat.
        ///
        /// -setActiveVideoMinFrameDuration: throws an NSInvalidArgumentException if set to an unsupported value.
        ///
        /// -setActiveVideoMinFrameDuration: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
        ///
        /// Clients can observe automatic changes to the receiver's activeVideoMinFrameDuration by key value observing this property.
        ///
        /// On iOS, the receiver's activeVideoMinFrameDuration resets to its default value under the following conditions:
        /// - The receiver's activeFormat changes
        /// - The receiver's AVCaptureDeviceInput's session's sessionPreset changes
        /// - The receiver's AVCaptureDeviceInput is added to a session
        ///
        /// When exposureMode is AVCaptureExposureModeCustom, setting the activeVideoMinFrameDuration affects max frame rate, but not exposureDuration. You may use setExposureModeCustomWithDuration:ISO:completionHandler: to set a shorter exposureDuration than your activeVideoMinFrameDuration, if desired.
        ///
        /// When autoVideoFrameRateEnabled is true, setting activeVideoMinFrameDuration throws an NSInvalidArgumentException.
        #[unsafe(method(activeVideoMinFrameDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn activeVideoMinFrameDuration(&self) -> CMTime;

        #[cfg(feature = "objc2-core-media")]
        /// Setter for [`activeVideoMinFrameDuration`][Self::activeVideoMinFrameDuration].
        #[unsafe(method(setActiveVideoMinFrameDuration:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setActiveVideoMinFrameDuration(
            &self,
            active_video_min_frame_duration: CMTime,
        );

        #[cfg(feature = "objc2-core-media")]
        /// A property indicating the receiver's current active maximum frame duration (the reciprocal of its min frame rate).
        ///
        ///
        /// An AVCaptureDevice's activeVideoMaxFrameDuration property is the reciprocal of its active minimum frame rate. To limit the min frame rate of the capture device, clients may set this property to a value supported by the receiver's activeFormat (see AVCaptureDeviceFormat's videoSupportedFrameRateRanges property). Clients may set this property's value to kCMTimeInvalid to return activeVideoMaxFrameDuration to its default value for the given activeFormat.
        ///
        /// -setActiveVideoMaxFrameDuration: throws an NSInvalidArgumentException if set to an unsupported value.
        ///
        /// -setActiveVideoMaxFrameDuration: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
        ///
        /// Clients can observe automatic changes to the receiver's activeVideoMaxFrameDuration by key value observing this property.
        ///
        /// On iOS, the receiver's activeVideoMaxFrameDuration resets to its default value under the following conditions:
        /// - The receiver's activeFormat changes
        /// - The receiver's AVCaptureDeviceInput's session's sessionPreset changes
        /// - The receiver's AVCaptureDeviceInput is added to a session
        ///
        /// When exposureMode is AVCaptureExposureModeCustom, frame rate and exposure duration are interrelated. If you call setExposureModeCustomWithDuration:ISO:completionHandler: with an exposureDuration longer than the current activeVideoMaxFrameDuration, the activeVideoMaxFrameDuration will be lengthened to accommodate the longer exposure time. Setting a shorter exposure duration does not automatically change the activeVideoMinFrameDuration or activeVideoMaxFrameDuration. To explicitly increase the frame rate in custom exposure mode, you must set the activeVideoMaxFrameDuration to a shorter value. If your new max frame duration is shorter than the current exposureDuration, the exposureDuration will shorten as well to accommodate the new frame rate.
        ///
        /// When autoVideoFrameRateEnabled is true, setting activeVideoMaxFrameDuration throws an NSInvalidArgumentException.
        #[unsafe(method(activeVideoMaxFrameDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn activeVideoMaxFrameDuration(&self) -> CMTime;

        #[cfg(feature = "objc2-core-media")]
        /// Setter for [`activeVideoMaxFrameDuration`][Self::activeVideoMaxFrameDuration].
        #[unsafe(method(setActiveVideoMaxFrameDuration:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setActiveVideoMaxFrameDuration(
            &self,
            active_video_max_frame_duration: CMTime,
        );

        /// Whether the device's video frame rate (expressed as a duration) is currently locked.
        ///
        /// Returns `true` when an ``AVCaptureDeviceInput`` associated with the device has its ``AVCaptureDeviceInput/activeLockedVideoFrameDuration`` property set to something other than `kCMTimeInvalid`. See ``AVCaptureDeviceInput/activeLockedVideoFrameDuration`` for more information on video frame duration locking.
        #[unsafe(method(isVideoFrameDurationLocked))]
        #[unsafe(method_family = none)]
        pub unsafe fn isVideoFrameDurationLocked(&self) -> bool;

        #[cfg(feature = "objc2-core-media")]
        /// The maximum frame rate (expressed as a minimum duration) that can be set on an input associated with this device.
        ///
        /// `kCMTimeInvalid` is returned when the device or its current configuration does not support locked frame rate. Use ``AVCaptureDeviceInput/activeLockedVideoFrameDuration`` to set the locked frame rate on the input.
        #[unsafe(method(minSupportedLockedVideoFrameDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn minSupportedLockedVideoFrameDuration(&self) -> CMTime;

        /// Whether the device is following an external sync device.
        ///
        /// See ``AVCaptureDeviceInput/followExternalSyncDevice:videoFrameDuration:delegate:`` for more information on external sync.
        #[unsafe(method(isFollowingExternalSyncDevice))]
        #[unsafe(method_family = none)]
        pub unsafe fn isFollowingExternalSyncDevice(&self) -> bool;

        #[cfg(feature = "objc2-core-media")]
        /// The minimum frame duration that can be passed as the `videoFrameDuration` when directing your device input to follow an external sync device.
        ///
        /// Use this property as the minimum allowable frame duration to pass to ``AVCaptureDeviceInput/follow:externalSyncDevice:videoFrameDuration:delegate:`` when you want to follow an external sync device. This property returns `kCMTimeInvalid` when the device's' current configuration does not support external sync device following.
        #[unsafe(method(minSupportedExternalSyncFrameDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn minSupportedExternalSyncFrameDuration(&self) -> CMTime;

        /// Indicates whether the receiver should enable auto video frame rate.
        ///
        /// When you enable this property, the device automatically adjusts the active frame rate, depending on light level. Under low light conditions, it decreases the frame rate to properly expose the scene. For formats with a maximum frame rate of 30 fps, the device switches the frame rate between 30 - 24. For formats with a maximum frame rate of 60 fps, the device switches the frame rate between 60 - 30 - 24.
        ///
        /// Setting this property throws an `NSInvalidArgumentException` if the active format's ``AVCaptureDeviceFormat/autoVideoFrameRateSupported`` returns `false`. When you change the device's active format, this property resets to its default value of `false`.
        ///
        /// If you set this property to `true`, frame rate is under device control, and you may not set ``activeVideoMinFrameDuration`` or ``activeVideoMaxFrameDuration``. Doing so throws an `NSInvalidArgumentException`.
        ///
        /// - Note: Setting this property to `true` throws an `NSInvalidArgumentException` if ``videoFrameDurationLocked`` or ``followingExternalSyncDevice`` are `true`.
        #[unsafe(method(isAutoVideoFrameRateEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoVideoFrameRateEnabled(&self) -> bool;

        /// Setter for [`isAutoVideoFrameRateEnabled`][Self::isAutoVideoFrameRateEnabled].
        #[unsafe(method(setAutoVideoFrameRateEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoVideoFrameRateEnabled(&self, auto_video_frame_rate_enabled: bool);

        /// An array of AVCaptureDeviceInputSource objects supported by the receiver.
        ///
        ///
        /// Some devices can capture data from one of multiple data sources (different input jacks on the same audio device, for example). For devices with multiple possible data sources, inputSources can be used to enumerate the possible choices. Clients can observe automatic changes to the receiver's inputSources by key value observing this property.
        #[unsafe(method(inputSources))]
        #[unsafe(method_family = none)]
        pub unsafe fn inputSources(&self) -> Retained<NSArray<AVCaptureDeviceInputSource>>;

        /// The currently active input source of the receiver.
        ///
        ///
        /// This property can be used to get or set the currently active device input source. -setActiveInputSource: throws an NSInvalidArgumentException if set to a value not present in the inputSources array. -setActiveInputSource: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's activeInputSource by key value observing this property.
        #[unsafe(method(activeInputSource))]
        #[unsafe(method_family = none)]
        pub unsafe fn activeInputSource(&self) -> Option<Retained<AVCaptureDeviceInputSource>>;

        /// Setter for [`activeInputSource`][Self::activeInputSource].
        #[unsafe(method(setActiveInputSource:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setActiveInputSource(
            &self,
            active_input_source: Option<&AVCaptureDeviceInputSource>,
        );
    );
}
444
/// Constants indicating the physical position of an AVCaptureDevice's hardware on the system.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedeviceposition?language=objc)
// NS_ENUM — modeled as a newtype over NSInteger rather than a Rust enum so that
// unknown values coming from the framework remain representable.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureDevicePosition(pub NSInteger);
impl AVCaptureDevicePosition {
    /// Indicates that the device's position relative to the system hardware is unspecified.
    #[doc(alias = "AVCaptureDevicePositionUnspecified")]
    pub const Unspecified: Self = Self(0);
    /// Indicates that the device is physically located on the back of the system hardware.
    #[doc(alias = "AVCaptureDevicePositionBack")]
    pub const Back: Self = Self(1);
    /// Indicates that the device is physically located on the front of the system hardware.
    #[doc(alias = "AVCaptureDevicePositionFront")]
    pub const Front: Self = Self(2);
}
467
// SAFETY: AVCaptureDevicePosition is `#[repr(transparent)]` over NSInteger, so
// its Objective-C type encoding is exactly NSInteger's.
unsafe impl Encode for AVCaptureDevicePosition {
    const ENCODING: Encoding = NSInteger::ENCODING;
}
471
// SAFETY: a reference to AVCaptureDevicePosition is encoded as a pointer to the
// value encoding above, mirroring `NSInteger *` on the Objective-C side.
unsafe impl RefEncode for AVCaptureDevicePosition {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
475
/// AVCaptureDevicePosition.
impl AVCaptureDevice {
    extern_methods!(
        /// Indicates the physical position of an AVCaptureDevice's hardware on the system.
        ///
        ///
        /// The value of this property is an AVCaptureDevicePosition indicating where the receiver's device is physically located on the system hardware.
        ///
        /// Binding for the Objective-C `position` property getter.
        #[unsafe(method(position))]
        #[unsafe(method_family = none)]
        pub unsafe fn position(&self) -> AVCaptureDevicePosition;
    );
}
488
/// AVCaptureDeviceType string constants
///
///
/// The AVCaptureDeviceType string constants are intended to be used in combination with the AVCaptureDeviceDiscoverySession class to obtain a list of devices matching certain search criteria.
///
/// This is a typed string (`NS_TYPED_ENUM`): values are the `AVCaptureDeviceType*`
/// `NSString` constants declared in this file, not arbitrary strings.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetype?language=objc)
// NS_TYPED_ENUM
pub type AVCaptureDeviceType = NSString;
497
extern "C" {
    /// An external device type. On iPad, external devices are those that conform to the UVC (USB Video Class) specification.
    ///
    ///
    /// Starting in Mac Catalyst 17.0, apps may opt in for using AVCaptureDeviceTypeExternal by adding the following key to their Info.plist:
    ///
    /// ```text
    /// <key>NSCameraUseExternalDeviceType</key>
    /// <true/>
    /// ```
    ///
    /// Otherwise, external cameras on Mac Catalyst report that their device type is AVCaptureDeviceTypeBuiltInWideAngleCamera.
    ///
    /// Prior to visionOS 3.0, your app must have the `com.apple.developer.avfoundation.uvc-device-access` entitlement in order to discover and use devices of type `AVCaptureDeviceTypeExternal` on visionOS.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetypeexternal?language=objc)
    pub static AVCaptureDeviceTypeExternal: &'static AVCaptureDeviceType;
}
516
extern "C" {
    /// A microphone. On iOS and tvOS, only one AVCaptureDevice of type
    /// AVCaptureDeviceTypeMicrophone is exposed to the system. The audio routing
    /// subsystem decides which physical microphone to use, be it a built in
    /// microphone, a wired headset, an external microphone, etc. The microphone
    /// device's `localizedName` will change as the audio subsystem switches to a
    /// different physical device.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetypemicrophone?language=objc)
    pub static AVCaptureDeviceTypeMicrophone: &'static AVCaptureDeviceType;
}

extern "C" {
    /// A built-in wide angle camera device. These devices are suitable for general-purpose use.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetypebuiltinwideanglecamera?language=objc)
    pub static AVCaptureDeviceTypeBuiltInWideAngleCamera: &'static AVCaptureDeviceType;
}

extern "C" {
    /// A built-in camera device with a longer focal length than a wide angle camera.
    /// Note that devices of this type may only be discovered using an AVCaptureDeviceDiscoverySession.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetypebuiltintelephotocamera?language=objc)
    pub static AVCaptureDeviceTypeBuiltInTelephotoCamera: &'static AVCaptureDeviceType;
}

extern "C" {
    /// A built-in camera device with a shorter focal length than a wide angle camera.
    /// Note that devices of this type may only be discovered using an AVCaptureDeviceDiscoverySession.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetypebuiltinultrawidecamera?language=objc)
    pub static AVCaptureDeviceTypeBuiltInUltraWideCamera: &'static AVCaptureDeviceType;
}
544
extern "C" {
    /// A device that consists of two fixed focal length cameras, one wide and one
    /// telephoto. Note that devices of this type may only be discovered using an
    /// AVCaptureDeviceDiscoverySession or
    /// -[AVCaptureDevice defaultDeviceWithDeviceType:mediaType:position:].
    ///
    /// A device of this device type supports the following features:
    /// - Auto switching from one camera to the other when zoom factor, light level, and focus position allow this.
    /// - Higher quality zoom for still captures by fusing images from both cameras.
    /// - Depth data delivery by measuring the disparity of matched features between the wide and telephoto cameras.
    /// - Delivery of photos from constituent devices (wide and telephoto cameras) via a single photo capture request.
    ///
    /// A device of this device type does not support the following features:
    /// - AVCaptureExposureModeCustom and manual exposure bracketing.
    /// - Locking focus with a lens position other than AVCaptureLensPositionCurrent.
    /// - Locking auto white balance with device white balance gains other than AVCaptureWhiteBalanceGainsCurrent.
    ///
    /// Even when locked, exposure duration, ISO, aperture, white balance gains, or lens position may change when the device switches from one camera to the other. The overall exposure, white balance, and focus position however should be consistent.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetypebuiltindualcamera?language=objc)
    pub static AVCaptureDeviceTypeBuiltInDualCamera: &'static AVCaptureDeviceType;
}

extern "C" {
    /// A device that consists of two fixed focal length cameras, one ultra wide and
    /// one wide angle. Note that devices of this type may only be discovered using an
    /// AVCaptureDeviceDiscoverySession or
    /// -[AVCaptureDevice defaultDeviceWithDeviceType:mediaType:position:].
    ///
    /// A device of this device type supports the following features:
    /// - Auto switching from one camera to the other when zoom factor, light level, and focus position allow this.
    /// - Depth data delivery by measuring the disparity of matched features between the ultra wide and wide cameras.
    /// - Delivery of photos from constituent devices (ultra wide and wide) via a single photo capture request.
    ///
    /// A device of this device type does not support the following features:
    /// - AVCaptureExposureModeCustom and manual exposure bracketing.
    /// - Locking focus with a lens position other than AVCaptureLensPositionCurrent.
    /// - Locking auto white balance with device white balance gains other than AVCaptureWhiteBalanceGainsCurrent.
    ///
    /// Even when locked, exposure duration, ISO, aperture, white balance gains, or lens position may change when the device switches from one camera to the other. The overall exposure, white balance, and focus position however should be consistent.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetypebuiltindualwidecamera?language=objc)
    pub static AVCaptureDeviceTypeBuiltInDualWideCamera: &'static AVCaptureDeviceType;
}

extern "C" {
    /// A device that consists of three fixed focal length cameras, one ultra wide,
    /// one wide angle, and one telephoto. Note that devices of this type may only be
    /// discovered using an AVCaptureDeviceDiscoverySession or
    /// -[AVCaptureDevice defaultDeviceWithDeviceType:mediaType:position:].
    ///
    /// A device of this device type supports the following features:
    /// - Auto switching from one camera to the other when zoom factor, light level, and focus position allow this.
    /// - Delivery of photos from constituent devices (ultra wide, wide and telephoto cameras) via a single photo capture request.
    ///
    /// A device of this device type does not support the following features:
    /// - AVCaptureExposureModeCustom and manual exposure bracketing.
    /// - Locking focus with a lens position other than AVCaptureLensPositionCurrent.
    /// - Locking auto white balance with device white balance gains other than AVCaptureWhiteBalanceGainsCurrent.
    ///
    /// Even when locked, exposure duration, ISO, aperture, white balance gains, or lens position may change when the device switches from one camera to the other. The overall exposure, white balance, and focus position however should be consistent.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetypebuiltintriplecamera?language=objc)
    pub static AVCaptureDeviceTypeBuiltInTripleCamera: &'static AVCaptureDeviceType;
}

extern "C" {
    /// A device that consists of two cameras, one YUV and one Infrared. The infrared
    /// camera provides high quality depth information that is synchronized and
    /// perspective corrected to frames produced by the YUV camera. While the
    /// resolution of the depth data and YUV frames may differ, their field of view
    /// and aspect ratio always match. Note that devices of this type may only be
    /// discovered using an AVCaptureDeviceDiscoverySession or
    /// -[AVCaptureDevice defaultDeviceWithDeviceType:mediaType:position:].
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetypebuiltintruedepthcamera?language=objc)
    pub static AVCaptureDeviceTypeBuiltInTrueDepthCamera: &'static AVCaptureDeviceType;
}

extern "C" {
    /// A device that consists of two cameras, one YUV and one LiDAR. The LiDAR camera
    /// provides high quality, high accuracy depth information by measuring the round
    /// trip of an artificial light signal emitted by a laser. The depth is
    /// synchronized and perspective corrected to frames produced by the paired YUV
    /// camera. While the resolution of the depth data and YUV frames may differ,
    /// their field of view and aspect ratio always match. Note that devices of this
    /// type may only be discovered using an AVCaptureDeviceDiscoverySession or
    /// -[AVCaptureDevice defaultDeviceWithDeviceType:mediaType:position:].
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetypebuiltinlidardepthcamera?language=objc)
    pub static AVCaptureDeviceTypeBuiltInLiDARDepthCamera: &'static AVCaptureDeviceType;
}
615
extern "C" {
    /// A continuity camera device. These devices are suitable for general-purpose use.
    /// Note that devices of this type may only be discovered using an
    /// AVCaptureDeviceDiscoverySession or
    /// -[AVCaptureDevice defaultDeviceWithDeviceType:mediaType:position:].
    ///
    ///
    /// Starting in macOS 14.0 and Mac Catalyst 17.0, apps may opt in for using AVCaptureDeviceTypeContinuityCamera by adding the following key to their Info.plist:
    ///
    /// ```text
    /// <key>NSCameraUseContinuityCameraDeviceType</key>
    /// <true/>
    /// ```
    ///
    /// Otherwise, continuity cameras on macOS and Mac Catalyst report that their device type is AVCaptureDeviceTypeBuiltInWideAngleCamera.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetypecontinuitycamera?language=objc)
    pub static AVCaptureDeviceTypeContinuityCamera: &'static AVCaptureDeviceType;
}
633
extern "C" {
    /// A distortion-corrected cut out from an ultra wide camera, made to approximate
    /// an overhead camera pointing at a desk. Supports multicam operation.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetypedeskviewcamera?language=objc)
    pub static AVCaptureDeviceTypeDeskViewCamera: &'static AVCaptureDeviceType;
}
640
641extern "C" {
642    /// A deprecated synonym for AVCaptureDeviceTypeExternal. Please use AVCaptureDeviceTypeExternal instead.
643    ///
644    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetypeexternalunknown?language=objc)
645    #[deprecated]
646    pub static AVCaptureDeviceTypeExternalUnknown: &'static AVCaptureDeviceType;
647}
648
649extern "C" {
650    /// A deprecated synonym for AVCaptureDeviceTypeBuiltInDualCamera. Please use AVCaptureDeviceTypeBuiltInDualCamera instead.
651    ///
652    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetypebuiltinduocamera?language=objc)
653    #[deprecated = "Use AVCaptureDeviceTypeBuiltInDualCamera instead."]
654    pub static AVCaptureDeviceTypeBuiltInDuoCamera: &'static AVCaptureDeviceType;
655}
656
657extern "C" {
658    /// A deprecated synonym for AVCaptureDeviceTypeMicrophone. Please use AVCaptureDeviceTypeMicrophone instead.
659    ///
660    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetypebuiltinmicrophone?language=objc)
661    #[deprecated]
662    pub static AVCaptureDeviceTypeBuiltInMicrophone: &'static AVCaptureDeviceType;
663}
664
/// AVCaptureDeviceType.
impl AVCaptureDevice {
    extern_methods!(
        /// The type of the capture device.
        ///
        ///
        /// A capture device's type never changes.
        /// The returned value is an `AVCaptureDeviceType` (a typed `NSString`).
        #[unsafe(method(deviceType))]
        #[unsafe(method_family = none)]
        pub unsafe fn deviceType(&self) -> Retained<AVCaptureDeviceType>;
    );
}
677
/// AVCaptureDefaultDevice.
impl AVCaptureDevice {
    extern_methods!(
        #[cfg(feature = "AVMediaFormat")]
        /// Returns an AVCaptureDevice instance for the default device of the given device type, media type, and position.
        ///
        ///
        /// Parameter `deviceType`: The device type supported by the returned device. It must be a valid AVCaptureDeviceType.
        ///
        /// Parameter `mediaType`: The media type, such as AVMediaTypeVideo, AVMediaTypeAudio, or AVMediaTypeMuxed, supported by the returned device. Pass nil to consider devices with any media type.
        ///
        /// Parameter `position`: The position supported by the returned device. Pass AVCaptureDevicePositionUnspecified to consider devices with any position.
        ///
        /// Returns: The default device with the given device type, media type, and position, or nil if no such device exists.
        ///
        ///
        /// This method returns the default device of the given combination of device type, media type, and position currently available on the system.
        ///
        /// Class-level method (note the lack of a `self` parameter).
        #[unsafe(method(defaultDeviceWithDeviceType:mediaType:position:))]
        #[unsafe(method_family = none)]
        pub unsafe fn defaultDeviceWithDeviceType_mediaType_position(
            device_type: &AVCaptureDeviceType,
            media_type: Option<&AVMediaType>,
            position: AVCaptureDevicePosition,
        ) -> Option<Retained<AVCaptureDevice>>;
    );
}
704
/// AVCaptureDevicePreferredCamera.
impl AVCaptureDevice {
    extern_methods!(
        /// Settable property that specifies a user preferred camera.
        ///
        ///
        /// Setting this property allows an application to persist its user’s preferred camera across app launches and reboots. The property internally maintains a short history, so if your user’s most recent preferred camera is not currently connected, it still reports the next best choice. This property always returns a device that is present. If no camera is available, nil is returned. Setting the property to nil has no effect.
        ///
        /// Class-level accessor (note the lack of a `self` parameter).
        #[unsafe(method(userPreferredCamera))]
        #[unsafe(method_family = none)]
        pub unsafe fn userPreferredCamera() -> Option<Retained<AVCaptureDevice>>;

        /// Setter for [`userPreferredCamera`][Self::userPreferredCamera].
        #[unsafe(method(setUserPreferredCamera:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setUserPreferredCamera(user_preferred_camera: Option<&AVCaptureDevice>);

        /// Specifies the best camera to use as determined by the system.
        ///
        ///
        /// Apple chooses the default value. This property incorporates userPreferredCamera as well as other factors, such as camera suspension and Apple cameras appearing that should be automatically chosen. The property may change spontaneously, such as when the preferred camera goes away. This property always returns a device that is present. If no camera is available, nil is returned.
        ///
        /// Applications that adopt this API should always key-value observe this property and update their AVCaptureSession’s input device to reflect changes to the systemPreferredCamera. The application can still offer users the ability to pick a camera by setting userPreferredCamera, which will cause the systemPreferredCamera API to put the user’s choice first until either another Apple-preferred device becomes available or the machine is rebooted (after which it reverts to its original behavior of returning the internally determined best camera to use).
        ///
        /// If the application wishes to offer users a fully manual camera selection mode in addition to automatic camera selection, it is recommended to call setUserPreferredCamera: each time the user makes a camera selection, but ignore key-value observer updates to systemPreferredCamera while in manual selection mode.
        #[unsafe(method(systemPreferredCamera))]
        #[unsafe(method_family = none)]
        pub unsafe fn systemPreferredCamera() -> Option<Retained<AVCaptureDevice>>;
    );
}
734
/// AVCaptureDeviceSystemPressure.
impl AVCaptureDevice {
    extern_methods!(
        #[cfg(feature = "AVCaptureSystemPressure")]
        /// A key-value observable property indicating the capture device's current system pressure state.
        ///
        ///
        /// This property indicates whether the capture device is currently subject to
        /// an elevated system pressure condition. When system pressure reaches
        /// AVCaptureSystemPressureLevelShutdown, the capture device cannot continue to
        /// provide input, so the AVCaptureSession becomes interrupted until the
        /// pressured state abates. System pressure can be effectively mitigated by
        /// lowering the device's activeVideoMinFrameDuration in response to changes in
        /// the systemPressureState. Clients are encouraged to implement frame rate
        /// throttling to bring system pressure down if their capture use case can
        /// tolerate a reduced frame rate.
        #[unsafe(method(systemPressureState))]
        #[unsafe(method_family = none)]
        pub unsafe fn systemPressureState(&self) -> Retained<AVCaptureSystemPressureState>;
    );
}
748
/// These constants can be used to control when the virtual device is allowed to switch the active primary constituent device.
///
///
/// Virtual devices with multiple constituent video devices (such as the Dual Camera, Dual Wide Camera, or Triple Camera), consist of cameras that each have different properties such as focal length, maximum light sensitivity, and minimum focus distance. One of the constituent video devices is selected as the primary constituent device. For an AVCaptureSession, the primary constituent device produces for all outputs. For an AVCaptureMultiCamSession, the primary constituent device produces for all outputs connected to the virtual device's native AVCaptureDeviceInputPort (where its sourceDeviceType is equal to the virtual device's deviceType).
///
/// When the requested zoom factor can be achieved by multiple constituent cameras (see -virtualDeviceSwitchOverVideoZoomFactors), the virtual device chooses the best camera for the scene. The primary condition for this is the focal length; the camera with the longest focal length requires the least amount of digital upscaling and therefore normally provides the highest image quality. Secondary conditions are focus and exposure; when the scene requires focus or exposure to go beyond the limits of the active primary constituent device, a camera with a shorter focal length may be able to deliver a better quality image. Such a device is called a fallback primary constituent device. For example, a telephoto camera with a minimum focus distance of 40cm is not able to deliver a sharp image when the subject in the scene is closer than 40cm. For such a scene, the virtual device will switch to the wide-angle camera which typically has a smaller minimum focus distance and is able to achieve accurate focus on the subject. In this case the wide-angle camera is the fallback primary constituent device.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureprimaryconstituentdeviceswitchingbehavior?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCapturePrimaryConstituentDeviceSwitchingBehavior(pub NSInteger);
impl AVCapturePrimaryConstituentDeviceSwitchingBehavior {
    /// Indicates that the device does not support constituent device switching. This is reported for cameras that do not have more than one constituent device.
    #[doc(alias = "AVCapturePrimaryConstituentDeviceSwitchingBehaviorUnsupported")]
    pub const Unsupported: Self = Self(0);
    /// Automatically select the best camera for the current scene. In this mode there are no restrictions on when a camera switch can occur.
    #[doc(alias = "AVCapturePrimaryConstituentDeviceSwitchingBehaviorAuto")]
    pub const Auto: Self = Self(1);
    /// Restrict fallback camera selection to certain conditions (see AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditions). Camera switches necessary to satisfy the requested video zoom factor are still allowed without restriction.
    #[doc(alias = "AVCapturePrimaryConstituentDeviceSwitchingBehaviorRestricted")]
    pub const Restricted: Self = Self(2);
    /// Lock camera switching to the active primary constituent device. Note that this restricts the minAvailableVideoZoomFactor to the switch-over zoom factor of the activePrimaryConstituentDevice (as reported in AVCaptureDevice.virtualDeviceSwitchOverVideoZoomFactors).
    #[doc(alias = "AVCapturePrimaryConstituentDeviceSwitchingBehaviorLocked")]
    pub const Locked: Self = Self(3);
}

// SAFETY: `AVCapturePrimaryConstituentDeviceSwitchingBehavior` is
// `#[repr(transparent)]` over `NSInteger`, so it shares `NSInteger`'s
// Objective-C type encoding.
unsafe impl Encode for AVCapturePrimaryConstituentDeviceSwitchingBehavior {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// SAFETY: a pointer to the `#[repr(transparent)]` wrapper has the same layout
// as a pointer to the wrapped `NSInteger`.
unsafe impl RefEncode for AVCapturePrimaryConstituentDeviceSwitchingBehavior {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
788
/// These constants can be used and combined to control the conditions that allow fallback camera selection when the primaryConstituentDeviceSelectionBehavior is set to AVCapturePrimaryConstituentDeviceSwitchingBehaviorRestricted. Note that camera switching necessary to satisfy the requested zoom factor is still allowed.
///
///
/// Whenever triggered by one or more of the enabled conditions, the fallback camera switching waits for exposure and focus to stabilize before deciding which camera to use as the primary constituent device.
///
/// Whenever AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditionVideoZoomChanged is not included in the restricted switching behavior conditions, AVCapturePrimaryConstituentDeviceSwitchingBehaviorRestricted still allows camera selection when a change in video zoom factor makes a camera eligible or ineligible to be selected as the activePrimaryConstituentDevice. When the video zoom factor decreases to below the switch-over zoom factor of the activePrimaryConstituentDevice, a different camera will be selected to satisfy the requested zoom factor. When the video zoom factor increases and crosses a camera's switch-over zoom factor, this camera becomes eligible to be selected as the activePrimaryConstituentDevice. If exposure and focus allow, this camera then becomes the new activePrimaryConstituentDevice. Similar to the AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditionVideoZoomChanged this also waits for exposure and focus to stabilize. Otherwise the activePrimaryConstituentDevice remains unchanged.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureprimaryconstituentdevicerestrictedswitchingbehaviorconditions?language=objc)
// NS_OPTIONS
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditions(pub NSUInteger);
bitflags::bitflags! {
    impl AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditions: NSUInteger {
        /// Disallow fallback switching.
        #[doc(alias = "AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditionNone")]
        const None = 0;
        /// Restrict fallback camera switching to when the video zoom factor changes, either through AVCaptureDevice.videoZoomFactor or -[AVCaptureDevice rampToVideoZoomFactor:withRate:]. Note that any change in video zoom factor will allow a switch to a fallback camera, not just changes across switch-over zoom factors.
        #[doc(alias = "AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditionVideoZoomChanged")]
        const VideoZoomChanged = 1<<0;
        /// Restrict fallback camera switches to when AVCaptureDevice.focusMode is set.
        #[doc(alias = "AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditionFocusModeChanged")]
        const FocusModeChanged = 1<<1;
        /// Restrict fallback camera switches to when AVCaptureDevice.exposureMode is set.
        #[doc(alias = "AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditionExposureModeChanged")]
        const ExposureModeChanged = 1<<2;
    }
}

// SAFETY: `AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditions`
// is `#[repr(transparent)]` over `NSUInteger`, so it shares `NSUInteger`'s
// Objective-C type encoding.
unsafe impl Encode for AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditions {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// SAFETY: a pointer to the `#[repr(transparent)]` wrapper has the same layout
// as a pointer to the wrapped `NSUInteger`.
unsafe impl RefEncode for AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditions {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
830
831/// AVCaptureDeviceVirtual.
832impl AVCaptureDevice {
833    extern_methods!(
834        /// A property indicating whether the receiver is a virtual device consisting of constituent physical devices.
835        ///
836        ///
837        /// Two examples of virtual devices are:
838        /// The Dual Camera, which supports seamlessly switching between a wide and telephoto camera while zooming and generating depth data from the disparities between the different points of view of the physical cameras.
839        /// The TrueDepth Camera, which generates depth data from disparities between a YUV camera and an Infrared camera pointed in the same direction.
840        #[unsafe(method(isVirtualDevice))]
841        #[unsafe(method_family = none)]
842        pub unsafe fn isVirtualDevice(&self) -> bool;
843
844        /// An array of constituent physical devices comprising a virtual device.
845        ///
846        ///
847        /// When called on a device for which virtualDevice == NO, an empty array is returned.
848        #[unsafe(method(constituentDevices))]
849        #[unsafe(method_family = none)]
850        pub unsafe fn constituentDevices(&self) -> Retained<NSArray<AVCaptureDevice>>;
851
852        /// An array of video zoom factors at or above which a virtual device (such as the Dual Camera) may switch to its next constituent device.
853        ///
854        ///
855        /// This array contains zoom factors at which one of the constituent device's field of view matches the next constituent device's full field of view. The number of switch over video zoom factors is always one less than the count of the constituentDevices property, and the factors progress in the same order as the devices listed in that property. On non-virtual devices this property returns an empty array.
856        #[unsafe(method(virtualDeviceSwitchOverVideoZoomFactors))]
857        #[unsafe(method_family = none)]
858        pub unsafe fn virtualDeviceSwitchOverVideoZoomFactors(&self)
859            -> Retained<NSArray<NSNumber>>;
860
861        /// The switching behavior and conditions, unless overwritten via -[AVCaptureMovieFileOutput setPrimaryConstituentDeviceSwitchingBehavior:restrictedSwitchingBehaviorConditions].
862        ///
863        /// Parameter `switchingBehavior`: The desired switching behavior.
864        ///
865        /// Parameter `restrictedSwitchingBehaviorConditions`: The desired conditions for restricting camera switching. This must be set to AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditionNone whenever switchingBehavior is not equal to AVCapturePrimaryConstituentDeviceSwitchingBehaviorRestricted.
866        ///
867        ///
868        /// The switching behavior may be overridden on the AVCaptureMovieFileOutput while recording (see -[AVCaptureMovieFileOutput setPrimaryConstituentDeviceSwitchingBehavior:restrictedSwitchingBehaviorConditions]). This method throws an NSInvalidArgumentException if constituent device switching is not supported by the receiver or if restrictedSwitchingBehaviorConditions is not equal to AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditionNone and switchingBehavior is not equal to AVCapturePrimaryConstituentDeviceSwitchingBehaviorRestricted.
869        #[unsafe(method(setPrimaryConstituentDeviceSwitchingBehavior:restrictedSwitchingBehaviorConditions:))]
870        #[unsafe(method_family = none)]
871        pub unsafe fn setPrimaryConstituentDeviceSwitchingBehavior_restrictedSwitchingBehaviorConditions(
872            &self,
873            switching_behavior: AVCapturePrimaryConstituentDeviceSwitchingBehavior,
874            restricted_switching_behavior_conditions: AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditions,
875        );
876
877        /// The primaryConstituentDeviceSwitchingBehavior as set by -[AVCaptureDevice setPrimaryConstituentDeviceSwitchingBehavior:restrictedSwitchingBehaviorConditions:].
878        ///
879        ///
880        /// By default, this property is set to AVCapturePrimaryConstituentDeviceSwitchingBehaviorAuto for AVCaptureDevices that support it.  This property is key-value observable.
881        #[unsafe(method(primaryConstituentDeviceSwitchingBehavior))]
882        #[unsafe(method_family = none)]
883        pub unsafe fn primaryConstituentDeviceSwitchingBehavior(
884            &self,
885        ) -> AVCapturePrimaryConstituentDeviceSwitchingBehavior;
886
887        /// The primaryConstituentDeviceRestrictedSwitchingBehaviorConditions as set by -[AVCaptureDevice setPrimaryConstituentDeviceSwitchingBehavior:restrictedSwitchingBehaviorConditions:].
888        ///
889        ///
        /// By default, this property is set to AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditionNone. This property is key-value observable.
891        #[unsafe(method(primaryConstituentDeviceRestrictedSwitchingBehaviorConditions))]
892        #[unsafe(method_family = none)]
893        pub unsafe fn primaryConstituentDeviceRestrictedSwitchingBehaviorConditions(
894            &self,
895        ) -> AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditions;
896
897        /// The active constituent device switching behavior.
898        ///
899        ///
900        /// For virtual devices with multiple constituent devices, this property returns the active switching behavior. This is equal to primaryConstituentDeviceSwitchingBehavior except while recording using an AVCaptureMovieFileOutput configured with a different switching behavior (see -[AVCaptureMovieFileOutput setPrimaryConstituentDeviceSwitchingBehavior:restrictedSwitchingBehaviorConditions]). Devices that do not support constituent device switching return AVCapturePrimaryConstituentDeviceSwitchingBehaviorUnsupported. This property is key-value observable.
901        #[unsafe(method(activePrimaryConstituentDeviceSwitchingBehavior))]
902        #[unsafe(method_family = none)]
903        pub unsafe fn activePrimaryConstituentDeviceSwitchingBehavior(
904            &self,
905        ) -> AVCapturePrimaryConstituentDeviceSwitchingBehavior;
906
        /// The active constituent device restricted switching behavior.
908        ///
909        ///
910        /// For virtual devices with multiple constituent devices, this property returns the active restricted switching behavior conditions. This is equal to primaryConstituentDeviceRestrictedSwitchingBehaviorConditions except while recording using an AVCaptureMovieFileOutput configured with different restricted switching behavior conditions (see -[AVCaptureMovieFileOutput setPrimaryConstituentDeviceSwitchingBehaviorForRecording:restrictedSwitchingBehaviorConditions]). Devices that do not support constituent device switching return AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditionNone. This property is key-value observable.
911        #[unsafe(method(activePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditions))]
912        #[unsafe(method_family = none)]
913        pub unsafe fn activePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditions(
914            &self,
915        ) -> AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditions;
916
917        /// For virtual devices, this property indicates which constituent device is currently the primary constituent device. The primary constituent device may change when zoom, exposure, or focus changes.
918        ///
919        ///
920        /// This property returns nil for non-virtual devices. On virtual devices this property returns nil until the device is used in a running AVCaptureSession. This property is key-value observable.
921        #[unsafe(method(activePrimaryConstituentDevice))]
922        #[unsafe(method_family = none)]
923        pub unsafe fn activePrimaryConstituentDevice(&self) -> Option<Retained<AVCaptureDevice>>;
924
925        /// The constituent devices that may be selected as a fallback for a longer focal length primary constituent device.
926        ///
927        ///
928        /// This property returns an empty array for non-virtual devices. This property never changes for a given virtual device.
929        #[unsafe(method(supportedFallbackPrimaryConstituentDevices))]
930        #[unsafe(method_family = none)]
931        pub unsafe fn supportedFallbackPrimaryConstituentDevices(
932            &self,
933        ) -> Retained<NSArray<AVCaptureDevice>>;
934
935        /// The constituent devices that may be used as a fallback device when a constituent device with a longer focal length becomes limited by its light sensitivity or minimum focus distance.
936        ///
937        ///
938        /// This may only be set to the supportedFallbackPrimaryConstituentDevices or a subset thereof. By default this is set to all supportedFallbackPrimaryConstituentDevices. This property will throw an NSInvalidArgumentException if the array includes any device not reported in supportedFallbackPrimaryConstituentDevices. This property is key-value observable.
939        #[unsafe(method(fallbackPrimaryConstituentDevices))]
940        #[unsafe(method_family = none)]
941        pub unsafe fn fallbackPrimaryConstituentDevices(
942            &self,
943        ) -> Retained<NSArray<AVCaptureDevice>>;
944
945        /// Setter for [`fallbackPrimaryConstituentDevices`][Self::fallbackPrimaryConstituentDevices].
946        #[unsafe(method(setFallbackPrimaryConstituentDevices:))]
947        #[unsafe(method_family = none)]
948        pub unsafe fn setFallbackPrimaryConstituentDevices(
949            &self,
950            fallback_primary_constituent_devices: &NSArray<AVCaptureDevice>,
951        );
952    );
953}
954
/// Constants indicating the mode of the flash on the receiver's device, if it has one.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureflashmode?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureFlashMode(pub NSInteger);
impl AVCaptureFlashMode {
    /// Indicates that the flash should always be off.
    #[doc(alias = "AVCaptureFlashModeOff")]
    pub const Off: Self = Self(0);
    /// Indicates that the flash should always be on.
    #[doc(alias = "AVCaptureFlashModeOn")]
    pub const On: Self = Self(1);
    /// Indicates that the flash should be used automatically depending on ambient light conditions.
    #[doc(alias = "AVCaptureFlashModeAuto")]
    pub const Auto: Self = Self(2);
}

// SAFETY: `AVCaptureFlashMode` is `#[repr(transparent)]` over `NSInteger`,
// so it shares `NSInteger`'s Objective-C type encoding.
unsafe impl Encode for AVCaptureFlashMode {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// SAFETY: a reference to this type is encoded as a pointer to its value encoding.
unsafe impl RefEncode for AVCaptureFlashMode {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
985
/// AVCaptureDeviceFlash.
///
/// Bindings to the `AVCaptureDeviceFlash` Objective-C category on
/// [`AVCaptureDevice`]: querying flash presence/availability, plus the
/// deprecated flash-mode accessors superseded by `AVCapturePhotoOutput` /
/// `AVCapturePhotoSettings` (see the per-method deprecation notes below).
impl AVCaptureDevice {
    extern_methods!(
        /// Indicates whether the receiver has a flash.
        ///
        ///
        /// The value of this property is a BOOL indicating whether the receiver has a flash. The receiver's flashMode property can only be set when this property returns YES.
        #[unsafe(method(hasFlash))]
        #[unsafe(method_family = none)]
        pub unsafe fn hasFlash(&self) -> bool;

        /// Indicates whether the receiver's flash is currently available for use.
        ///
        ///
        /// The value of this property is a BOOL indicating whether the receiver's flash is currently available. The flash may become unavailable if, for example, the device overheats and needs to cool off. This property is key-value observable.
        #[unsafe(method(isFlashAvailable))]
        #[unsafe(method_family = none)]
        pub unsafe fn isFlashAvailable(&self) -> bool;

        /// Indicates whether the receiver's flash is currently active.
        ///
        ///
        /// The value of this property is a BOOL indicating whether the receiver's flash is currently active. When the flash is active, it will flash if a still image is captured. When a still image is captured with the flash active, exposure and white balance settings are overridden for the still. This is true even when using AVCaptureExposureModeCustom and/or AVCaptureWhiteBalanceModeLocked. This property is key-value observable.
        #[deprecated = "Use AVCapturePhotoOutput's -isFlashScene instead."]
        #[unsafe(method(isFlashActive))]
        #[unsafe(method_family = none)]
        pub unsafe fn isFlashActive(&self) -> bool;

        /// Returns whether the receiver supports the given flash mode.
        ///
        ///
        /// Parameter `flashMode`: An AVCaptureFlashMode to be checked.
        ///
        /// Returns: YES if the receiver supports the given flash mode, NO otherwise.
        ///
        ///
        /// The receiver's flashMode property can only be set to a certain mode if this method returns YES for that mode.
        #[deprecated = "Use AVCapturePhotoOutput's -supportedFlashModes instead."]
        #[unsafe(method(isFlashModeSupported:))]
        #[unsafe(method_family = none)]
        pub unsafe fn isFlashModeSupported(&self, flash_mode: AVCaptureFlashMode) -> bool;

        /// Indicates current mode of the receiver's flash, if it has one.
        ///
        ///
        /// The value of this property is an AVCaptureFlashMode that determines the mode of the receiver's flash, if it has one. -setFlashMode: throws an NSInvalidArgumentException if set to an unsupported value (see -isFlashModeSupported:). -setFlashMode: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's flashMode by key value observing this property.
        ///
        /// When using AVCapturePhotoOutput, AVCaptureDevice's flashMode property is ignored. You specify flashMode on a per photo basis by setting the AVCapturePhotoSettings.flashMode property.
        #[deprecated = "Use AVCapturePhotoSettings.flashMode instead."]
        #[unsafe(method(flashMode))]
        #[unsafe(method_family = none)]
        pub unsafe fn flashMode(&self) -> AVCaptureFlashMode;

        /// Setter for [`flashMode`][Self::flashMode].
        #[deprecated = "Use AVCapturePhotoSettings.flashMode instead."]
        #[unsafe(method(setFlashMode:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setFlashMode(&self, flash_mode: AVCaptureFlashMode);
    );
}
1046
/// Constants indicating the mode of the torch on the receiver's device, if it has one.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturetorchmode?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureTorchMode(pub NSInteger);
impl AVCaptureTorchMode {
    /// Indicates that the torch should always be off.
    #[doc(alias = "AVCaptureTorchModeOff")]
    pub const Off: Self = Self(0);
    /// Indicates that the torch should always be on.
    #[doc(alias = "AVCaptureTorchModeOn")]
    pub const On: Self = Self(1);
    /// Indicates that the torch should be used automatically depending on ambient light conditions.
    #[doc(alias = "AVCaptureTorchModeAuto")]
    pub const Auto: Self = Self(2);
}

// SAFETY: `AVCaptureTorchMode` is `#[repr(transparent)]` over `NSInteger`,
// so it shares `NSInteger`'s Objective-C type encoding.
unsafe impl Encode for AVCaptureTorchMode {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// SAFETY: a reference to this type is encoded as a pointer to its value encoding.
unsafe impl RefEncode for AVCaptureTorchMode {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
1077
extern "C" {
    /// A special value that may be passed to -setTorchModeWithLevel:error: to set the torch to the maximum level currently available. Under thermal duress, the maximum available torch level may be less than 1.0.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturemaxavailabletorchlevel?language=objc)
    // NOTE(review): the selector bound in this file is spelled
    // -setTorchModeOnWithLevel:error: (see the torch methods on
    // `AVCaptureDevice` below); the doc line above reproduces Apple's
    // original header comment verbatim.
    pub static AVCaptureMaxAvailableTorchLevel: c_float;
}
1084
/// AVCaptureDeviceTorch.
///
/// Bindings to the `AVCaptureDeviceTorch` Objective-C category on
/// [`AVCaptureDevice`]: querying torch presence/availability/level and
/// configuring the torch mode (setters require `lockForConfiguration:` —
/// see the per-method documentation).
impl AVCaptureDevice {
    extern_methods!(
        /// Indicates whether the receiver has a torch.
        ///
        ///
        /// The value of this property is a BOOL indicating whether the receiver has a torch. The receiver's torchMode property can only be set when this property returns YES.
        #[unsafe(method(hasTorch))]
        #[unsafe(method_family = none)]
        pub unsafe fn hasTorch(&self) -> bool;

        /// Indicates whether the receiver's torch is currently available for use.
        ///
        ///
        /// The value of this property is a BOOL indicating whether the receiver's torch is currently available. The torch may become unavailable if, for example, the device overheats and needs to cool off. This property is key-value observable.
        #[unsafe(method(isTorchAvailable))]
        #[unsafe(method_family = none)]
        pub unsafe fn isTorchAvailable(&self) -> bool;

        /// Indicates whether the receiver's torch is currently active.
        ///
        ///
        /// The value of this property is a BOOL indicating whether the receiver's torch is currently active. If the current torchMode is AVCaptureTorchModeAuto and isTorchActive is YES, the torch will illuminate once a recording starts (see AVCaptureOutput.h -startRecordingToOutputFileURL:recordingDelegate:). This property is key-value observable.
        #[unsafe(method(isTorchActive))]
        #[unsafe(method_family = none)]
        pub unsafe fn isTorchActive(&self) -> bool;

        /// Indicates the receiver's current torch brightness level as a floating point value.
        ///
        ///
        /// The value of this property is a float indicating the receiver's torch level from 0.0 (off) -> 1.0 (full). This property is key-value observable.
        #[unsafe(method(torchLevel))]
        #[unsafe(method_family = none)]
        pub unsafe fn torchLevel(&self) -> c_float;

        /// Returns whether the receiver supports the given torch mode.
        ///
        ///
        /// Parameter `torchMode`: An AVCaptureTorchMode to be checked.
        ///
        /// Returns: YES if the receiver supports the given torch mode, NO otherwise.
        ///
        ///
        /// The receiver's torchMode property can only be set to a certain mode if this method returns YES for that mode.
        #[unsafe(method(isTorchModeSupported:))]
        #[unsafe(method_family = none)]
        pub unsafe fn isTorchModeSupported(&self, torch_mode: AVCaptureTorchMode) -> bool;

        /// Indicates current mode of the receiver's torch, if it has one.
        ///
        ///
        /// The value of this property is an AVCaptureTorchMode that determines the mode of the receiver's torch, if it has one. -setTorchMode: throws an NSInvalidArgumentException if set to an unsupported value (see -isTorchModeSupported:). -setTorchMode: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's torchMode by key value observing this property.
        #[unsafe(method(torchMode))]
        #[unsafe(method_family = none)]
        pub unsafe fn torchMode(&self) -> AVCaptureTorchMode;

        /// Setter for [`torchMode`][Self::torchMode].
        #[unsafe(method(setTorchMode:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setTorchMode(&self, torch_mode: AVCaptureTorchMode);

        /// Sets the current mode of the receiver's torch to AVCaptureTorchModeOn at the specified level.
        ///
        ///
        /// This method sets the torch mode to AVCaptureTorchModeOn at a specified level. torchLevel must be a value between 0 and 1, or the special value AVCaptureMaxAvailableTorchLevel. The specified value may not be available if the iOS device is too hot. This method throws an NSInvalidArgumentException if set to an unsupported level. If the specified level is valid, but unavailable, the method returns NO with AVErrorTorchLevelUnavailable. -setTorchModeOnWithLevel:error: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's torchMode by key value observing the torchMode property.
        #[unsafe(method(setTorchModeOnWithLevel:error:_))]
        #[unsafe(method_family = none)]
        pub unsafe fn setTorchModeOnWithLevel_error(
            &self,
            torch_level: c_float,
        ) -> Result<(), Retained<NSError>>;
    );
}
1158
/// Constants indicating the mode of the focus on the receiver's device, if it has one.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturefocusmode?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureFocusMode(pub NSInteger);
impl AVCaptureFocusMode {
    /// Indicates that the focus should be locked at the lens' current position.
    #[doc(alias = "AVCaptureFocusModeLocked")]
    pub const Locked: Self = Self(0);
    /// Indicates that the device should autofocus once and then change the focus mode to AVCaptureFocusModeLocked.
    #[doc(alias = "AVCaptureFocusModeAutoFocus")]
    pub const AutoFocus: Self = Self(1);
    /// Indicates that the device should automatically focus when needed.
    #[doc(alias = "AVCaptureFocusModeContinuousAutoFocus")]
    pub const ContinuousAutoFocus: Self = Self(2);
}

// SAFETY: `AVCaptureFocusMode` is `#[repr(transparent)]` over `NSInteger`,
// so it shares `NSInteger`'s Objective-C type encoding.
unsafe impl Encode for AVCaptureFocusMode {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// SAFETY: a reference to this type is encoded as a pointer to its value encoding.
unsafe impl RefEncode for AVCaptureFocusMode {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
1189
/// Constants indicating the restriction of the receiver's autofocus system to a particular range of focus scan, if it supports range restrictions.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureautofocusrangerestriction?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureAutoFocusRangeRestriction(pub NSInteger);
impl AVCaptureAutoFocusRangeRestriction {
    /// Indicates that the autofocus system should not restrict the focus range.
    #[doc(alias = "AVCaptureAutoFocusRangeRestrictionNone")]
    pub const None: Self = Self(0);
    /// Indicates that the autofocus system should restrict the focus range for subject matter that is near to the camera.
    #[doc(alias = "AVCaptureAutoFocusRangeRestrictionNear")]
    pub const Near: Self = Self(1);
    /// Indicates that the autofocus system should restrict the focus range for subject matter that is far from the camera.
    #[doc(alias = "AVCaptureAutoFocusRangeRestrictionFar")]
    pub const Far: Self = Self(2);
}

// SAFETY: `AVCaptureAutoFocusRangeRestriction` is `#[repr(transparent)]` over
// `NSInteger`, so it shares `NSInteger`'s Objective-C type encoding.
unsafe impl Encode for AVCaptureAutoFocusRangeRestriction {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// SAFETY: a reference to this type is encoded as a pointer to its value encoding.
unsafe impl RefEncode for AVCaptureAutoFocusRangeRestriction {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
1220
extern "C" {
    /// A special value that may be passed as the lensPosition parameter of setFocusModeLockedWithLensPosition:completionHandler: to indicate that the caller does not wish to specify a value for the lensPosition property, and that it should instead be set to its current value. Note that the device may be adjusting lensPosition at the time of the call, in which case the value at which lensPosition is locked may differ from the value obtained by querying the lensPosition property.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturelenspositioncurrent?language=objc)
    // External framework constant; compare lens positions against this sentinel
    // rather than hard-coding its value.
    pub static AVCaptureLensPositionCurrent: c_float;
}
1227
1228/// Constants indicating the focus behavior when recording a Cinematic Video.
1229///
1230/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturecinematicvideofocusmode?language=objc)
1231// NS_ENUM
1232#[repr(transparent)]
1233#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
1234pub struct AVCaptureCinematicVideoFocusMode(pub NSInteger);
1235impl AVCaptureCinematicVideoFocusMode {
1236    /// Indicates that no focus mode is specified, in which case weak focus is used as default.
1237    #[doc(alias = "AVCaptureCinematicVideoFocusModeNone")]
1238    pub const None: Self = Self(0);
1239    /// Indicates that the subject should remain in focus until it exits the scene.
1240    #[doc(alias = "AVCaptureCinematicVideoFocusModeStrong")]
1241    pub const Strong: Self = Self(1);
1242    /// Indicates that the Cinematic Video algorithm should automatically adjust focus according to the prominence of the subjects in the scene.
1243    #[doc(alias = "AVCaptureCinematicVideoFocusModeWeak")]
1244    pub const Weak: Self = Self(2);
1245}
1246
1247unsafe impl Encode for AVCaptureCinematicVideoFocusMode {
1248    const ENCODING: Encoding = NSInteger::ENCODING;
1249}
1250
1251unsafe impl RefEncode for AVCaptureCinematicVideoFocusMode {
1252    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
1253}
1254
1255/// AVCaptureDeviceFocus.
1256impl AVCaptureDevice {
1257    extern_methods!(
1258        /// Returns whether the receiver supports the given focus mode.
1259        ///
1260        ///
1261        /// Parameter `focusMode`: An AVCaptureFocusMode to be checked.
1262        ///
1263        /// Returns: YES if the receiver supports the given focus mode, NO otherwise.
1264        ///
1265        ///
1266        /// The receiver's focusMode property can only be set to a certain mode if this method returns YES for that mode.
1267        #[unsafe(method(isFocusModeSupported:))]
1268        #[unsafe(method_family = none)]
1269        pub unsafe fn isFocusModeSupported(&self, focus_mode: AVCaptureFocusMode) -> bool;
1270
1271        /// Indicates whether the receiver supports a lens position other than AVCaptureLensPositionCurrent.
1272        ///
1273        ///
1274        /// If lockingFocusWithCustomLensPositionSupported returns NO, setFocusModeLockedWithLensPosition: may only be called with AVCaptureLensPositionCurrent. Passing any other lens position will result in an exception.
1275        #[unsafe(method(isLockingFocusWithCustomLensPositionSupported))]
1276        #[unsafe(method_family = none)]
1277        pub unsafe fn isLockingFocusWithCustomLensPositionSupported(&self) -> bool;
1278
1279        /// Indicates current focus mode of the receiver, if it has one.
1280        ///
1281        ///
1282        /// The value of this property is an AVCaptureFocusMode that determines the receiver's focus mode, if it has one. -setFocusMode: throws an NSInvalidArgumentException if set to an unsupported value (see -isFocusModeSupported:). -setFocusMode: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's focusMode by key value observing this property.
1283        #[unsafe(method(focusMode))]
1284        #[unsafe(method_family = none)]
1285        pub unsafe fn focusMode(&self) -> AVCaptureFocusMode;
1286
1287        /// Setter for [`focusMode`][Self::focusMode].
1288        #[unsafe(method(setFocusMode:))]
1289        #[unsafe(method_family = none)]
1290        pub unsafe fn setFocusMode(&self, focus_mode: AVCaptureFocusMode);
1291
1292        /// Indicates whether the receiver supports focus points of interest.
1293        ///
1294        ///
1295        /// The receiver's focusPointOfInterest property can only be set if this property returns YES.
1296        #[unsafe(method(isFocusPointOfInterestSupported))]
1297        #[unsafe(method_family = none)]
1298        pub unsafe fn isFocusPointOfInterestSupported(&self) -> bool;
1299
1300        #[cfg(feature = "objc2-core-foundation")]
1301        /// Indicates current focus point of interest of the receiver, if it has one.
1302        ///
1303        ///
1304        /// The value of this property is a CGPoint that determines the receiver's focus point of interest, if it has one. A value of (0,0) indicates that the camera should focus on the top left corner of the image, while a value of (1,1) indicates that it should focus on the bottom right. The default value is (0.5,0.5). -setFocusPointOfInterest: throws an NSInvalidArgumentException if isFocusPointOfInterestSupported returns NO. -setFocusPointOfInterest: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's focusPointOfInterest by key value observing this property. Note that setting focusPointOfInterest alone does not initiate a focus operation. After setting focusPointOfInterest, call -setFocusMode: to apply the new point of interest.
1305        #[unsafe(method(focusPointOfInterest))]
1306        #[unsafe(method_family = none)]
1307        pub unsafe fn focusPointOfInterest(&self) -> CGPoint;
1308
1309        #[cfg(feature = "objc2-core-foundation")]
1310        /// Setter for [`focusPointOfInterest`][Self::focusPointOfInterest].
1311        #[unsafe(method(setFocusPointOfInterest:))]
1312        #[unsafe(method_family = none)]
1313        pub unsafe fn setFocusPointOfInterest(&self, focus_point_of_interest: CGPoint);
1314
1315        /// Whether the receiver supports focus rectangles of interest.
1316        ///
1317        /// You may only set the device's ``focusRectOfInterest`` property if this property returns `true`.
1318        #[unsafe(method(isFocusRectOfInterestSupported))]
1319        #[unsafe(method_family = none)]
1320        pub unsafe fn isFocusRectOfInterestSupported(&self) -> bool;
1321
1322        #[cfg(feature = "objc2-core-foundation")]
1323        /// The minimum size you may use when specifying a rectangle of interest.
1324        ///
1325        /// The size returned is in normalized coordinates, and depends on the current ``AVCaptureDevice/activeFormat``. If ``focusRectOfInterestSupported`` returns `false`, this property returns { 0, 0 }.
1326        #[unsafe(method(minFocusRectOfInterestSize))]
1327        #[unsafe(method_family = none)]
1328        pub unsafe fn minFocusRectOfInterestSize(&self) -> CGSize;
1329
1330        #[cfg(feature = "objc2-core-foundation")]
1331        /// The device's current focus rectangle of interest, if it has one.
1332        ///
1333        /// The value of this property is a ``CGRect`` determining the device's focus rectangle of interest. Use this as an alternative to setting ``focusPointOfInterest``, as it allows you to specify both a location and size. For example, a value of `CGRectMake(0, 0, 1, 1)` tells the device to use the entire field of view when determining the focus, while `CGRectMake(0, 0, 0.25, 0.25)` indicates the top left sixteenth, and `CGRectMake(0.75, 0.75, 0.25, 0.25)` indicates the bottom right sixteenth. Setting ``focusRectOfInterest`` throws an `NSInvalidArgumentException` if ``focusRectOfInterestSupported`` returns `false`. Setting ``focusRectOfInterest`` throws an `NSInvalidArgumentException` if your provided rectangle's size is smaller than the ``minFocusRectOfInterestSize``. Setting ``focusRectOfInterest`` throws an `NSGenericException` if you call it without first obtaining exclusive access to the device using ``AVCaptureDevice/lockForConfiguration:``. Setting ``focusRectOfInterest`` updates the device's ``focusPointOfInterest`` to the center of your provided rectangle of interest. If you later set the device's ``focusPointOfInterest``, the ``focusRectOfInterest`` resets to the default sized rectangle of interest for the new focus point of interest. If you change your ``AVCaptureDevice/activeFormat``, the point of interest and rectangle of interest both revert to their default values. You can observe automatic changes to the device's ``focusRectOfInterest`` by key-value observing this property.
1334        ///
1335        /// - Note: Setting ``focusRectOfInterest`` alone does not initiate a focus operation. After setting ``focusRectOfInterest``, set ``focusMode`` to apply the new rectangle of interest.
1336        #[unsafe(method(focusRectOfInterest))]
1337        #[unsafe(method_family = none)]
1338        pub unsafe fn focusRectOfInterest(&self) -> CGRect;
1339
1340        #[cfg(feature = "objc2-core-foundation")]
1341        /// Setter for [`focusRectOfInterest`][Self::focusRectOfInterest].
1342        #[unsafe(method(setFocusRectOfInterest:))]
1343        #[unsafe(method_family = none)]
1344        pub unsafe fn setFocusRectOfInterest(&self, focus_rect_of_interest: CGRect);
1345
1346        #[cfg(feature = "objc2-core-foundation")]
1347        /// The default rectangle of interest used for a given focus point of interest.
1348        ///
1349        /// - Parameter pointOfInterest: The point of interest for which you want the default rectangle of interest.
1350        ///
1351        /// For example, pass `(0.5, 0.5)` to get the focus rectangle of interest used for the default focus point of interest at `(0.5, 0.5)`.
1352        ///
1353        /// - Note: The particular default rectangle returned depends on the current focus mode.
1354        ///
1355        /// This method returns `CGRectNull` if ``focusRectOfInterestSupported`` returns `false`.
1356        #[unsafe(method(defaultRectForFocusPointOfInterest:))]
1357        #[unsafe(method_family = none)]
1358        pub unsafe fn defaultRectForFocusPointOfInterest(
1359            &self,
1360            point_of_interest: CGPoint,
1361        ) -> CGRect;
1362
1363        /// Indicates whether the receiver is currently performing a focus scan to adjust focus.
1364        ///
1365        ///
1366        /// The value of this property is a BOOL indicating whether the receiver's camera focus is being automatically adjusted by means of a focus scan, because its focus mode is AVCaptureFocusModeAutoFocus or AVCaptureFocusModeContinuousAutoFocus. Clients can observe the value of this property to determine whether the camera's focus is stable.
1367        ///
1368        /// See also: lensPosition
1369        ///
1370        /// See also: AVCaptureAutoFocusSystem
1371        #[unsafe(method(isAdjustingFocus))]
1372        #[unsafe(method_family = none)]
1373        pub unsafe fn isAdjustingFocus(&self) -> bool;
1374
        /// Indicates whether the receiver supports autofocus range restrictions.
        ///
        /// The receiver's autoFocusRangeRestriction property can only be set if this property returns YES.
        #[unsafe(method(isAutoFocusRangeRestrictionSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoFocusRangeRestrictionSupported(&self) -> bool;

        /// Indicates current restriction of the receiver's autofocus system to a particular range of focus scan, if it supports range restrictions.
        ///
        /// The value of this property is an AVCaptureAutoFocusRangeRestriction indicating how the autofocus system should limit its focus scan. The default value is AVCaptureAutoFocusRangeRestrictionNone. -setAutoFocusRangeRestriction: throws an NSInvalidArgumentException if isAutoFocusRangeRestrictionSupported returns NO. -setAutoFocusRangeRestriction: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. This property only has an effect when the focusMode property is set to AVCaptureFocusModeAutoFocus or AVCaptureFocusModeContinuousAutoFocus. Note that setting autoFocusRangeRestriction alone does not initiate a focus operation. After setting autoFocusRangeRestriction, call -setFocusMode: to apply the new restriction.
        #[unsafe(method(autoFocusRangeRestriction))]
        #[unsafe(method_family = none)]
        pub unsafe fn autoFocusRangeRestriction(&self) -> AVCaptureAutoFocusRangeRestriction;

        /// Setter for [`autoFocusRangeRestriction`][Self::autoFocusRangeRestriction].
        ///
        /// Throws an Objective-C NSInvalidArgumentException if range restrictions are unsupported,
        /// and an NSGenericException if the device is not locked for configuration — see the
        /// getter's documentation for details.
        #[unsafe(method(setAutoFocusRangeRestriction:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoFocusRangeRestriction(
            &self,
            auto_focus_range_restriction: AVCaptureAutoFocusRangeRestriction,
        );
1398
        /// Indicates whether the receiver supports smooth autofocus.
        ///
        /// The receiver's smoothAutoFocusEnabled property can only be set if this property returns YES.
        #[unsafe(method(isSmoothAutoFocusSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isSmoothAutoFocusSupported(&self) -> bool;

        /// Indicates whether the receiver should use smooth autofocus.
        ///
        /// On a receiver where -isSmoothAutoFocusSupported returns YES and smoothAutoFocusEnabled is set to YES, a smooth autofocus will be engaged when the focus mode is set to AVCaptureFocusModeAutoFocus or AVCaptureFocusModeContinuousAutoFocus. Enabling smooth autofocus is appropriate for movie recording. Smooth autofocus is slower and less visually invasive. Disabling smooth autofocus is more appropriate for video processing where a fast autofocus is necessary. The default value is NO. Setting this property throws an NSInvalidArgumentException if -isSmoothAutoFocusSupported returns NO. The receiver must be locked for configuration using lockForConfiguration: before clients can set this method, otherwise an NSGenericException is thrown. Note that setting smoothAutoFocusEnabled alone does not initiate a focus operation. After setting smoothAutoFocusEnabled, call -setFocusMode: to apply the new smooth autofocus mode.
        #[unsafe(method(isSmoothAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isSmoothAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isSmoothAutoFocusEnabled`][Self::isSmoothAutoFocusEnabled].
        ///
        /// Throws an Objective-C exception if smooth autofocus is unsupported or the device is
        /// not locked for configuration — see the getter's documentation for details.
        #[unsafe(method(setSmoothAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setSmoothAutoFocusEnabled(&self, smooth_auto_focus_enabled: bool);
1419
        /// Indicates whether the receiver should automatically adjust face-driven autofocus.
        ///
        /// The value of this property is a BOOL that determines the receiver's automatic adjustment of face-driven autofocus. Default is YES on all platforms, if the receiver supports autofocus. This property must be set to NO before manually setting faceDrivenAutoFocusEnabled to YES/NO. -setAutomaticallyAdjustsFaceDrivenAutoFocusEnabled: throws an NSInvalidArgumentException if the receiver doesn't support autofocus. -setAutomaticallyAdjustsFaceDrivenAutoFocusEnabled: throws an NSGenericException if called without first obtaining exclusive access to the receiver using -lockForConfiguration:. After setting automaticallyAdjustsFaceDrivenAutoFocusEnabled, call -setFocusMode: to apply the change.
        #[unsafe(method(automaticallyAdjustsFaceDrivenAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn automaticallyAdjustsFaceDrivenAutoFocusEnabled(&self) -> bool;

        /// Setter for [`automaticallyAdjustsFaceDrivenAutoFocusEnabled`][Self::automaticallyAdjustsFaceDrivenAutoFocusEnabled].
        ///
        /// Throws an Objective-C exception if the receiver doesn't support autofocus or is not
        /// locked for configuration — see the getter's documentation for details.
        #[unsafe(method(setAutomaticallyAdjustsFaceDrivenAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutomaticallyAdjustsFaceDrivenAutoFocusEnabled(
            &self,
            automatically_adjusts_face_driven_auto_focus_enabled: bool,
        );

        /// Indicates whether face-driven autofocus is enabled on the receiver.
        ///
        /// Default is YES for all apps linked on or after iOS 15.4 when the receiver supports autofocus. -setFaceDrivenAutoFocusEnabled: throws an NSInvalidArgumentException if automaticallyAdjustsFaceDrivenAutoFocusEnabled returns YES.  -setFaceDrivenAutoFocusEnabled: throws an NSInvalidArgumentException if the receiver doesn't support autofocus. -setFaceDrivenAutoFocusEnabled: throws an NSGenericException if called without first obtaining exclusive access to the receiver using -lockForConfiguration:. Note that setting faceDrivenAutoFocusEnabled alone does not initiate this focus change operation. After setting faceDrivenAutoFocusEnabled, call -setFocusMode: to apply the change.
        #[unsafe(method(isFaceDrivenAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isFaceDrivenAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isFaceDrivenAutoFocusEnabled`][Self::isFaceDrivenAutoFocusEnabled].
        ///
        /// Throws an Objective-C exception unless automatic adjustment is first disabled, autofocus
        /// is supported, and the device is locked for configuration — see the getter's documentation.
        #[unsafe(method(setFaceDrivenAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setFaceDrivenAutoFocusEnabled(&self, face_driven_auto_focus_enabled: bool);
1448
        /// Indicates the focus position of the lens.
        ///
        /// The range of possible positions is 0.0 to 1.0, with 0.0 being the shortest distance at which the lens can focus and 1.0 the furthest. Note that 1.0 does not represent focus at infinity. The default value is 1.0. Note that a given lens position value does not correspond to an exact physical distance, nor does it represent a consistent focus distance from device to device. This property is key-value observable. It can be read at any time, regardless of focus mode, but can only be set via setFocusModeLockedWithLensPosition:completionHandler:.
        #[unsafe(method(lensPosition))]
        #[unsafe(method_family = none)]
        pub unsafe fn lensPosition(&self) -> c_float;

        #[cfg(all(feature = "block2", feature = "objc2-core-media"))]
        /// Sets focusMode to AVCaptureFocusModeLocked and locks lensPosition at an explicit value.
        ///
        /// Parameter `lensPosition`: The lens position, as described in the documentation for the lensPosition property. A value of AVCaptureLensPositionCurrent can be used to indicate that the caller does not wish to specify a value for lensPosition.
        ///
        /// Parameter `handler`: A block to be called when lensPosition has been set to the value specified and focusMode is set to AVCaptureFocusModeLocked. If setFocusModeLockedWithLensPosition:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the `AVCaptureSession/synchronizationClock` prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. The client may pass nil for the handler parameter if knowledge of the operation's completion is not required.
        ///
        /// This is the only way of setting lensPosition. This method throws an NSRangeException if lensPosition is set to an unsupported level. This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
        #[unsafe(method(setFocusModeLockedWithLensPosition:completionHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setFocusModeLockedWithLensPosition_completionHandler(
            &self,
            lens_position: c_float,
            handler: Option<&block2::DynBlock<dyn Fn(CMTime)>>,
        );
1474
        /// A property indicating the minimum focus distance.
        ///
        /// The minimum focus distance is given in millimeters, -1 if unknown. For virtual cameras (AVCaptureDeviceTypeBuiltInDualCamera, AVCaptureDeviceTypeBuiltInTripleCamera, etc.), the value reported is the smallest minimum focus distance of the auto-focus-capable cameras that it sources.
        #[unsafe(method(minimumFocusDistance))]
        #[unsafe(method_family = none)]
        pub unsafe fn minimumFocusDistance(&self) -> NSInteger;
1482
        /// Focus on and start tracking a detected object.
        ///
        /// - Parameter detectedObjectID: The ID of the detected object.
        /// - Parameter focusMode: Specify whether to focus strongly or weakly.
        #[unsafe(method(setCinematicVideoTrackingFocusWithDetectedObjectID:focusMode:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCinematicVideoTrackingFocusWithDetectedObjectID_focusMode(
            &self,
            detected_object_id: NSInteger,
            focus_mode: AVCaptureCinematicVideoFocusMode,
        );

        #[cfg(feature = "objc2-core-foundation")]
        /// Focus on and start tracking an object if it can be detected at the region specified by the point.
        ///
        /// - Parameter point: A normalized point of interest (i.e., [0,1]) in the coordinate space of the device.
        /// - Parameter focusMode: Specify whether to focus strongly or weakly.
        #[unsafe(method(setCinematicVideoTrackingFocusAtPoint:focusMode:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCinematicVideoTrackingFocusAtPoint_focusMode(
            &self,
            point: CGPoint,
            focus_mode: AVCaptureCinematicVideoFocusMode,
        );

        #[cfg(feature = "objc2-core-foundation")]
        /// Fix focus at a distance.
        ///
        /// - Parameter point: A normalized point of interest (i.e., [0,1]) in the coordinate space of the device.
        /// - Parameter focusMode: Specify whether to focus strongly or weakly.
        ///
        /// The distance at which focus is set is determined internally using signals such as depth data.
        #[unsafe(method(setCinematicVideoFixedFocusAtPoint:focusMode:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCinematicVideoFixedFocusAtPoint_focusMode(
            &self,
            point: CGPoint,
            focus_mode: AVCaptureCinematicVideoFocusMode,
        );
1522    );
1523}
1524
/// Constants indicating the mode of the exposure on the receiver's device, if it has adjustable exposure.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureexposuremode?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureExposureMode(pub NSInteger);
impl AVCaptureExposureMode {
    /// Indicates that the exposure should be locked at its current value.
    #[doc(alias = "AVCaptureExposureModeLocked")]
    pub const Locked: Self = Self(0);
    /// Indicates that the device should automatically adjust exposure once and then change the exposure mode to AVCaptureExposureModeLocked.
    #[doc(alias = "AVCaptureExposureModeAutoExpose")]
    pub const AutoExpose: Self = Self(1);
    /// Indicates that the device should automatically adjust exposure when needed.
    #[doc(alias = "AVCaptureExposureModeContinuousAutoExposure")]
    pub const ContinuousAutoExposure: Self = Self(2);
    /// Indicates that the device should only adjust exposure according to user provided ISO, exposureDuration values.
    #[doc(alias = "AVCaptureExposureModeCustom")]
    pub const Custom: Self = Self(3);
}
1551
// SAFETY: `AVCaptureExposureMode` is `#[repr(transparent)]` over `NSInteger`, so it
// has the same size, alignment and Objective-C type encoding as `NSInteger`.
unsafe impl Encode for AVCaptureExposureMode {
    const ENCODING: Encoding = NSInteger::ENCODING;
}
1555
// SAFETY: a reference to `AVCaptureExposureMode` is encoded as a pointer to its
// (`NSInteger`-compatible) encoding, matching what the Objective-C runtime expects.
unsafe impl RefEncode for AVCaptureExposureMode {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
1559
extern "C" {
    /// A special value that may be passed as the duration parameter of setExposureModeCustomWithDuration:ISO:completionHandler: to indicate that the caller does not wish to specify a value for the exposureDuration property, and that it should instead be set to its current value. Note that the device may be adjusting exposureDuration at the time of the call, in which case the value to which exposureDuration is set may differ from the value obtained by querying the exposureDuration property.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureexposuredurationcurrent?language=objc)
    ///
    /// NOTE: as a foreign `static`, reading this value requires an `unsafe` block.
    #[cfg(feature = "objc2-core-media")]
    pub static AVCaptureExposureDurationCurrent: CMTime;
}
1567
extern "C" {
    /// A special value that may be passed as the ISO parameter of setExposureModeCustomWithDuration:ISO:completionHandler: to indicate that the caller does not wish to specify a value for the ISO property, and that it should instead be set to its current value. Note that the device may be adjusting ISO at the time of the call, in which case the value to which ISO is set may differ from the value obtained by querying the ISO property.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureisocurrent?language=objc)
    ///
    /// NOTE: as a foreign `static`, reading this value requires an `unsafe` block.
    pub static AVCaptureISOCurrent: c_float;
}
1574
extern "C" {
    /// A special value that may be passed as the bias parameter of setExposureTargetBias:completionHandler: to indicate that the caller does not wish to specify a value for the exposureTargetBias property, and that it should instead be set to its current value.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureexposuretargetbiascurrent?language=objc)
    ///
    /// NOTE: as a foreign `static`, reading this value requires an `unsafe` block.
    pub static AVCaptureExposureTargetBiasCurrent: c_float;
}
1581
1582/// AVCaptureDeviceExposure.
1583impl AVCaptureDevice {
1584    extern_methods!(
        /// Returns whether the receiver supports the given exposure mode.
        ///
        /// Parameter `exposureMode`: An AVCaptureExposureMode to be checked.
        ///
        /// Returns: YES if the receiver supports the given exposure mode, NO otherwise.
        ///
        /// The receiver's exposureMode property can only be set to a certain mode if this method returns YES for that mode.
        #[unsafe(method(isExposureModeSupported:))]
        #[unsafe(method_family = none)]
        pub unsafe fn isExposureModeSupported(&self, exposure_mode: AVCaptureExposureMode) -> bool;

        /// Indicates current exposure mode of the receiver, if it has adjustable exposure.
        ///
        /// The value of this property is an AVCaptureExposureMode that determines the receiver's exposure mode, if it has adjustable exposure. -setExposureMode: throws an NSInvalidArgumentException if set to an unsupported value (see -isExposureModeSupported:). -setExposureMode: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. When using AVCapturePhotoOutput and capturing photos with AVCapturePhotoSettings' photoQualityPrioritization property set to AVCapturePhotoQualityPrioritizationBalanced or higher, the receiver's ISO and exposureDuration values may be overridden when exposing the photo if the scene is dark enough to warrant some form of multi-image fusion to improve quality. To ensure that the receiver's ISO and exposureDuration values are honored while in AVCaptureExposureModeCustom or AVCaptureExposureModeLocked, you must set your AVCapturePhotoSettings.photoQualityPrioritization property to AVCapturePhotoQualityPrioritizationSpeed. The same rule applies if you are using the deprecated AVCapturePhotoSettings.autoStillImageStabilizationEnabled property; you must set it to NO to preserve your custom exposure values in the photo capture. Likewise if you're using AVCaptureStillImageOutput, automaticallyEnablesStillImageStabilizationWhenAvailable must be set to NO to preserve your custom exposure values in a still image capture. Clients can observe automatic changes to the receiver's exposureMode by key value observing this property.
        #[unsafe(method(exposureMode))]
        #[unsafe(method_family = none)]
        pub unsafe fn exposureMode(&self) -> AVCaptureExposureMode;

        /// Setter for [`exposureMode`][Self::exposureMode].
        ///
        /// Throws an Objective-C exception if the mode is unsupported (see
        /// [`isExposureModeSupported`][Self::isExposureModeSupported]) or the device is not locked
        /// for configuration — see the getter's documentation for details.
        #[unsafe(method(setExposureMode:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setExposureMode(&self, exposure_mode: AVCaptureExposureMode);
1610
        /// Indicates whether the receiver supports exposure points of interest.
        ///
        /// The receiver's exposurePointOfInterest property can only be set if this property returns YES.
        #[unsafe(method(isExposurePointOfInterestSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isExposurePointOfInterestSupported(&self) -> bool;

        #[cfg(feature = "objc2-core-foundation")]
        /// Indicates current exposure point of interest of the receiver, if it has one.
        ///
        /// The value of this property is a CGPoint that determines the receiver's exposure point of interest, if it has adjustable exposure. A value of (0,0) indicates that the camera should adjust exposure based on the top left corner of the image, while a value of (1,1) indicates that it should adjust exposure based on the bottom right corner. The default value is (0.5,0.5). -setExposurePointOfInterest: throws an NSInvalidArgumentException if isExposurePointOfInterestSupported returns NO. -setExposurePointOfInterest: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Note that setting exposurePointOfInterest alone does not initiate an exposure operation. After setting exposurePointOfInterest, call -setExposureMode: to apply the new point of interest.
        #[unsafe(method(exposurePointOfInterest))]
        #[unsafe(method_family = none)]
        pub unsafe fn exposurePointOfInterest(&self) -> CGPoint;

        #[cfg(feature = "objc2-core-foundation")]
        /// Setter for [`exposurePointOfInterest`][Self::exposurePointOfInterest].
        ///
        /// Throws an Objective-C exception if points of interest are unsupported or the device is
        /// not locked for configuration — see the getter's documentation for details.
        #[unsafe(method(setExposurePointOfInterest:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setExposurePointOfInterest(&self, exposure_point_of_interest: CGPoint);
1633
        /// Whether the device supports exposure rectangles of interest.
        ///
        /// You may only set the device's ``exposureRectOfInterest`` property if this property returns `true`.
        #[unsafe(method(isExposureRectOfInterestSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isExposureRectOfInterestSupported(&self) -> bool;

        #[cfg(feature = "objc2-core-foundation")]
        /// The minimum size you may use when specifying a rectangle of interest.
        ///
        /// The size returned is in normalized coordinates, and depends on the current ``AVCaptureDevice/activeFormat``. If ``exposureRectOfInterestSupported`` returns `false`, this property returns { 0, 0 }.
        #[unsafe(method(minExposureRectOfInterestSize))]
        #[unsafe(method_family = none)]
        pub unsafe fn minExposureRectOfInterestSize(&self) -> CGSize;

        #[cfg(feature = "objc2-core-foundation")]
        /// The device's current exposure rectangle of interest, if it has one.
        ///
        /// The value of this property is a ``CGRect`` determining the device's exposure rectangle of interest. Use this as an alternative to setting ``exposurePointOfInterest``, as it allows you to specify both a location and size. For example, a value of `CGRectMake(0, 0, 1, 1)` tells the device to use the entire field of view when determining the exposure, while `CGRectMake(0, 0, 0.25, 0.25)` indicates the top left sixteenth, and `CGRectMake(0.75, 0.75, 0.25, 0.25)` indicates the bottom right sixteenth. Setting ``exposureRectOfInterest`` throws an `NSInvalidArgumentException` if ``exposureRectOfInterestSupported`` returns `false`. Setting ``exposureRectOfInterest`` throws an `NSInvalidArgumentException` if your provided rectangle's size is smaller than the ``minExposureRectOfInterestSize``. Setting ``exposureRectOfInterest`` throws an `NSGenericException` if you call it without first obtaining exclusive access to the device using ``AVCaptureDevice/lockForConfiguration:``. Setting ``exposureRectOfInterest`` updates the device's ``exposurePointOfInterest`` to the center of your provided rectangle of interest. If you later set the device's ``exposurePointOfInterest``, the ``exposureRectOfInterest`` resets to the default sized rectangle of interest for the new exposure point of interest. If you change your ``AVCaptureDevice/activeFormat``, the point of interest and rectangle of interest both revert to their default values. You can observe automatic changes to the device's ``exposureRectOfInterest`` by key-value observing this property.
        ///
        /// - Note: Setting ``exposureRectOfInterest`` alone does not initiate an exposure operation. After setting ``exposureRectOfInterest``, set ``exposureMode`` to apply the new rectangle of interest.
        #[unsafe(method(exposureRectOfInterest))]
        #[unsafe(method_family = none)]
        pub unsafe fn exposureRectOfInterest(&self) -> CGRect;

        #[cfg(feature = "objc2-core-foundation")]
        /// Setter for [`exposureRectOfInterest`][Self::exposureRectOfInterest].
        ///
        /// Throws an Objective-C exception if rectangles of interest are unsupported, the
        /// rectangle is smaller than [`minExposureRectOfInterestSize`][Self::minExposureRectOfInterestSize],
        /// or the device is not locked for configuration — see the getter's documentation.
        #[unsafe(method(setExposureRectOfInterest:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setExposureRectOfInterest(&self, exposure_rect_of_interest: CGRect);
1664
        #[cfg(feature = "objc2-core-foundation")]
        /// The default rectangle of interest used for a given exposure point of interest.
        ///
        /// - Parameter pointOfInterest: The point of interest for which you want the default rectangle of interest.
        ///
        /// For example, pass `(0.5, 0.5)` to get the exposure rectangle of interest used for the default exposure point of interest at `(0.5, 0.5)`.
        ///
        /// This method returns `CGRectNull` if ``exposureRectOfInterestSupported`` returns `false`.
        #[unsafe(method(defaultRectForExposurePointOfInterest:))]
        #[unsafe(method_family = none)]
        pub unsafe fn defaultRectForExposurePointOfInterest(
            &self,
            point_of_interest: CGPoint,
        ) -> CGRect;
1679
        /// Indicates whether the receiver should automatically adjust face-driven auto exposure.
        ///
        /// The value of this property is a BOOL that determines the receiver's automatic adjustment of face-driven auto exposure. Default is YES on all platforms, if the receiver supports auto exposure. This property must be set to NO before manually setting faceDrivenAutoExposureEnabled to YES/NO. -setAutomaticallyAdjustsFaceDrivenAutoExposureEnabled: throws an NSInvalidArgumentException if the receiver doesn't support auto exposure. -setAutomaticallyAdjustsFaceDrivenAutoExposureEnabled: throws an NSGenericException if called without first obtaining exclusive access to the receiver using -lockForConfiguration:. After setting automaticallyAdjustsFaceDrivenAutoExposureEnabled, call -setExposureMode: to apply the change.
        #[unsafe(method(automaticallyAdjustsFaceDrivenAutoExposureEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn automaticallyAdjustsFaceDrivenAutoExposureEnabled(&self) -> bool;

        /// Setter for [`automaticallyAdjustsFaceDrivenAutoExposureEnabled`][Self::automaticallyAdjustsFaceDrivenAutoExposureEnabled].
        ///
        /// Throws an Objective-C exception if the receiver doesn't support auto exposure or is
        /// not locked for configuration — see the getter's documentation for details.
        #[unsafe(method(setAutomaticallyAdjustsFaceDrivenAutoExposureEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutomaticallyAdjustsFaceDrivenAutoExposureEnabled(
            &self,
            automatically_adjusts_face_driven_auto_exposure_enabled: bool,
        );

        /// Indicates whether face-driven auto exposure is enabled on the receiver.
        ///
        /// Default is YES for all apps linked on or after iOS 15.4 when the receiver supports auto exposure. -setFaceDrivenAutoExposureEnabled: throws an NSInvalidArgumentException if automaticallyAdjustsFaceDrivenAutoExposureEnabled returns YES. -setFaceDrivenAutoExposureEnabled: throws an NSInvalidArgumentException if the receiver doesn't support auto exposure. -setFaceDrivenAutoExposureEnabled: throws an NSGenericException if called without first obtaining exclusive access to the receiver using -lockForConfiguration:. Note that setting faceDrivenAutoExposureEnabled alone does not initiate this exposure change operation. After setting faceDrivenAutoExposureEnabled, call -setExposureMode: to apply the change.
        #[unsafe(method(isFaceDrivenAutoExposureEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isFaceDrivenAutoExposureEnabled(&self) -> bool;

        /// Setter for [`isFaceDrivenAutoExposureEnabled`][Self::isFaceDrivenAutoExposureEnabled].
        ///
        /// Throws an Objective-C exception unless automatic adjustment is first disabled, auto
        /// exposure is supported, and the device is locked for configuration — see the getter's
        /// documentation for details.
        #[unsafe(method(setFaceDrivenAutoExposureEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setFaceDrivenAutoExposureEnabled(
            &self,
            face_driven_auto_exposure_enabled: bool,
        );
1711
        #[cfg(feature = "objc2-core-media")]
        /// The maximum exposure (integration) time that may be used by the auto exposure algorithm.
        ///
        /// When an AVCaptureDevice's exposureMode is set to AVCaptureExposureModeAutoExpose or AVCaptureExposureModeContinuousAutoExposure, the auto exposure algorithm picks a default max exposure duration that is tuned for the current configuration, balancing low light image quality with motion preservation. By querying or key-value observing this property, you may find out the current max exposure duration in use. You may also override the default value by setting this property to a value between activeFormat.maxExposureDuration and activeFormat.minExposureDuration. An NSRangeException is thrown if you pass an out-of-bounds exposure duration. Setting the property to the special value of kCMTimeInvalid resets the auto exposure max duration to the device's default for your current configuration. When the device's activeFormat or the AVCaptureSession's sessionPreset changes, this property resets to the default max exposure duration for the new format or session preset.
        ///
        /// On some devices, the auto exposure algorithm picks a different max exposure duration for a given format depending whether you used the -[AVCaptureSession setSessionPreset:] API or the -[AVCaptureDevice setActiveFormat:] API to set the format. To ensure uniform default handling of max exposure duration, you can set your AVCaptureDeviceInput's unifiedAutoExposureDefaultsEnabled property to YES.
        #[unsafe(method(activeMaxExposureDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn activeMaxExposureDuration(&self) -> CMTime;

        #[cfg(feature = "objc2-core-media")]
        /// Setter for [`activeMaxExposureDuration`][Self::activeMaxExposureDuration].
        ///
        /// Throws an Objective-C NSRangeException for out-of-bounds durations; pass kCMTimeInvalid
        /// to reset to the device default — see the getter's documentation for details.
        #[unsafe(method(setActiveMaxExposureDuration:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setActiveMaxExposureDuration(&self, active_max_exposure_duration: CMTime);
1728
        /// Indicates whether the receiver is currently adjusting camera exposure.
        ///
        /// The value of this property is a BOOL indicating whether the receiver's camera exposure is being automatically adjusted because its exposure mode is AVCaptureExposureModeAutoExpose or AVCaptureExposureModeContinuousAutoExposure. Clients can observe the value of this property to determine whether the camera exposure is stable or is being automatically adjusted.
        #[unsafe(method(isAdjustingExposure))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAdjustingExposure(&self) -> bool;

        /// The size of the lens diaphragm.
        ///
        /// The value of this property is a float indicating the size (f number) of the lens diaphragm. This property does not change.
        #[unsafe(method(lensAperture))]
        #[unsafe(method_family = none)]
        pub unsafe fn lensAperture(&self) -> c_float;
1744
        #[cfg(feature = "objc2-core-media")]
        /// The length of time over which exposure takes place.
        ///
        /// Only exposure duration values between activeFormat.minExposureDuration and activeFormat.maxExposureDuration are supported. This property is key-value observable. It can be read at any time, regardless of exposure mode, but can only be set via setExposureModeCustomWithDuration:ISO:completionHandler:.
        #[unsafe(method(exposureDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn exposureDuration(&self) -> CMTime;

        /// The current exposure ISO value.
        ///
        /// This property controls the sensor's sensitivity to light by means of a gain value applied to the signal. Only ISO values between activeFormat.minISO and activeFormat.maxISO are supported. Higher values will result in noisier images. This property is key-value observable. It can be read at any time, regardless of exposure mode, but can only be set via setExposureModeCustomWithDuration:ISO:completionHandler:.
        #[unsafe(method(ISO))]
        #[unsafe(method_family = none)]
        pub unsafe fn ISO(&self) -> c_float;
1761
        #[cfg(all(feature = "block2", feature = "objc2-core-media"))]
        /// Sets exposureMode to AVCaptureExposureModeCustom and locks exposureDuration and ISO at explicit values.
        ///
        /// Parameter `duration`: The exposure duration, as described in the documentation for the exposureDuration property. A value of AVCaptureExposureDurationCurrent can be used to indicate that the caller does not wish to specify a value for exposureDuration. Note that changes to this property may result in changes to activeVideoMinFrameDuration and/or activeVideoMaxFrameDuration.
        ///
        /// Parameter `ISO`: The exposure ISO value, as described in the documentation for the ISO property. A value of AVCaptureISOCurrent can be used to indicate that the caller does not wish to specify a value for ISO.
        ///
        /// Parameter `handler`: A block to be called when both exposureDuration and ISO have been set to the values specified and exposureMode is set to AVCaptureExposureModeCustom. If setExposureModeCustomWithDuration:ISO:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the `AVCaptureSession/synchronizationClock` prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. The client may pass nil for the handler parameter if knowledge of the operation's completion is not required.
        ///
        /// This is the only way of setting exposureDuration and ISO. This method throws an NSRangeException if either exposureDuration or ISO is set to an unsupported level. This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. When using AVCapturePhotoOutput to capture photos, note that the photoQualityPrioritization property of AVCapturePhotoSettings defaults to AVCapturePhotoQualityPrioritizationBalanced, which allows photo capture to temporarily override the capture device's ISO and exposureDuration values if the scene is dark enough to warrant some form of multi-image fusion to improve quality. To ensure that the receiver's ISO and exposureDuration values are honored while in AVCaptureExposureModeCustom or AVCaptureExposureModeLocked, you must set your AVCapturePhotoSettings.photoQualityPrioritization property to AVCapturePhotoQualityPrioritizationSpeed. The same rule applies if you use the deprecated AVCapturePhotoSettings.autoStillImageStabilizationEnabled property or AVCaptureStillImageOutput.automaticallyEnablesStillImageStabilizationWhenAvailable property. You must set them to NO to preserve your custom or locked exposure settings.
        #[unsafe(method(setExposureModeCustomWithDuration:ISO:completionHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setExposureModeCustomWithDuration_ISO_completionHandler(
            &self,
            duration: CMTime,
            iso: c_float,
            handler: Option<&block2::DynBlock<dyn Fn(CMTime)>>,
        );
1782
1783        /// Indicates the metered exposure level's offset from the target exposure value, in EV units.
1784        ///
1785        ///
1786        /// The value of this read-only property indicates the difference between the metered exposure level of the current scene and the target exposure value. This property is key-value observable.
1787        #[unsafe(method(exposureTargetOffset))]
1788        #[unsafe(method_family = none)]
1789        pub unsafe fn exposureTargetOffset(&self) -> c_float;
1790
1791        /// Bias applied to the target exposure value, in EV units.
1792        ///
1793        ///
1794        /// When exposureMode is AVCaptureExposureModeContinuousAutoExposure or AVCaptureExposureModeLocked, the bias will affect both metering (exposureTargetOffset), and the actual exposure level (exposureDuration and ISO). When the exposure mode is AVCaptureExposureModeCustom, it will only affect metering. This property is key-value observable. It can be read at any time, but can only be set via setExposureTargetBias:completionHandler:.
1795        #[unsafe(method(exposureTargetBias))]
1796        #[unsafe(method_family = none)]
1797        pub unsafe fn exposureTargetBias(&self) -> c_float;
1798
1799        /// A float indicating the minimum supported exposure bias, in EV units.
1800        ///
1801        ///
1802        /// This read-only property indicates the minimum supported exposure bias.
1803        #[unsafe(method(minExposureTargetBias))]
1804        #[unsafe(method_family = none)]
1805        pub unsafe fn minExposureTargetBias(&self) -> c_float;
1806
1807        /// A float indicating the maximum supported exposure bias, in EV units.
1808        ///
1809        ///
1810        /// This read-only property indicates the maximum supported exposure bias.
1811        #[unsafe(method(maxExposureTargetBias))]
1812        #[unsafe(method_family = none)]
1813        pub unsafe fn maxExposureTargetBias(&self) -> c_float;
1814
1815        #[cfg(all(feature = "block2", feature = "objc2-core-media"))]
1816        /// Sets the bias to be applied to the target exposure value.
1817        ///
1818        ///
1819        /// Parameter `bias`: The bias to be applied to the exposure target value, as described in the documentation for the exposureTargetBias property.
1820        ///
1821        /// Parameter `handler`: A block to be called when exposureTargetBias has been set to the value specified. If setExposureTargetBias:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which the setting has been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the `AVCaptureSession/synchronizationClock` prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. The client may pass nil for the handler parameter if knowledge of the operation's completion is not required.
1822        ///
1823        ///
1824        /// This is the only way of setting exposureTargetBias. This method throws an NSRangeException if exposureTargetBias is set to an unsupported level. This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
1825        #[unsafe(method(setExposureTargetBias:completionHandler:))]
1826        #[unsafe(method_family = none)]
1827        pub unsafe fn setExposureTargetBias_completionHandler(
1828            &self,
1829            bias: c_float,
1830            handler: Option<&block2::DynBlock<dyn Fn(CMTime)>>,
1831        );
1832    );
1833}
1834
/// AVCaptureDeviceToneMapping.
impl AVCaptureDevice {
    extern_methods!(
        /// Indicates whether the receiver should use global tone mapping.
        ///
        ///
        /// Tone mapping is a technique used by the device to map the pixel levels in high dynamic range images to a more limited dynamic range (such as 16 bit to 8 bit), while still retaining as close an appearance as possible. Normally the device employs adaptive, local tone curves to preserve highest image quality and adapt quickly to changing lighting conditions.
        ///
        /// This property indicates to the receiver to use a global tone map. If set to YES, the tone map is adjusted dynamically depending on the current scene and the same tone map is applied to all pixels in an image. If set to its default value of NO, different tone maps may be applied to different pixels in an image.
        ///
        /// globalToneMappingEnabled may only be set to YES if the receiver's activeFormat.isGlobalToneMappingSupported property returns YES, otherwise an NSGenericException is thrown. Setting globalToneMappingEnabled throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
        ///
        /// When global tone mapping is enabled, an AVCapturePhotoOutput connected to the AVCaptureDeviceInput’s session disables all forms of still image fusion, resulting in still images with no automatic stabilization applied.
        ///
        /// The receiver’s globalToneMappingEnabled resets to its default value of NO under the following conditions:
        /// - The receiver’s activeFormat changes
        /// - The receiver’s AVCaptureDeviceInput’s session’s sessionPreset changes
        /// - The receiver’s AVCaptureDeviceInput is added to a session
        ///
        /// Clients can observe automatic changes to the receiver's globalToneMappingEnabled by key value observing this property.
        #[unsafe(method(isGlobalToneMappingEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isGlobalToneMappingEnabled(&self) -> bool;

        /// Setter for [`isGlobalToneMappingEnabled`][Self::isGlobalToneMappingEnabled].
        ///
        /// May only be set to YES when activeFormat.isGlobalToneMappingSupported returns YES, and only after obtaining exclusive access via lockForConfiguration:; otherwise an NSGenericException is thrown (see the getter's documentation above).
        #[unsafe(method(setGlobalToneMappingEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setGlobalToneMappingEnabled(&self, global_tone_mapping_enabled: bool);
    );
}
1865
/// Constants indicating the mode of the white balance on the receiver's device, if it has adjustable white balance.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturewhitebalancemode?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureWhiteBalanceMode(pub NSInteger);
impl AVCaptureWhiteBalanceMode {
    /// Indicates that the white balance should be locked at its current value.
    #[doc(alias = "AVCaptureWhiteBalanceModeLocked")]
    pub const Locked: Self = Self(0);
    /// Indicates that the device should automatically adjust white balance once and then change the white balance mode to AVCaptureWhiteBalanceModeLocked.
    #[doc(alias = "AVCaptureWhiteBalanceModeAutoWhiteBalance")]
    pub const AutoWhiteBalance: Self = Self(1);
    /// Indicates that the device should automatically adjust white balance when needed.
    #[doc(alias = "AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance")]
    pub const ContinuousAutoWhiteBalance: Self = Self(2);
}
1888
// SAFETY: `AVCaptureWhiteBalanceMode` is `#[repr(transparent)]` over `NSInteger`
// (it is an NS_ENUM), so it shares `NSInteger`'s Objective-C type encoding.
unsafe impl Encode for AVCaptureWhiteBalanceMode {
    const ENCODING: EncoDING_PLACEHOLDER
}
1896
/// Structure containing RGB white balance gain values.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturewhitebalancegains?language=objc)
#[repr(C)]
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct AVCaptureWhiteBalanceGains {
    /// Gain applied to the red channel.
    pub redGain: c_float,
    /// Gain applied to the green channel.
    pub greenGain: c_float,
    /// Gain applied to the blue channel.
    pub blueGain: c_float,
}
1907
// SAFETY: The struct is `#[repr(C)]` with exactly three `c_float` fields,
// matching the struct encoding below ("?" is the placeholder struct name
// emitted by header-translator).
unsafe impl Encode for AVCaptureWhiteBalanceGains {
    const ENCODING: Encoding = Encoding::Struct(
        "?",
        &[
            <c_float>::ENCODING,
            <c_float>::ENCODING,
            <c_float>::ENCODING,
        ],
    );
}

// SAFETY: A pointer to this type is encoded as a pointer to the struct encoding.
unsafe impl RefEncode for AVCaptureWhiteBalanceGains {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
1922
/// Structure containing CIE 1931 xy chromaticity values.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturewhitebalancechromaticityvalues?language=objc)
#[repr(C)]
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct AVCaptureWhiteBalanceChromaticityValues {
    /// CIE 1931 "little x" chromaticity coordinate.
    pub x: c_float,
    /// CIE 1931 "little y" chromaticity coordinate.
    pub y: c_float,
}
1932
// SAFETY: The struct is `#[repr(C)]` with exactly two `c_float` fields,
// matching the struct encoding below.
unsafe impl Encode for AVCaptureWhiteBalanceChromaticityValues {
    const ENCODING: Encoding = Encoding::Struct("?", &[<c_float>::ENCODING, <c_float>::ENCODING]);
}

// SAFETY: A pointer to this type is encoded as a pointer to the struct encoding.
unsafe impl RefEncode for AVCaptureWhiteBalanceChromaticityValues {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
1940
/// Structure containing a white balance color correlated temperature in kelvin, plus a tint value in the range of [-150 - +150].
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturewhitebalancetemperatureandtintvalues?language=objc)
#[repr(C)]
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct AVCaptureWhiteBalanceTemperatureAndTintValues {
    /// Correlated color temperature, in kelvin.
    pub temperature: c_float,
    /// Tint value, in the range [-150, +150].
    pub tint: c_float,
}
1950
// SAFETY: The struct is `#[repr(C)]` with exactly two `c_float` fields,
// matching the struct encoding below.
unsafe impl Encode for AVCaptureWhiteBalanceTemperatureAndTintValues {
    const ENCODING: Encoding = Encoding::Struct("?", &[<c_float>::ENCODING, <c_float>::ENCODING]);
}

// SAFETY: A pointer to this type is encoded as a pointer to the struct encoding.
unsafe impl RefEncode for AVCaptureWhiteBalanceTemperatureAndTintValues {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
1958
// Preset and sentinel white balance values exported by AVFoundation.
// Note: reading any of these `extern` statics from Rust requires an `unsafe` block.
extern "C" {
    /// Temperature and tint values ideal for scenes illuminated with a tungsten light source.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturewhitebalancetemperatureandtintvaluestungsten?language=objc)
    pub static AVCaptureWhiteBalanceTemperatureAndTintValuesTungsten:
        AVCaptureWhiteBalanceTemperatureAndTintValues;
}

extern "C" {
    /// Temperature and tint values ideal for scenes illuminated with a fluorescent light source.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturewhitebalancetemperatureandtintvaluesfluorescent?language=objc)
    pub static AVCaptureWhiteBalanceTemperatureAndTintValuesFluorescent:
        AVCaptureWhiteBalanceTemperatureAndTintValues;
}

extern "C" {
    /// Temperature and tint values ideal for scenes illuminated with natural daylight.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturewhitebalancetemperatureandtintvaluesdaylight?language=objc)
    pub static AVCaptureWhiteBalanceTemperatureAndTintValuesDaylight:
        AVCaptureWhiteBalanceTemperatureAndTintValues;
}

extern "C" {
    /// Temperature and tint values ideal for scenes illuminated with natural cloudy daylight.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturewhitebalancetemperatureandtintvaluescloudy?language=objc)
    pub static AVCaptureWhiteBalanceTemperatureAndTintValuesCloudy:
        AVCaptureWhiteBalanceTemperatureAndTintValues;
}

extern "C" {
    /// Temperature and tint values ideal for scenes illuminated with daylight but in heavy shade.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturewhitebalancetemperatureandtintvaluesshadow?language=objc)
    pub static AVCaptureWhiteBalanceTemperatureAndTintValuesShadow:
        AVCaptureWhiteBalanceTemperatureAndTintValues;
}

extern "C" {
    /// A special value that may be passed as a parameter of setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler: to indicate that the caller does not wish to specify a value for deviceWhiteBalanceGains, and that gains should instead be locked at their value at the moment that white balance is locked.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturewhitebalancegainscurrent?language=objc)
    pub static AVCaptureWhiteBalanceGainsCurrent: AVCaptureWhiteBalanceGains;
}
2005
/// AVCaptureDeviceWhiteBalance.
impl AVCaptureDevice {
    extern_methods!(
        /// Returns whether the receiver supports the given white balance mode.
        ///
        ///
        /// Parameter `whiteBalanceMode`: An AVCaptureWhiteBalanceMode to be checked.
        ///
        /// Returns: YES if the receiver supports the given white balance mode, NO otherwise.
        ///
        ///
        /// The receiver's whiteBalanceMode property can only be set to a certain mode if this method returns YES for that mode.
        #[unsafe(method(isWhiteBalanceModeSupported:))]
        #[unsafe(method_family = none)]
        pub unsafe fn isWhiteBalanceModeSupported(
            &self,
            white_balance_mode: AVCaptureWhiteBalanceMode,
        ) -> bool;

        /// Indicates whether the receiver supports white balance gains other than AVCaptureWhiteBalanceGainsCurrent.
        ///
        ///
        /// If lockingWhiteBalanceWithCustomDeviceGainsSupported returns NO, setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains: may only be called with AVCaptureWhiteBalanceGainsCurrent. Passing any other white balance gains will result in an exception.
        #[unsafe(method(isLockingWhiteBalanceWithCustomDeviceGainsSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isLockingWhiteBalanceWithCustomDeviceGainsSupported(&self) -> bool;

        /// Indicates current white balance mode of the receiver, if it has adjustable white balance.
        ///
        ///
        /// The value of this property is an AVCaptureWhiteBalanceMode that determines the receiver's white balance mode, if it has adjustable white balance. -setWhiteBalanceMode: throws an NSInvalidArgumentException if set to an unsupported value (see -isWhiteBalanceModeSupported:). -setWhiteBalanceMode: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's whiteBalanceMode by key value observing this property.
        #[unsafe(method(whiteBalanceMode))]
        #[unsafe(method_family = none)]
        pub unsafe fn whiteBalanceMode(&self) -> AVCaptureWhiteBalanceMode;

        /// Setter for [`whiteBalanceMode`][Self::whiteBalanceMode].
        ///
        /// Throws NSInvalidArgumentException for unsupported modes and NSGenericException if called without lockForConfiguration: (see the getter's documentation above).
        #[unsafe(method(setWhiteBalanceMode:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setWhiteBalanceMode(&self, white_balance_mode: AVCaptureWhiteBalanceMode);

        /// Indicates whether the receiver is currently adjusting camera white balance.
        ///
        ///
        /// The value of this property is a BOOL indicating whether the receiver's camera white balance is being automatically adjusted because its white balance mode is AVCaptureWhiteBalanceModeAutoWhiteBalance or AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance. Clients can observe the value of this property to determine whether the camera white balance is stable or is being automatically adjusted.
        #[unsafe(method(isAdjustingWhiteBalance))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAdjustingWhiteBalance(&self) -> bool;

        /// Indicates the current device-specific RGB white balance gain values in use.
        ///
        ///
        /// This property specifies the current red, green, and blue gain values used for white balance. The values can be used to adjust color casts for a given scene. For each channel, only values between 1.0 and -maxWhiteBalanceGain are supported. This property is key-value observable. It can be read at any time, regardless of white balance mode, but can only be set via setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:.
        #[unsafe(method(deviceWhiteBalanceGains))]
        #[unsafe(method_family = none)]
        pub unsafe fn deviceWhiteBalanceGains(&self) -> AVCaptureWhiteBalanceGains;

        /// Indicates the current device-specific Gray World RGB white balance gain values in use.
        ///
        ///
        /// This property specifies the current red, green, and blue gain values derived from the current scene to deliver a neutral (or "Gray World") white point for white balance. Gray World values assume a neutral subject (e.g. a gray card) has been placed in the middle of the subject area and fills the center 50% of the frame. Clients can read these values and apply them to the device using setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:. For each channel, only values between 1.0 and -maxWhiteBalanceGain are supported. This property is key-value observable. It can be read at any time, regardless of white balance mode.
        #[unsafe(method(grayWorldDeviceWhiteBalanceGains))]
        #[unsafe(method_family = none)]
        pub unsafe fn grayWorldDeviceWhiteBalanceGains(&self) -> AVCaptureWhiteBalanceGains;

        /// Indicates the maximum supported value to which a channel in the AVCaptureWhiteBalanceGains may be set.
        ///
        ///
        /// This property does not change for the life of the receiver.
        #[unsafe(method(maxWhiteBalanceGain))]
        #[unsafe(method_family = none)]
        pub unsafe fn maxWhiteBalanceGain(&self) -> c_float;

        #[cfg(all(feature = "block2", feature = "objc2-core-media"))]
        /// Sets white balance to locked mode with explicit temperature and tint values.
        ///
        /// - Parameter whiteBalanceTemperatureAndTintValues: The white balance temperature and tint values, as computed from ``temperatureAndTintValuesForDeviceWhiteBalanceGains:`` method, ``AVCaptureWhiteBalanceTemperatureAndTintValues`` presets or manual input.
        ///
        /// - Parameter handler: A block to be called when white balance values have been set to the values specified and ``whiteBalanceMode`` is set to ``AVCaptureWhiteBalanceModeLocked``. If ``setWhiteBalanceModeLockedWithDeviceWhiteBalanceTemperatureAndTintValues:completionHandler:`` is called multiple times, the completion handlers are called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the ``AVCaptureSession/synchronizationClock`` prior to comparison with the timestamps of buffers delivered via an ``AVCaptureVideoDataOutput``. This parameter may be `nil` if synchronization is not required.
        ///
        /// This method takes a ``AVCaptureWhiteBalanceTemperatureAndTintValues`` struct and applies the appropriate ``AVCaptureWhiteBalanceGains``. This method throws an `NSRangeException` if any of the values are set to an unsupported level. This method throws an `NSGenericException` if called without first obtaining exclusive access to the device using ``AVCaptureDevice/lockForConfiguration:``.
        #[unsafe(method(setWhiteBalanceModeLockedWithDeviceWhiteBalanceTemperatureAndTintValues:completionHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setWhiteBalanceModeLockedWithDeviceWhiteBalanceTemperatureAndTintValues_completionHandler(
            &self,
            white_balance_temperature_and_tint_values: AVCaptureWhiteBalanceTemperatureAndTintValues,
            handler: Option<&block2::DynBlock<dyn Fn(CMTime)>>,
        );

        #[cfg(all(feature = "block2", feature = "objc2-core-media"))]
        /// Sets white balance to locked mode with explicit deviceWhiteBalanceGains values.
        ///
        ///
        /// Parameter `whiteBalanceGains`: The white balance gain values, as described in the documentation for the deviceWhiteBalanceGains property. A value of AVCaptureWhiteBalanceGainsCurrent can be used to indicate that the caller does not wish to specify a value for deviceWhiteBalanceGains.
        ///
        /// Parameter `handler`: A block to be called when white balance gains have been set to the values specified and whiteBalanceMode is set to AVCaptureWhiteBalanceModeLocked. If setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the `AVCaptureSession/synchronizationClock` prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. This parameter may be nil if synchronization is not required.
        ///
        ///
        /// Gain values are normalized to the minimum channel value to avoid brightness changes (e.g. R:2 G:2 B:4 will be normalized to R:1 G:1 B:2). For each channel in the whiteBalanceGains struct, only values between 1.0 and maxWhiteBalanceGain after normalization are supported. This method throws an NSRangeException if any of the whiteBalanceGains are set to an unsupported level. This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
        #[unsafe(method(setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains_completionHandler(
            &self,
            white_balance_gains: AVCaptureWhiteBalanceGains,
            handler: Option<&block2::DynBlock<dyn Fn(CMTime)>>,
        );

        /// Converts device-specific white balance RGB gain values to device-independent chromaticity values.
        ///
        ///
        /// Parameter `whiteBalanceGains`: White balance gain values, as described in the documentation for the deviceWhiteBalanceGains property. A value of AVCaptureWhiteBalanceGainsCurrent may not be used in this function.
        ///
        /// Returns: A fully populated AVCaptureWhiteBalanceChromaticityValues structure containing device-independent values.
        ///
        ///
        /// This method may be called on the receiver to convert device-specific white balance RGB gain values to device-independent chromaticity (little x, little y) values. For each channel in the whiteBalanceGains struct, only values between 1.0 and -maxWhiteBalanceGain are supported. This method throws an NSRangeException if any of the whiteBalanceGains are set to unsupported values.
        #[unsafe(method(chromaticityValuesForDeviceWhiteBalanceGains:))]
        #[unsafe(method_family = none)]
        pub unsafe fn chromaticityValuesForDeviceWhiteBalanceGains(
            &self,
            white_balance_gains: AVCaptureWhiteBalanceGains,
        ) -> AVCaptureWhiteBalanceChromaticityValues;

        /// Converts device-independent chromaticity values to device-specific white balance RGB gain values.
        ///
        ///
        /// Parameter `chromaticityValues`: Little x, little y chromaticity values as described in the documentation for AVCaptureWhiteBalanceChromaticityValues.
        ///
        /// Returns: A fully populated AVCaptureWhiteBalanceGains structure containing device-specific RGB gain values.
        ///
        ///
        /// This method may be called on the receiver to convert device-independent chromaticity values to device-specific RGB white balance gain values. This method throws an NSRangeException if any of the chromaticityValues are set outside the range [0,1]. Note that some x,y combinations yield out-of-range device RGB values that will cause an exception to be thrown if passed directly to -setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:. Be sure to check that red, green, and blue gain values are within the range of [1.0 - maxWhiteBalanceGain].
        #[unsafe(method(deviceWhiteBalanceGainsForChromaticityValues:))]
        #[unsafe(method_family = none)]
        pub unsafe fn deviceWhiteBalanceGainsForChromaticityValues(
            &self,
            chromaticity_values: AVCaptureWhiteBalanceChromaticityValues,
        ) -> AVCaptureWhiteBalanceGains;

        /// Converts device-specific white balance RGB gain values to device-independent temperature and tint values.
        ///
        ///
        /// Parameter `whiteBalanceGains`: White balance gain values, as described in the documentation for the deviceWhiteBalanceGains property. A value of AVCaptureWhiteBalanceGainsCurrent may not be used in this function.
        ///
        /// Returns: A fully populated AVCaptureWhiteBalanceTemperatureAndTintValues structure containing device-independent values.
        ///
        ///
        /// This method may be called on the receiver to convert device-specific white balance RGB gain values to device-independent temperature (in kelvin) and tint values. For each channel in the whiteBalanceGains struct, only values between 1.0 and -maxWhiteBalanceGain are supported. This method throws an NSRangeException if any of the whiteBalanceGains are set to unsupported values.
        #[unsafe(method(temperatureAndTintValuesForDeviceWhiteBalanceGains:))]
        #[unsafe(method_family = none)]
        pub unsafe fn temperatureAndTintValuesForDeviceWhiteBalanceGains(
            &self,
            white_balance_gains: AVCaptureWhiteBalanceGains,
        ) -> AVCaptureWhiteBalanceTemperatureAndTintValues;

        /// Converts device-independent temperature and tint values to device-specific white balance RGB gain values.
        ///
        ///
        /// Parameter `tempAndTintValues`: Temperature and tint values as described in the documentation for AVCaptureWhiteBalanceTemperatureAndTintValues.
        ///
        /// Returns: A fully populated AVCaptureWhiteBalanceGains structure containing device-specific RGB gain values.
        ///
        ///
        /// This method may be called on the receiver to convert device-independent temperature and tint values to device-specific RGB white balance gain values. You may pass any temperature and tint values and corresponding white balance gains will be produced. Note though that some temperature and tint combinations yield out-of-range device RGB values that will cause an exception to be thrown if passed directly to -setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:. Be sure to check that red, green, and blue gain values are within the range of [1.0 - maxWhiteBalanceGain].
        #[unsafe(method(deviceWhiteBalanceGainsForTemperatureAndTintValues:))]
        #[unsafe(method_family = none)]
        pub unsafe fn deviceWhiteBalanceGainsForTemperatureAndTintValues(
            &self,
            temp_and_tint_values: AVCaptureWhiteBalanceTemperatureAndTintValues,
        ) -> AVCaptureWhiteBalanceGains;
    );
}
2177
/// AVCaptureDeviceSubjectAreaChangeMonitoring.
impl AVCaptureDevice {
    extern_methods!(
        /// Indicates whether the receiver should monitor the subject area for changes.
        ///
        ///
        /// The value of this property is a BOOL indicating whether the receiver should monitor the video subject area for changes, such as lighting changes, substantial movement, etc. If subject area change monitoring is enabled, the receiver sends an AVCaptureDeviceSubjectAreaDidChangeNotification whenever it detects a change to the subject area, at which time an interested client may wish to re-focus, adjust exposure, white balance, etc. The receiver must be locked for configuration using lockForConfiguration: before clients can set the value of this property.
        #[unsafe(method(isSubjectAreaChangeMonitoringEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isSubjectAreaChangeMonitoringEnabled(&self) -> bool;

        /// Setter for [`isSubjectAreaChangeMonitoringEnabled`][Self::isSubjectAreaChangeMonitoringEnabled].
        ///
        /// The receiver must be locked for configuration using lockForConfiguration: before this value can be set (see the getter's documentation above).
        #[unsafe(method(setSubjectAreaChangeMonitoringEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setSubjectAreaChangeMonitoringEnabled(
            &self,
            subject_area_change_monitoring_enabled: bool,
        );
    );
}
2198
/// AVCaptureDeviceLowLightBoost.
// Generated binding for the Obj-C `AVCaptureDeviceLowLightBoost` category:
// two read-only state queries plus one read/write automatic-mode flag.
impl AVCaptureDevice {
    extern_methods!(
        /// Indicates whether the receiver supports boosting images in low light conditions.
        ///
        ///
        /// The receiver's automaticallyEnablesLowLightBoostWhenAvailable property can only be set if this property returns YES.
        #[unsafe(method(isLowLightBoostSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isLowLightBoostSupported(&self) -> bool;

        /// Indicates whether the receiver's low light boost feature is enabled.
        ///
        ///
        /// The value of this property is a BOOL indicating whether the receiver is currently enhancing images to improve quality due to low light conditions. When -isLowLightBoostEnabled returns YES, the receiver has switched into a special mode in which more light can be perceived in images. This property is key-value observable.
        #[unsafe(method(isLowLightBoostEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isLowLightBoostEnabled(&self) -> bool;

        /// Indicates whether the receiver should automatically switch to low light boost mode when necessary.
        ///
        ///
        /// On a receiver where -isLowLightBoostSupported returns YES, a special low light boost mode may be engaged to improve image quality. When the automaticallyEnablesLowLightBoostWhenAvailable property is set to YES, the receiver switches at its discretion to a special boost mode under low light, and back to normal operation when the scene becomes sufficiently lit. An AVCaptureDevice that supports this feature may only engage boost mode for certain source formats or resolutions. Clients may observe changes to the lowLightBoostEnabled property to know when the mode has engaged. The switch between normal operation and low light boost mode may drop one or more video frames. The default value is NO. Setting this property throws an NSInvalidArgumentException if -isLowLightBoostSupported returns NO. The receiver must be locked for configuration using lockForConfiguration: before clients can set this method, otherwise an NSGenericException is thrown.
        #[unsafe(method(automaticallyEnablesLowLightBoostWhenAvailable))]
        #[unsafe(method_family = none)]
        pub unsafe fn automaticallyEnablesLowLightBoostWhenAvailable(&self) -> bool;

        /// Setter for [`automaticallyEnablesLowLightBoostWhenAvailable`][Self::automaticallyEnablesLowLightBoostWhenAvailable].
        ///
        /// Per the getter's documentation: throws NSInvalidArgumentException when low light
        /// boost is unsupported, and NSGenericException when the device is not locked for
        /// configuration.
        #[unsafe(method(setAutomaticallyEnablesLowLightBoostWhenAvailable:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutomaticallyEnablesLowLightBoostWhenAvailable(
            &self,
            automatically_enables_low_light_boost_when_available: bool,
        );
    );
}
2235
/// AVCaptureDeviceVideoZoom.
// Generated binding for the Obj-C `AVCaptureDeviceVideoZoom` category.
// Methods returning `CGFloat` are gated on the `objc2-core-foundation` feature
// because `CGFloat` comes from that crate (see the imports at the top of the file).
impl AVCaptureDevice {
    extern_methods!(
        #[cfg(feature = "objc2-core-foundation")]
        /// Controls zoom level of image outputs
        ///
        ///
        /// Applies a centered crop for all image outputs, scaling as necessary to maintain output dimensions. Minimum value of 1.0 yields full field of view, increasing values will increase magnification, up to a maximum value specified in the activeFormat's videoMaxZoomFactor property. Modifying the zoom factor will cancel any active rampToVideoZoomFactor:withRate:, and snap directly to the assigned value. Assigning values outside the acceptable range will generate an NSRangeException. Clients can key value observe the value of this property. When depth data delivery is enabled, changing the zoom factor sets the videoZoomFactor to the nearest supportedVideoZoomFactor from -[AVCaptureDeviceFormat supportedVideoZoomFactorsForDepthDataDelivery] with a disruptive reconfiguration of the capture render pipeline.
        ///
        /// -setVideoZoomFactor: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
        ///
        ///
        /// See also: -[AVCaptureDeviceFormat videoMaxZoomFactor], -[AVCaptureDeviceFormat videoZoomFactorUpscaleThreshold], -[AVCaptureDevice minAvailableVideoZoomFactor], -[AVCaptureDevice maxAvailableVideoZoomFactor],  -[AVCaptureDeviceFormat supportedVideoZoomFactorsForDepthDataDelivery], -[AVCaptureDeviceFormat videoMinZoomFactorForCenterStage] and -[AVCaptureDeviceFormat videoMaxZoomFactorForCenterStage]
        #[unsafe(method(videoZoomFactor))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoZoomFactor(&self) -> CGFloat;

        #[cfg(feature = "objc2-core-foundation")]
        /// Setter for [`videoZoomFactor`][Self::videoZoomFactor].
        ///
        /// Per the getter's documentation: requires `lockForConfiguration:`, and
        /// out-of-range values raise NSRangeException.
        #[unsafe(method(setVideoZoomFactor:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setVideoZoomFactor(&self, video_zoom_factor: CGFloat);

        #[cfg(feature = "objc2-core-foundation")]
        /// Provides smooth changes in zoom factor.
        ///
        ///
        /// This method provides a change in zoom by compounding magnification at the specified rate over time. Although the zoom factor will grow exponentially, this yields a visually linear zoom in the image over time.
        ///
        /// The zoom transition will stop at the specified factor, which must be in the valid range for videoZoomFactor. Assignments to videoZoomFactor while a ramp is in progress will cancel the ramp and snap to the assigned value.
        ///
        /// The zoom factor is continuously scaled by pow(2,rate * time). A rate of 0 causes no change in zoom factor, equivalent to calling cancelVideoZoomRamp. A rate of 1 will cause the magnification to double every second (or halve every second if zooming out), and similarly larger or smaller values will zoom faster or slower respectively. Only the absolute value of the rate is significant--sign is corrected for the direction of the target. Changes in rate will be smoothed by an internal acceleration limit.
        ///
        /// When depth data delivery is enabled, -rampToVideoZoomFactor:withRate: sets the videoZoomFactor to the nearest supportedVideoZoomFactor from -[AVCaptureDeviceFormat supportedVideoZoomFactorsForDepthDataDelivery] with a disruptive reconfiguration of the capture render pipeline.
        ///
        /// -rampToVideoZoomFactor:withRate: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
        #[unsafe(method(rampToVideoZoomFactor:withRate:))]
        #[unsafe(method_family = none)]
        pub unsafe fn rampToVideoZoomFactor_withRate(&self, factor: CGFloat, rate: c_float);

        /// Indicates if the zoom factor is transitioning to a value set by rampToVideoZoomFactor:withRate:
        ///
        ///
        /// Clients can observe this value to determine when a ramp begins or completes.
        #[unsafe(method(isRampingVideoZoom))]
        #[unsafe(method_family = none)]
        pub unsafe fn isRampingVideoZoom(&self) -> bool;

        /// Eases out of any video zoom transitions initiated by rampToVideoZoomFactor:withRate:
        ///
        ///
        /// This method is equivalent to calling rampToVideoZoomFactor:withRate: using the current zoom factor target and a rate of 0. This allows a smooth stop to any changes in zoom which were in progress.
        ///
        /// -cancelVideoZoomRamp: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
        #[unsafe(method(cancelVideoZoomRamp))]
        #[unsafe(method_family = none)]
        pub unsafe fn cancelVideoZoomRamp(&self);

        #[cfg(feature = "objc2-core-foundation")]
        /// The video zoom factor at or above which a DualCamera can select between its wide angle camera and its telephoto camera.
        ///
        ///
        /// This is the zoom factor at which the wide angle camera's field of view matches telephoto camera's full field of view. On non-DualCamera devices this will return 1.0. As of iOS 13.0, this API has been deprecated in favor of virtualDeviceSwitchOverVideoZoomFactors.
        #[deprecated]
        #[unsafe(method(dualCameraSwitchOverVideoZoomFactor))]
        #[unsafe(method_family = none)]
        pub unsafe fn dualCameraSwitchOverVideoZoomFactor(&self) -> CGFloat;

        #[cfg(feature = "objc2-core-foundation")]
        /// A multiplier that can be used with the receiver's videoZoomFactor property for displaying a video zoom factor in a user interface.
        ///
        ///
        /// In some system user interfaces, like the macOS Video Effects Menu, the video zoom factor value is displayed in a way most appropriate for visual representation and might differ from the videoZoomFactor property value on the receiver by a fixed ratio. For example, if the videoZoomFactor property value is 1.0 and the displayVideoZoomFactorMultiplier property value is 0.5, then multiplying 1.0 and 0.5 produces 0.5 which can be displayed in the UI. Client applications can key value observe this property to update the display video zoom factor values in their UI to stay consistent with Apple's system UIs.
        #[unsafe(method(displayVideoZoomFactorMultiplier))]
        #[unsafe(method_family = none)]
        pub unsafe fn displayVideoZoomFactorMultiplier(&self) -> CGFloat;
    );
}
2314
/// Constants indicating the client's authorization to the underlying hardware supporting a media type.
///
/// The per-case meanings (in declaration order) are:
///
/// - `NotDetermined`: Indicates that the user has not yet made a choice regarding whether the client can access the hardware.
///
/// - `Restricted`: The client is not authorized to access the hardware for the media type. The user cannot change the client's status, possibly due to active restrictions such as parental controls being in place.
///
/// - `Denied`: The user explicitly denied access to the hardware supporting a media type for the client.
///
/// - `Authorized`: The client is authorized to access the hardware supporting a media type.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avauthorizationstatus?language=objc)
// NS_ENUM
// Modeled as a newtype over NSInteger (not a Rust enum) so that unknown values
// returned by future OS versions remain representable.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAuthorizationStatus(pub NSInteger);
impl AVAuthorizationStatus {
    #[doc(alias = "AVAuthorizationStatusNotDetermined")]
    pub const NotDetermined: Self = Self(0);
    #[doc(alias = "AVAuthorizationStatusRestricted")]
    pub const Restricted: Self = Self(1);
    #[doc(alias = "AVAuthorizationStatusDenied")]
    pub const Denied: Self = Self(2);
    #[doc(alias = "AVAuthorizationStatusAuthorized")]
    pub const Authorized: Self = Self(3);
}

// SAFETY: `AVAuthorizationStatus` is `#[repr(transparent)]` over `NSInteger`,
// so it shares `NSInteger`'s Obj-C type encoding.
unsafe impl Encode for AVAuthorizationStatus {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// SAFETY: a reference/pointer to the type is encoded as a pointer to the
// value encoding above.
unsafe impl RefEncode for AVAuthorizationStatus {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
2349
/// AVCaptureDeviceAuthorization.
// Generated binding for the Obj-C `AVCaptureDeviceAuthorization` category.
// Both methods are Obj-C class methods (no `&self` parameter), so they are
// called on the `AVCaptureDevice` class itself. Feature gates mirror the
// crates/modules the parameter types come from (`AVMediaType`, `block2`).
impl AVCaptureDevice {
    extern_methods!(
        #[cfg(feature = "AVMediaFormat")]
        /// Returns the client's authorization status for accessing the underlying hardware that supports a given media type.
        ///
        ///
        /// Parameter `mediaType`: The media type, either AVMediaTypeVideo or AVMediaTypeAudio
        ///
        /// Returns: The authorization status of the client
        ///
        ///
        /// This method returns the AVAuthorizationStatus of the client for accessing the underlying hardware supporting the media type. Media type constants are defined in AVMediaFormat.h. If any media type other than AVMediaTypeVideo or AVMediaTypeAudio is supplied, an NSInvalidArgumentException will be thrown. If the status is AVAuthorizationStatusNotDetermined, you may use the +requestAccessForMediaType:completionHandler: method to request access by prompting the user.
        #[unsafe(method(authorizationStatusForMediaType:))]
        #[unsafe(method_family = none)]
        pub unsafe fn authorizationStatusForMediaType(
            media_type: &AVMediaType,
        ) -> AVAuthorizationStatus;

        #[cfg(all(feature = "AVMediaFormat", feature = "block2"))]
        /// Requests access to the underlying hardware for the media type, showing a dialog to the user if necessary.
        ///
        ///
        /// Parameter `mediaType`: The media type, either AVMediaTypeVideo or AVMediaTypeAudio
        ///
        /// Parameter `handler`: A block called with the result of requesting access
        ///
        ///
        /// Use this function to request access to the hardware for a given media type. Media type constants are defined in AVMediaFormat.h. If any media type other than AVMediaTypeVideo or AVMediaTypeAudio is supplied, an NSInvalidArgumentException will be thrown.
        ///
        /// This call will not block while the user is being asked for access, allowing the client to continue running. Until access has been granted, any AVCaptureDevices for the media type will vend silent audio samples or black video frames. The user is only asked for permission the first time the client requests access. Later calls use the permission granted by the user.
        ///
        /// Note that the authorization dialog will automatically be shown if the status is AVAuthorizationStatusNotDetermined when creating an AVCaptureDeviceInput.
        ///
        /// Invoking this method with AVMediaTypeAudio is equivalent to calling -[AVAudioSession requestRecordPermission:].
        ///
        /// The completion handler is called on an arbitrary dispatch queue. It is the client's responsibility to ensure that any UIKit-related updates are called on the main queue or main thread as a result.
        #[unsafe(method(requestAccessForMediaType:completionHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn requestAccessForMediaType_completionHandler(
            media_type: &AVMediaType,
            handler: &block2::DynBlock<dyn Fn(Bool)>,
        );
    );
}
2395
/// A constant that is used to specify the transport controls' speed.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetransportcontrolsspeed?language=objc)
// Plain alias: the Obj-C typedef is a float, so no newtype wrapper is generated.
pub type AVCaptureDeviceTransportControlsSpeed = c_float;

/// Constants indicating the transport controls' current mode of play back, if it has one.
///
/// The per-case meanings (in declaration order) are:
///
/// - `NotPlayingMode`: Indicates that the tape transport is not threaded through the play head.
///
/// - `PlayingMode`: Indicates that the tape transport is threaded through the play head.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicetransportcontrolsplaybackmode?language=objc)
// NS_ENUM
// Modeled as a newtype over NSInteger (not a Rust enum) so that unknown values
// returned by future OS versions remain representable.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureDeviceTransportControlsPlaybackMode(pub NSInteger);
impl AVCaptureDeviceTransportControlsPlaybackMode {
    #[doc(alias = "AVCaptureDeviceTransportControlsNotPlayingMode")]
    pub const NotPlayingMode: Self = Self(0);
    #[doc(alias = "AVCaptureDeviceTransportControlsPlayingMode")]
    pub const PlayingMode: Self = Self(1);
}

// SAFETY: `AVCaptureDeviceTransportControlsPlaybackMode` is `#[repr(transparent)]`
// over `NSInteger`, so it shares `NSInteger`'s Obj-C type encoding.
unsafe impl Encode for AVCaptureDeviceTransportControlsPlaybackMode {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// SAFETY: a reference/pointer to the type is encoded as a pointer to the
// value encoding above.
unsafe impl RefEncode for AVCaptureDeviceTransportControlsPlaybackMode {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
2427
/// AVCaptureDeviceTransportControls.
// Generated binding for the Obj-C `AVCaptureDeviceTransportControls` category
// (deck control for tape-based/pro capture devices): three read-only queries
// plus one combined mode+speed setter.
impl AVCaptureDevice {
    extern_methods!(
        /// Returns whether the receiver supports transport control commands.
        ///
        ///
        /// For devices with transport controls, such as AVC tape-based camcorders or pro capture devices with RS422 deck control, the value of this property is YES. If transport controls are not supported, none of the associated transport control methods and properties are available on the receiver.
        #[unsafe(method(transportControlsSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn transportControlsSupported(&self) -> bool;

        /// Returns the receiver's current playback mode.
        ///
        ///
        /// For devices that support transport control, this property may be queried to discover the current playback mode.
        #[unsafe(method(transportControlsPlaybackMode))]
        #[unsafe(method_family = none)]
        pub unsafe fn transportControlsPlaybackMode(
            &self,
        ) -> AVCaptureDeviceTransportControlsPlaybackMode;

        /// Returns the receiver's current playback speed as a floating point value.
        ///
        ///
        /// For devices that support transport control, this property may be queried to discover the current playback speed of the deck.
        /// 0.0 -> stopped.
        /// 1.0 -> forward at normal speed.
        /// -1.0-> reverse at normal speed.
        /// 2.0 -> forward at 2x normal speed.
        /// etc.
        #[unsafe(method(transportControlsSpeed))]
        #[unsafe(method_family = none)]
        pub unsafe fn transportControlsSpeed(&self) -> AVCaptureDeviceTransportControlsSpeed;

        /// Sets both the transport controls playback mode and speed in a single method.
        ///
        ///
        /// Parameter `mode`: A AVCaptureDeviceTransportControlsPlaybackMode indicating whether the deck should be put into play mode.
        ///
        /// Parameter `speed`: A AVCaptureDeviceTransportControlsSpeed indicating the speed at which to wind or play the tape.
        ///
        ///
        /// A method for setting the receiver's transport controls playback mode and speed. The receiver must be locked for configuration using lockForConfiguration: before clients can set this method, otherwise an NSGenericException is thrown.
        #[unsafe(method(setTransportControlsPlaybackMode:speed:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setTransportControlsPlaybackMode_speed(
            &self,
            mode: AVCaptureDeviceTransportControlsPlaybackMode,
            speed: AVCaptureDeviceTransportControlsSpeed,
        );
    );
}
2480
/// AVCaptureDeviceHighDynamicRangeSupport.
// Generated binding for the Obj-C `AVCaptureDeviceHighDynamicRangeSupport`
// category: the automatic-adjust flag and the videoHDREnabled flag. Per the
// docs below, `automaticallyAdjustsVideoHDREnabled` must be set to NO before
// `setVideoHDREnabled` may be called.
impl AVCaptureDevice {
    extern_methods!(
        /// Indicates whether the receiver is allowed to turn high dynamic range streaming on or off.
        ///
        ///
        /// The value of this property is a BOOL indicating whether the receiver is free to turn high dynamic range streaming on or off. This property defaults to YES. When automaticallyAdjustsVideoHDREnabled, the AVCaptureDevice turns videoHDR on automatically if it's a good fit for the activeFormat. -setAutomaticallyAdjustsVideoHDREnabled: throws an NSGenericException if called without first obtaining exclusive access to the receiver using -lockForConfiguration:. Clients can key-value observe videoHDREnabled to know when the receiver has automatically changed the value.
        #[unsafe(method(automaticallyAdjustsVideoHDREnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn automaticallyAdjustsVideoHDREnabled(&self) -> bool;

        /// Setter for [`automaticallyAdjustsVideoHDREnabled`][Self::automaticallyAdjustsVideoHDREnabled].
        ///
        /// Per the getter's documentation: requires `lockForConfiguration:`, otherwise
        /// an NSGenericException is thrown.
        #[unsafe(method(setAutomaticallyAdjustsVideoHDREnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutomaticallyAdjustsVideoHDREnabled(
            &self,
            automatically_adjusts_video_hdr_enabled: bool,
        );

        /// Indicates whether the receiver's streaming high dynamic range feature is enabled. See AVCaptureDeviceFormat.isVideoHDRSupported.
        ///
        ///
        /// The value of this property is a BOOL indicating whether the receiver is currently streaming high dynamic range video buffers, also known as Extended Dynamic Range (EDR). The value of this property is ignored when device.activeColorSpace is HLG BT2020 color space since HDR is effectively always on and can't be disabled. The property may only be set if you first set automaticallyAdjustsVideoHDREnabled to NO, otherwise an NSGenericException is thrown. videoHDREnabled may only be set to YES if the receiver's activeFormat.isVideoHDRSupported property returns YES, otherwise an NSGenericException is thrown. This property may be key-value observed.
        ///
        /// Note that setting this property may cause a lengthy reconfiguration of the receiver, similar to setting a new active format or AVCaptureSession sessionPreset. If you are setting either the active format or the AVCaptureSession's sessionPreset AND this property, you should bracket these operations with [session beginConfiguration] and [session commitConfiguration] to minimize reconfiguration time.
        #[unsafe(method(isVideoHDREnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isVideoHDREnabled(&self) -> bool;

        /// Setter for [`isVideoHDREnabled`][Self::isVideoHDREnabled].
        ///
        /// Per the getter's documentation: only settable after
        /// `automaticallyAdjustsVideoHDREnabled` has been set to NO, and only to YES
        /// when the active format supports video HDR.
        #[unsafe(method(setVideoHDREnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setVideoHDREnabled(&self, video_hdr_enabled: bool);
    );
}
2516
/// Constants indicating active or supported video color space.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturecolorspace?language=objc)
// NS_ENUM
// Modeled as a newtype over NSInteger (not a Rust enum) so that unknown values
// returned by future OS versions remain representable.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureColorSpace(pub NSInteger);
impl AVCaptureColorSpace {
    /// The sRGB color space ( https://www.w3.org/Graphics/Color/srgb ).
    #[doc(alias = "AVCaptureColorSpace_sRGB")]
    pub const sRGB: Self = Self(0);
    /// The P3 D65 wide color space which uses Illuminant D65 as the white point.
    #[doc(alias = "AVCaptureColorSpace_P3_D65")]
    pub const P3_D65: Self = Self(1);
    /// The BT2020 wide color space which uses Illuminant D65 as the white point and Hybrid Log-Gamma as the transfer function.
    #[doc(alias = "AVCaptureColorSpace_HLG_BT2020")]
    pub const HLG_BT2020: Self = Self(2);
    /// The Apple Log Color space, which uses BT2020 as the color primaries, and an Apple defined Log curve as a transfer function. When you set this as the active color space on an ``AVCaptureDevice``, any ``AVCapturePhotoOutput`` or ``AVCaptureStillImageOutput`` connected to the same ``AVCaptureDevice`` is made inactive (its ``AVCaptureConnection/active`` property returns `false`).
    #[doc(alias = "AVCaptureColorSpace_AppleLog")]
    pub const AppleLog: Self = Self(3);
    /// The Apple Log 2 Color space, which uses Apple Gamut as the color primaries, and an Apple defined Log curve as a transfer function. When you set this as the active color space on an ``AVCaptureDevice``, any ``AVCapturePhotoOutput`` or ``AVCaptureStillImageOutput`` connected to the same ``AVCaptureDevice`` is made inactive (its ``AVCaptureConnection/active`` property returns `false`).
    #[doc(alias = "AVCaptureColorSpace_AppleLog2")]
    pub const AppleLog2: Self = Self(4);
}

// SAFETY: `AVCaptureColorSpace` is `#[repr(transparent)]` over `NSInteger`,
// so it shares `NSInteger`'s Obj-C type encoding.
unsafe impl Encode for AVCaptureColorSpace {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// SAFETY: a reference/pointer to the type is encoded as a pointer to the
// value encoding above.
unsafe impl RefEncode for AVCaptureColorSpace {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
2549
/// AVCaptureDeviceColorSpaceSupport.
// Generated binding for the Obj-C `AVCaptureDeviceColorSpaceSupport` category:
// getter/setter pair for the device's active color space.
impl AVCaptureDevice {
    extern_methods!(
        /// Indicates the receiver's current active color space.
        ///
        ///
        /// By default, an AVCaptureDevice attached to an AVCaptureSession is automatically configured for wide color by the AVCaptureSession (see AVCaptureSession automaticallyConfiguresCaptureDeviceForWideColor). You may also set the activeColorSpace manually. To prevent the AVCaptureSession from undoing your work, remember to set AVCaptureSession's automaticallyConfiguresCaptureDeviceForWideColor property to NO. Changing the receiver's activeColorSpace while the session is running requires a disruptive reconfiguration of the capture render pipeline. Movie captures in progress will be ended immediately; unfulfilled photo requests will be aborted; video preview will temporarily freeze. -setActiveColorSpace: throws an NSGenericException if called without first obtaining exclusive access to the receiver using -lockForConfiguration:.
        #[unsafe(method(activeColorSpace))]
        #[unsafe(method_family = none)]
        pub unsafe fn activeColorSpace(&self) -> AVCaptureColorSpace;

        /// Setter for [`activeColorSpace`][Self::activeColorSpace].
        ///
        /// Per the getter's documentation: requires `lockForConfiguration:`, and changing
        /// the value while the session runs triggers a disruptive pipeline reconfiguration.
        #[unsafe(method(setActiveColorSpace:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setActiveColorSpace(&self, active_color_space: AVCaptureColorSpace);
    );
}
2567
/// AVCaptureDeviceDepthSupport.
// Generated binding for the Obj-C `AVCaptureDeviceDepthSupport` category.
// `CMTime`-typed members are gated on `objc2-core-media`, `CGFloat`-typed ones
// on `objc2-core-foundation`, matching the crates those types come from.
impl AVCaptureDevice {
    extern_methods!(
        /// The currently active depth data format of the receiver.
        ///
        ///
        /// This property can be used to get or set the device's currently active depth data format. -setActiveDepthDataFormat: throws an NSInvalidArgumentException if set to a format not present in the activeFormat's -supportedDepthDataFormats array. -setActiveDepthDataFormat: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's activeDepthDataFormat by key value observing this property. On devices where depth data is not supported, this property returns nil.
        ///
        /// The frame rate of depth data may not be set directly. Depth data frame rate is synchronized to the device's activeMin/MaxFrameDurations. It may match the device's current frame rate, or lower, if depth data cannot be produced fast enough for the active video frame rate.
        ///
        /// Delivery of depth data to a AVCaptureDepthDataOutput may increase the system load, resulting in a reduced video frame rate for thermal sustainability.
        #[unsafe(method(activeDepthDataFormat))]
        #[unsafe(method_family = none)]
        pub unsafe fn activeDepthDataFormat(&self) -> Option<Retained<AVCaptureDeviceFormat>>;

        /// Setter for [`activeDepthDataFormat`][Self::activeDepthDataFormat].
        ///
        /// Per the getter's documentation: throws NSInvalidArgumentException for formats
        /// not in the active format's supported list, and NSGenericException when the
        /// device is not locked for configuration.
        #[unsafe(method(setActiveDepthDataFormat:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setActiveDepthDataFormat(
            &self,
            active_depth_data_format: Option<&AVCaptureDeviceFormat>,
        );

        #[cfg(feature = "objc2-core-media")]
        /// A property indicating the receiver's current active minimum depth data frame duration (the reciprocal of its maximum depth data frame rate).
        ///
        ///
        /// This property may be used to set an upper limit to the frame rate at which depth data is produced. Lowering the depth data frame rate typically lowers power consumption which will increase the time the camera can run before an elevated system pressure state is reached.
        ///
        /// Setting this property to kCMTimeInvalid resets it to the active depth data format's default min frame duration. Setting this property to kCMTimePositiveInfinity results in a depth data frame rate of 0.
        ///
        /// The activeDepthDataMinFrameDuration gets reset whenever either the active video format or the active depth data format changes.
        ///
        /// -setActiveDepthDataMinFrameDuration: throws an NSRangeException if set to a value that is outside of the active depth data format's supported frame rate range.
        /// -setActiveDepthDataMinFrameDuration: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
        #[unsafe(method(activeDepthDataMinFrameDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn activeDepthDataMinFrameDuration(&self) -> CMTime;

        #[cfg(feature = "objc2-core-media")]
        /// Setter for [`activeDepthDataMinFrameDuration`][Self::activeDepthDataMinFrameDuration].
        ///
        /// Per the getter's documentation: requires `lockForConfiguration:`, and
        /// out-of-range durations raise NSRangeException.
        #[unsafe(method(setActiveDepthDataMinFrameDuration:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setActiveDepthDataMinFrameDuration(
            &self,
            active_depth_data_min_frame_duration: CMTime,
        );

        #[cfg(feature = "objc2-core-foundation")]
        /// Indicates the minimum zoom factor available for the AVCaptureDevice's videoZoomFactor property.
        ///
        ///
        /// On non-virtual devices the minAvailableVideoZoomFactor is always 1.0. If the device's videoZoomFactor property is assigned a value smaller than 1.0, an NSRangeException is thrown.
        /// On a virtual device the minAvailableVideoZoomFactor can change when the device is delivering depth data to one or more outputs (see -[AVCaptureDeviceFormat supportedVideoZoomFactorsForDepthDataDelivery]). When depth data delivery is enabled, allowed zoom factor values are governed by -[AVCaptureDeviceFormat supportedVideoZoomFactorsForDepthDataDelivery] and this contains the absolute minimum zoom of all allowed zoom factors.
        /// Setting the videoZoomFactor to a value greater than or equal to 1.0, but lower than minAvailableVideoZoomFactor results in the value being clamped to the minAvailableVideoZoomFactor. Clients can key value observe the value of this property.
        #[unsafe(method(minAvailableVideoZoomFactor))]
        #[unsafe(method_family = none)]
        pub unsafe fn minAvailableVideoZoomFactor(&self) -> CGFloat;

        #[cfg(feature = "objc2-core-foundation")]
        /// Indicates the maximum zoom factor available for the AVCaptureDevice's videoZoomFactor property.
        ///
        ///
        /// On non-virtual devices the maxAvailableVideoZoomFactor is always equal to the activeFormat.videoMaxZoomFactor. If the device's videoZoomFactor property is assigned a value greater than activeFormat.videoMaxZoomFactor, an NSRangeException is thrown.
        /// On a virtual device the maxAvailableVideoZoomFactor can change when the device is delivering depth data to one or more outputs (see -[AVCaptureDeviceFormat supportedVideoZoomFactorsForDepthDataDelivery]). When depth data delivery is enabled, allowed zoom factor values are governed by -[AVCaptureDeviceFormat supportedVideoZoomFactorsForDepthDataDelivery] and this contains the absolute maximum zoom of all allowed zoom factors.
        /// Setting the videoZoomFactor to a value less than or equal to activeFormat.videoMaxZoomFactor, but greater than maxAvailableVideoZoomFactor results in the value being clamped to the maxAvailableVideoZoomFactor. Clients can key value observe the value of this property.
        #[unsafe(method(maxAvailableVideoZoomFactor))]
        #[unsafe(method_family = none)]
        pub unsafe fn maxAvailableVideoZoomFactor(&self) -> CGFloat;
    );
}
2639
/// AVCaptureDeviceGeometricDistortionCorrection.
impl AVCaptureDevice {
    extern_methods!(
        /// Indicates that geometric distortion correction is supported by the receiver.
        ///
        ///
        /// Some AVCaptureDevices benefit from geometric distortion correction (GDC), such as devices with a very wide field of view. GDC lessens the fisheye effect at the outer edge of the frame at the cost of losing a small amount of vertical and horizontal field of view. When GDC is enabled on the AVCaptureDevice (see geometricDistortionCorrectionEnabled), the corrected image is upscaled to the original image size when needed. With respect to the AVCaptureDevice.videoZoomFactor API, the full viewable field of view is always represented with a videoZoomFactor of 1.0. Thus, when GDC is enabled, the AVCaptureDevice.activeFormat's field of view at videoZoomFactor = 1.0 will be different than when GDC is disabled. The smaller field of view is reported through the activeFormat's geometricDistortionCorrectedVideoFieldOfView property. Beware though that RAW photo captures never have GDC applied, regardless of the value of AVCaptureDevice.geometricDistortionCorrectionEnabled.
        #[unsafe(method(isGeometricDistortionCorrectionSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isGeometricDistortionCorrectionSupported(&self) -> bool;

        /// Indicates whether geometric distortion correction is enabled by the receiver.
        ///
        ///
        /// Where supported, the default value is YES. The receiver must be locked for configuration using lockForConfiguration: before clients can set this method, otherwise an NSGenericException is thrown.
        ///
        /// In the case of ProRes RAW, when geometricDistortionCorrectionEnabled is YES, GDC is applied to your outputs in different ways:
        /// - It is always applied to AVCaptureVideoPreviewLayer.
        /// - It is applied to AVCaptureVideoDataOutput only if deliversPreviewSizedOutputBuffers is set to YES.
        /// - It is never applied to AVCaptureMovieFileOutput.
        ///
        /// When GDC is enabled, AVCaptureVideoDataOutput buffers contain GDC metadata attachments, and AVCaptureMovieFileOutput movies contain GDC metadata which an application supporting ProRes RAW can optionally apply at playback time using the ProRes RAW SDK. To learn more about the ProRes RAW SDK, refer to the Apple ProRes and ProRes RAW Authorized Products article at https://support.apple.com/en-us/118584.
        #[unsafe(method(isGeometricDistortionCorrectionEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isGeometricDistortionCorrectionEnabled(&self) -> bool;

        /// Setter for [`isGeometricDistortionCorrectionEnabled`][Self::isGeometricDistortionCorrectionEnabled].
        ///
        /// The receiver must first be locked for configuration using lockForConfiguration:,
        /// otherwise an NSGenericException is thrown (see the getter's documentation above).
        #[unsafe(method(setGeometricDistortionCorrectionEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setGeometricDistortionCorrectionEnabled(
            &self,
            geometric_distortion_correction_enabled: bool,
        );
    );
}
2675
/// AVCaptureDeviceCalibration.
impl AVCaptureDevice {
    extern_methods!(
        /// An NSData containing the relative extrinsic matrix from one AVCaptureDevice to another.
        ///
        /// Parameter `fromDevice`: The AVCaptureDevice to use as the source. Must be non nil or an NSInvalidArgumentException is thrown.
        ///
        /// Parameter `toDevice`: The AVCaptureDevice to use as the destination. Must be non nil or an NSInvalidArgumentException is thrown.
        ///
        ///
        /// The extrinsic matrix consists of a unitless 3x3 rotation matrix (R) on the left and a translation (t) 3x1 column vector on the right. The translation vector's units are millimeters. The extrinsics of the "toDevice" camera are expressed with respect to a reference camera "fromDevice". If X_from is a 3D point in "fromCamera"'s coordinate system, then it can be projected into "toCamera"'s coordinate system with X_to = [R | t] * X_from. Note that a matrix_float4x3 matrix is column major with 3 rows and 4 columns. The extrinsicMatrix is only provided for physical cameras for which factory calibrations exist. Virtual device cameras return nil.
        ///
        /// ```text
        ///           | r1,1  r1,2  r1,3 | t1 |
        /// [R | t] = | r2,1  r2,2  r2,3 | t2 |
        ///           | r3,1  r3,2  r3,3 | t3 |
        /// ```
        ///
        /// Note that if you enable video stabilization (see AVCaptureConnection.preferredVideoStabilizationMode), the pixels in stabilized video frames no longer match the relative extrinsicMatrix from one device to another due to warping. The extrinsicMatrix and camera intrinsics should only be used when video stabilization is disabled.
        #[unsafe(method(extrinsicMatrixFromDevice:toDevice:))]
        #[unsafe(method_family = none)]
        pub unsafe fn extrinsicMatrixFromDevice_toDevice(
            from_device: &AVCaptureDevice,
            to_device: &AVCaptureDevice,
        ) -> Option<Retained<NSData>>;
    );
}
2707
/// Constants indicating the current Center Stage control mode.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturecenterstagecontrolmode?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureCenterStageControlMode(pub NSInteger);
impl AVCaptureCenterStageControlMode {
    /// Indicates that the application is unaware of the Center Stage feature. Its enablement is entirely under user control in Control Center.
    #[doc(alias = "AVCaptureCenterStageControlModeUser")]
    pub const User: Self = Self(0);
    /// Indicates that the application controls the Center Stage feature, disallowing input from the user in Control Center.
    #[doc(alias = "AVCaptureCenterStageControlModeApp")]
    pub const App: Self = Self(1);
    /// Indicates that both the user and application cooperatively share control of the Center Stage feature.
    #[doc(alias = "AVCaptureCenterStageControlModeCooperative")]
    pub const Cooperative: Self = Self(2);
}

// The NS_ENUM is an NSInteger in Objective-C, so it uses NSInteger's encoding.
unsafe impl Encode for AVCaptureCenterStageControlMode {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

unsafe impl RefEncode for AVCaptureCenterStageControlMode {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
2738
/// AVCaptureDeviceCenterStage.
impl AVCaptureDevice {
    extern_methods!(
        /// A class property indicating the current mode of Center Stage control (user, app, or cooperative).
        ///
        ///
        /// This class property determines how the Center Stage feature is controlled. When set to the default value of AVCaptureCenterStageControlModeUser, centerStageEnabled may not be set programmatically and throws an NSInvalidArgumentException. In User mode, the feature may only be set by the user in Control Center. If you wish to take Center Stage control away from the user and exclusively enable / disable it programmatically, set this property to AVCaptureCenterStageControlModeApp. When under exclusive app control, Center Stage user control is disallowed (for instance, the toggle is grayed out in Control Center). If you wish to take control of Center Stage, but also cooperate with the user by listening for and appropriately reacting to their changes to the centerStageEnabled property, set this property to AVCaptureCenterStageControlModeCooperative. Note that in this mode, the onus is on you, the app developer, to honor user intent and conform your AVCaptureSession configuration to make Center Stage active (see the AVCaptureDevice instance property centerStageActive). In cooperative mode, the centerStageEnabled property may change at any time (such as when the user enables / disables the feature in Control Center).
        #[unsafe(method(centerStageControlMode))]
        #[unsafe(method_family = none)]
        pub unsafe fn centerStageControlMode() -> AVCaptureCenterStageControlMode;

        /// Setter for [`centerStageControlMode`][Self::centerStageControlMode].
        ///
        /// This is a class property; it affects Center Stage control for the whole
        /// application (see the getter's documentation above).
        #[unsafe(method(setCenterStageControlMode:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCenterStageControlMode(
            center_stage_control_mode: AVCaptureCenterStageControlMode,
        );

        /// A class property indicating whether the Center Stage feature is currently enabled or disabled (such as in Control Center or programmatically via your app).
        ///
        ///
        /// This property may only be set if centerStageControlMode is AVCaptureCenterStageControlModeApp or AVCaptureCenterStageControlModeCooperative, and otherwise throws an NSInvalidArgumentException. When centerStageControlMode is AVCaptureCenterStageControlModeUser or AVCaptureCenterStageControlModeCooperative, this property may change according to user desire (such as enabling / disabling the feature in Control Center), so you should key-value observe it.
        #[unsafe(method(isCenterStageEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isCenterStageEnabled() -> bool;

        /// Setter for [`isCenterStageEnabled`][Self::isCenterStageEnabled].
        ///
        /// Throws an NSInvalidArgumentException unless centerStageControlMode is
        /// AVCaptureCenterStageControlModeApp or AVCaptureCenterStageControlModeCooperative
        /// (see the getter's documentation above).
        #[unsafe(method(setCenterStageEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCenterStageEnabled(center_stage_enabled: bool);

        /// Indicates whether Center Stage is currently active on a particular AVCaptureDevice.
        ///
        ///
        /// This readonly property returns YES when Center Stage is currently active on the receiver. When active, the camera automatically adjusts to keep people optimally framed within the field of view. The field of view may pan, tighten or widen as needed. Certain restrictions come into play when Center Stage is active:
        /// - The device's minAvailableVideoZoomFactor and maxAvailableVideoZoomFactor become restricted (see AVCaptureDeviceFormat's videoMinZoomFactorForCenterStage and videoMaxZoomFactorForCenterStage).
        /// - The device's activeVideoMinFrameDuration and activeVideoMaxFrameDuration are limited (see AVCaptureDeviceFormat's videoFrameRateRangeForCenterStage).
        /// Center Stage may be enabled via user control or application control, depending on the current +AVCaptureDevice.centerStageControlMode. When +AVCaptureDevice.centerStageEnabled is YES, a particular AVCaptureDevice instance may return YES for this property, depending whether it supports the feature in its current configuration. Some device features are mutually exclusive to Center Stage:
        /// - If depth data delivery is enabled on any output, such as AVCaptureDepthDataOutput, or -AVCapturePhotoOutput.depthDataDeliveryEnabled, Center Stage is deactivated.
        /// - If geometricDistortionCorrectionSupported is YES, geometricDistortionCorrectionEnabled must also be YES, or Center Stage is deactivated.
        /// This property is key-value observable.
        #[unsafe(method(isCenterStageActive))]
        #[unsafe(method_family = none)]
        pub unsafe fn isCenterStageActive(&self) -> bool;

        /// Indicates whether the device supports the Center Stage Rect of Interest feature.
        ///
        ///
        /// This property returns YES if the device supports Center Stage Rect of Interest.
        #[unsafe(method(isCenterStageRectOfInterestSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isCenterStageRectOfInterestSupported(&self) -> bool;

        #[cfg(feature = "objc2-core-foundation")]
        /// Specifies the effective region within the output pixel buffer that will be used to perform Center Stage framing.
        ///
        ///
        /// Applications that wish to apply additional processing (such as cropping) on top of Center Stage's output can use this property to guide Center Stage's framing.
        ///
        /// The rectangle's origin is top left and is relative to the coordinate space of the output pixel buffer. The default value of this property is the value CGRectMake(0, 0, 1, 1), where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. This rectangle of interest is applied prior to rotation, mirroring or scaling.
        ///
        /// Pixels outside of this rectangle of interest will be blackened out.
        ///
        /// Setting this property has no impact on objects specified in the metadata output.
        ///
        /// -setCenterStageRectOfInterest: throws an NSGenericException if called without first obtaining exclusive access to the receiver using -lockForConfiguration:. -setCenterStageRectOfInterest: throws an NSInvalidArgumentException if none of the AVCaptureDeviceFormats supported by the receiver support CenterStage. -setCenterStageRectOfInterest: throws an NSInvalidArgumentException if +centerStageEnabled is NO on the AVCaptureDevice class. -setCenterStageRectOfInterest: throws an NSInvalidArgumentException if the provided rectOfInterest goes outside the normalized (0-1) coordinate space.
        #[unsafe(method(centerStageRectOfInterest))]
        #[unsafe(method_family = none)]
        pub unsafe fn centerStageRectOfInterest(&self) -> CGRect;

        #[cfg(feature = "objc2-core-foundation")]
        /// Setter for [`centerStageRectOfInterest`][Self::centerStageRectOfInterest].
        ///
        /// See the getter's documentation above for the exceptions this setter can throw.
        #[unsafe(method(setCenterStageRectOfInterest:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCenterStageRectOfInterest(&self, center_stage_rect_of_interest: CGRect);
    );
}
2816
/// AVCaptureDevicePortraitEffect.
impl AVCaptureDevice {
    extern_methods!(
        /// A class property indicating whether the Portrait Effect feature is currently enabled in Control Center.
        ///
        ///
        /// This property changes to reflect the Portrait Effect state in Control Center. It is key-value observable. On iOS, Portrait Effect only applies to video conferencing apps by default (apps that use "voip" as one of their UIBackgroundModes). Non video conferencing apps may opt in for the Portrait Effect by adding the following key to their Info.plist:
        ///
        /// ```text
        /// <key>NSCameraPortraitEffectEnabled</key>
        /// <true/>
        /// ```
        #[unsafe(method(isPortraitEffectEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isPortraitEffectEnabled() -> bool;

        /// Indicates whether Portrait Effect is currently active for a particular AVCaptureDevice.
        ///
        ///
        /// This readonly property returns YES when Portrait Effect is currently active on the receiver. When active, the device blurs the background, simulating a shallow depth of field effect. Certain restrictions come into play when Portrait Effect is active:
        /// - The device's activeVideoMinFrameDuration and activeVideoMaxFrameDuration are limited (see AVCaptureDeviceFormat's videoFrameRateRangeForPortraitEffect).
        /// Note that when +AVCaptureDevice.portraitEffectEnabled is YES, a particular AVCaptureDevice instance may return YES for this property, depending whether it supports the feature in its current configuration.
        /// This property is key-value observable.
        #[unsafe(method(isPortraitEffectActive))]
        #[unsafe(method_family = none)]
        pub unsafe fn isPortraitEffectActive(&self) -> bool;
    );
}
2846
/// AVCaptureDeviceReactionEffects.
impl AVCaptureDevice {
    extern_methods!(
        /// A class property indicating whether the application is suitable for reaction effects, either by automatic gesture detection, or by calls to -[AVCaptureDevice performEffectForReaction:]. Reactions are only rendered when the device's activeFormat.reactionEffectsSupported is also YES, which will be reflected by canPerformReactionEffects when the feature is both enabled and supported.
        ///
        ///
        /// On macOS, Reaction Effects are enabled by default for all applications. On iOS, Reaction Effects are enabled by default for video conferencing applications (apps that use "voip" as one of their UIBackgroundModes). Non video conferencing applications may opt in for Reaction Effects by adding the following key to their Info.plist:
        ///
        /// ```text
        /// <key>NSCameraReactionEffectsEnabled</key>
        /// <true/>
        /// ```
        #[unsafe(method(reactionEffectsEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn reactionEffectsEnabled() -> bool;

        /// A class property indicating whether gesture detection will trigger reaction effects on the video stream. Gesture detection will only run when the device's activeFormat.reactionEffectsSupported is also YES, which will be reflected by canPerformReactionEffects.
        ///
        ///
        /// This property changes to reflect the Gestures state in Control Center. It is key-value observable. Clients can call performEffectForReaction: independently of whether gesture detection is enabled, reaction effects from either source will be intermixed.
        /// By default, gesture detection is enabled. As of iOS 17.4 and macOS 14.4, applications can control the default value of this property by adding the following key to their Info.plist:
        ///
        /// ```text
        /// <key>NSCameraReactionEffectGesturesEnabledDefault</key>
        /// ```
        ///
        /// A value of true enables gesture detection and a value of false disables it, until such time that the user makes their own selection in Control Center.
        #[unsafe(method(reactionEffectGesturesEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn reactionEffectGesturesEnabled() -> bool;

        /// Indicates whether reactions can be performed on a particular AVCaptureDevice. This requires reactionEffectsEnabled to be YES, as well as using a AVCaptureDeviceFormat with reactionEffectsSupported.
        ///
        ///
        /// This readonly property returns YES when resources for reactions are available on the device instance. When YES, calls to performEffectForReaction: will render on the video feed, otherwise those calls are ignored. It is key-value observable.
        #[unsafe(method(canPerformReactionEffects))]
        #[unsafe(method_family = none)]
        pub unsafe fn canPerformReactionEffects(&self) -> bool;

        #[cfg(feature = "AVCaptureReactions")]
        /// Returns a list of reaction types which can be passed to performEffectForReaction.
        ///
        ///
        /// The list may differ between devices, or be affected by changes to active format, and can be key-value observed.
        #[unsafe(method(availableReactionTypes))]
        #[unsafe(method_family = none)]
        pub unsafe fn availableReactionTypes(&self) -> Retained<NSSet<AVCaptureReactionType>>;

        #[cfg(feature = "AVCaptureReactions")]
        /// Triggers a specified reaction on the video stream.
        ///
        ///
        /// Parameter `reactionType`: Indicates which reaction to perform.
        ///
        ///
        /// The entries in reactionEffectsInProgress may not reflect one-to-one against calls to this method. Depending on reaction style or resource limits, triggering multiple overlapping reactions of the same type may be coalesced into extending an existing reaction rather than overlaying a new one.
        ///
        /// The reactionType requested must be one of those listed in availableReactionTypes or an exception will be thrown. Performing a reaction when canPerformReactionEffects is NO is ignored, and VoIP applications are encouraged to transmit and display such reactions outside of the video feed.
        #[unsafe(method(performEffectForReaction:))]
        #[unsafe(method_family = none)]
        pub unsafe fn performEffectForReaction(&self, reaction_type: &AVCaptureReactionType);

        #[cfg(feature = "AVCaptureReactions")]
        /// Contains an array of reaction effects that are currently being performed by the device, sorted by timestamp. If observing old and new values in the KVO callback, the reaction effects which are still running in the new array will have kCMTimeInvalid as their endTime property. Reaction effects which have ended will only be in the old array, and will have their endTime property set to the presentation time of the first frame where the reaction effect was no longer present.
        ///
        ///
        /// Reaction effects which are triggered by either a call to performEffectForReaction: or by the automatic gesture detection will be reflected in this array. It is key-value observable to be notified when reaction effects begin or end.
        #[unsafe(method(reactionEffectsInProgress))]
        #[unsafe(method_family = none)]
        pub unsafe fn reactionEffectsInProgress(
            &self,
        ) -> Retained<NSArray<AVCaptureReactionEffectState>>;
    );
}
2921
/// AVCaptureDeviceBackgroundReplacement.
impl AVCaptureDevice {
    extern_methods!(
        /// A class property indicating whether the user has enabled the Background Replacement feature for this application.
        ///
        /// This is a class property: it reflects the app-wide feature state, not the
        /// state of a particular device instance.
        #[unsafe(method(isBackgroundReplacementEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isBackgroundReplacementEnabled() -> bool;

        /// Indicates whether Background Replacement is currently active on a particular AVCaptureDevice.
        ///
        ///
        /// This property is key-value observable.
        #[unsafe(method(isBackgroundReplacementActive))]
        #[unsafe(method_family = none)]
        pub unsafe fn isBackgroundReplacementActive(&self) -> bool;
    );
}
2939
/// AVCaptureDeviceContinuityCamera.
impl AVCaptureDevice {
    extern_methods!(
        /// A property that reports YES if the receiver is a Continuity Camera.
        ///
        ///
        /// Access this property to discover if the receiver is a Continuity Camera (external iPhone webcam).
        /// No setter is generated for this property; it is read-only.
        #[unsafe(method(isContinuityCamera))]
        #[unsafe(method_family = none)]
        pub unsafe fn isContinuityCamera(&self) -> bool;
    );
}
2952
/// AVCaptureDeviceDeskViewCamera.
impl AVCaptureDevice {
    extern_methods!(
        /// A reference to the Desk View Camera that is associated with and derived from this camera.
        ///
        ///
        /// The companionDeskViewCamera property allows you to discover if the receiver has a paired Desk View Camera which derives its desk framing from the receiver's ultra wide frame. In the presence of multiple Continuity Cameras, this property allows you to pair a particular Continuity Camera with its associated Desk View Camera.
        ///
        /// Returns `None` when the receiver has no associated Desk View Camera.
        #[unsafe(method(companionDeskViewCamera))]
        #[unsafe(method_family = none)]
        pub unsafe fn companionDeskViewCamera(&self) -> Option<Retained<AVCaptureDevice>>;
    );
}
2965
/// Constants describing microphone filtering modes.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturemicrophonemode?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureMicrophoneMode(pub NSInteger);
impl AVCaptureMicrophoneMode {
    /// Indicates that microphone audio is being processed with standard voice DSP.
    #[doc(alias = "AVCaptureMicrophoneModeStandard")]
    pub const Standard: Self = Self(0);
    /// Indicates that microphone audio processing is minimized to capture all sounds in the room.
    #[doc(alias = "AVCaptureMicrophoneModeWideSpectrum")]
    pub const WideSpectrum: Self = Self(1);
    /// Indicates that microphone audio is being processed to isolate the voice and attenuate other signals.
    #[doc(alias = "AVCaptureMicrophoneModeVoiceIsolation")]
    pub const VoiceIsolation: Self = Self(2);
}

// The NS_ENUM is an NSInteger in Objective-C, so it uses NSInteger's encoding.
unsafe impl Encode for AVCaptureMicrophoneMode {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

unsafe impl RefEncode for AVCaptureMicrophoneMode {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
2995}
2996
/// AVCaptureMicrophoneMode.
impl AVCaptureDevice {
    extern_methods!(
        /// Indicates the microphone mode that has been selected by the user in Control Center.
        ///
        ///
        /// This readonly property returns the microphone mode selected by the user in Control Center. It is key-value observable.
        /// This is a class property; it is not tied to a particular device instance.
        #[unsafe(method(preferredMicrophoneMode))]
        #[unsafe(method_family = none)]
        pub unsafe fn preferredMicrophoneMode() -> AVCaptureMicrophoneMode;

        /// Indicates the currently active microphone mode.
        ///
        ///
        /// This readonly property returns the currently active microphone mode, which may differ from the preferredMicrophoneMode if the application's active audio route does not support the preferred microphone mode. This property is key-value observable.
        #[unsafe(method(activeMicrophoneMode))]
        #[unsafe(method_family = none)]
        pub unsafe fn activeMicrophoneMode() -> AVCaptureMicrophoneMode;
    );
}
3017
/// Constants describing the system user interfaces available to +showSystemUserInterface:.
///
/// Note that, unlike most NS_ENUMs in this file, the raw values start at 1.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturesystemuserinterface?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureSystemUserInterface(pub NSInteger);
impl AVCaptureSystemUserInterface {
    /// Indicates the system UI for enabling / disabling video effects.
    #[doc(alias = "AVCaptureSystemUserInterfaceVideoEffects")]
    pub const VideoEffects: Self = Self(1);
    /// Indicates the system UI for selecting microphone modes.
    #[doc(alias = "AVCaptureSystemUserInterfaceMicrophoneModes")]
    pub const MicrophoneModes: Self = Self(2);
}

// The NS_ENUM is an NSInteger in Objective-C, so it uses NSInteger's encoding.
unsafe impl Encode for AVCaptureSystemUserInterface {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

unsafe impl RefEncode for AVCaptureSystemUserInterface {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
3043}
3044
/// AVCaptureSystemUserInterface.
impl AVCaptureDevice {
    extern_methods!(
        /// Displays the system's user interface for video effects or microphone modes.
        ///
        ///
        /// Parameter `systemUserInterface`: The system UI to show.
        ///
        ///
        /// This method allows the calling application to prompt the user to make changes to Video Effects (such as Center Stage or the Portrait Effect) or Microphone Modes. It brings up the system user interface and deep links to the appropriate module. This method is non-blocking. After presenting the desired system user interface, control returns immediately to the application.
        #[unsafe(method(showSystemUserInterface:))]
        #[unsafe(method_family = none)]
        pub unsafe fn showSystemUserInterface(system_user_interface: AVCaptureSystemUserInterface);
    );
}
3060
/// AVSpatialCaptureDiscomfortReason string constants
///
///
/// The AVSpatialCaptureDiscomfortReason string constants are used to report the applicability of the current scene to create a comfortable viewing experience.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avspatialcapturediscomfortreason?language=objc)
// NS_TYPED_ENUM — an open set of NSString values, not a closed Rust enum.
pub type AVSpatialCaptureDiscomfortReason = NSString;

extern "C" {
    /// The lighting of the current scene is not bright enough; the playback experience will likely be uncomfortable due to noise differences between the two cameras.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avspatialcapturediscomfortreasonnotenoughlight?language=objc)
    pub static AVSpatialCaptureDiscomfortReasonNotEnoughLight:
        &'static AVSpatialCaptureDiscomfortReason;
}

extern "C" {
    /// The focus point of the current scene is too close; the playback experience will likely be uncomfortable due to the subject being closer than the minimum focus distance of one or both of the lenses.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avspatialcapturediscomfortreasonsubjecttooclose?language=objc)
    pub static AVSpatialCaptureDiscomfortReasonSubjectTooClose:
        &'static AVSpatialCaptureDiscomfortReason;
}
3085
/// AVCaptureDeviceSpatialCapture.
impl AVCaptureDevice {
    extern_methods!(
        /// Indicates whether or not the current environmental conditions are amenable to a spatial capture that is comfortable to view.
        ///
        ///
        /// This property can be monitored in order to determine the presentation of UI elements to inform the user that they should reframe their scene for a more pleasing spatial capture ("subject is too close", "scene is too dark").
        /// The returned set contains AVSpatialCaptureDiscomfortReason constants such as
        /// AVSpatialCaptureDiscomfortReasonNotEnoughLight and AVSpatialCaptureDiscomfortReasonSubjectTooClose.
        #[unsafe(method(spatialCaptureDiscomfortReasons))]
        #[unsafe(method_family = none)]
        pub unsafe fn spatialCaptureDiscomfortReasons(
            &self,
        ) -> Retained<NSSet<AVSpatialCaptureDiscomfortReason>>;
    );
}
3100
/// An informative status about the scene observed by the device.
///
/// Some features have certain requirements on the scene (lighting condition for Cinematic Video, for example) to produce optimal results; these ``AVCaptureSceneMonitoringStatus`` string constants are used to represent such scene statuses for a given feature.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturescenemonitoringstatus?language=objc)
// NS_TYPED_ENUM — an open set of NSString values, not a closed Rust enum.
pub type AVCaptureSceneMonitoringStatus = NSString;

extern "C" {
    /// The light level of the current scene is insufficient for the current set of features to function optimally.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturescenemonitoringstatusnotenoughlight?language=objc)
    pub static AVCaptureSceneMonitoringStatusNotEnoughLight:
        &'static AVCaptureSceneMonitoringStatus;
}
3116
/// AVCaptureDeviceCinematicVideoCapture.
impl AVCaptureDevice {
    extern_methods!(
        /// The current scene monitoring statuses related to Cinematic Video capture.
        ///
        /// Monitor this property via key-value observation to present a UI informing the user that they should reframe their scene for a better Cinematic Video experience ("scene is too dark").
        /// The returned set contains AVCaptureSceneMonitoringStatus constants such as
        /// AVCaptureSceneMonitoringStatusNotEnoughLight.
        #[unsafe(method(cinematicVideoCaptureSceneMonitoringStatuses))]
        #[unsafe(method_family = none)]
        pub unsafe fn cinematicVideoCaptureSceneMonitoringStatuses(
            &self,
        ) -> Retained<NSSet<AVCaptureSceneMonitoringStatus>>;
    );
}
3130
/// String constants describing the different video aspect ratios you can configure for a particular device.
///
/// This is an Objective-C "typed extensible enum" (`NS_TYPED_ENUM`): the Rust type is an alias
/// for `NSString`, and the valid values are exported as the `static` constants below
/// (``AVCaptureAspectRatio1x1``, ``AVCaptureAspectRatio16x9``, etc.). Compare values by string
/// equality, not pointer identity.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureaspectratio?language=objc)
// NS_TYPED_ENUM
pub type AVCaptureAspectRatio = NSString;

extern "C" {
    /// An aspect ratio of 1x1.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureaspectratio1x1?language=objc)
    pub static AVCaptureAspectRatio1x1: &'static AVCaptureAspectRatio;
}

extern "C" {
    /// An aspect ratio of 16x9.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureaspectratio16x9?language=objc)
    pub static AVCaptureAspectRatio16x9: &'static AVCaptureAspectRatio;
}

extern "C" {
    /// An aspect ratio of 9x16.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureaspectratio9x16?language=objc)
    pub static AVCaptureAspectRatio9x16: &'static AVCaptureAspectRatio;
}

extern "C" {
    /// An aspect ratio of 4x3.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureaspectratio4x3?language=objc)
    pub static AVCaptureAspectRatio4x3: &'static AVCaptureAspectRatio;
}

extern "C" {
    /// An aspect ratio of 3x4.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureaspectratio3x4?language=objc)
    pub static AVCaptureAspectRatio3x4: &'static AVCaptureAspectRatio;
}
3171
/// DynamicAspectRatio.
///
/// Bindings for the `DynamicAspectRatio` category on ``AVCaptureDevice``.
impl AVCaptureDevice {
    extern_methods!(
        /// A key-value observable property indicating the current aspect ratio for a device.
        ///
        /// This property is initialized to the first ``AVCaptureAspectRatio`` listed in the device's activeFormat's ``AVCaptureDeviceFormat/supportedDynamicAspectRatios`` property. If the activeFormat's ``AVCaptureDeviceFormat/supportedDynamicAspectRatios`` is an empty array, this property returns nil.
        ///
        /// Read-only here; use ``setDynamicAspectRatio_completionHandler`` to change it.
        #[unsafe(method(dynamicAspectRatio))]
        #[unsafe(method_family = none)]
        pub unsafe fn dynamicAspectRatio(&self) -> Option<Retained<AVCaptureAspectRatio>>;

        #[cfg(feature = "objc2-core-media")]
        /// A key-value observable property describing the output dimensions of the video buffer based on the device's dynamic aspect ratio.
        ///
        /// If the device's activeFormat's ``AVCaptureDeviceFormat/supportedDynamicAspectRatios`` is an empty array, this property returns {0,0}.
        #[unsafe(method(dynamicDimensions))]
        #[unsafe(method_family = none)]
        pub unsafe fn dynamicDimensions(&self) -> CMVideoDimensions;

        #[cfg(all(feature = "block2", feature = "objc2-core-media"))]
        /// Updates the dynamic aspect ratio of the device.
        ///
        /// - Parameter dynamicAspectRatio: The new ``AVCaptureAspectRatio`` the device should output.
        /// - Parameter handler: A block called by the device when `dynamicAspectRatio` is set to the value specified. If you call ``setDynamicAspectRatio:completionHandler:`` multiple times, the completion handlers are called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the ``AVCaptureSession/synchronizationClock`` prior to comparison with the timestamps of buffers delivered via an ``AVCaptureVideoDataOutput``. You may pass `nil` for the `handler` parameter if you do not need to know when the operation completes.
        ///
        /// This is the only way of setting ``dynamicAspectRatio``. This method throws an `NSInvalidArgumentException` if `dynamicAspectRatio` is not a supported aspect ratio found in the device's activeFormat's ``AVCaptureDeviceFormat/supportedDynamicAspectRatios``. This method throws an `NSGenericException` if you call it without first obtaining exclusive access to the device using ``AVCaptureDevice/lockForConfiguration:``.
        #[unsafe(method(setDynamicAspectRatio:completionHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDynamicAspectRatio_completionHandler(
            &self,
            dynamic_aspect_ratio: &AVCaptureAspectRatio,
            handler: Option<&block2::DynBlock<dyn Fn(CMTime, *mut NSError)>>,
        );
    );
}
3206
/// AVCaptureDeviceSmartFraming.
///
/// Bindings for the `AVCaptureDeviceSmartFraming` category on ``AVCaptureDevice``.
impl AVCaptureDevice {
    extern_methods!(
        /// A monitor owned by the device that recommends an optimal framing based on the content in the scene.
        ///
        /// Returns `None` when the device does not support smart framing monitoring.
        ///
        /// An ultra wide camera device that supports dynamic aspect ratio configuration may also support "smart framing monitoring". If this property returns non `nil`, you may use it to listen for framing recommendations by configuring its ``AVCaptureSmartFramingMonitor/enabledFramings`` and calling ``AVCaptureSmartFramingMonitor/startMonitoringWithError:``. The smart framing monitor only makes recommendations when the current ``AVCaptureDevice/activeFormat`` supports smart framing (see ``AVCaptureDeviceFormat/smartFramingSupported``).
        #[unsafe(method(smartFramingMonitor))]
        #[unsafe(method_family = none)]
        pub unsafe fn smartFramingMonitor(&self) -> Option<Retained<AVCaptureSmartFramingMonitor>>;
    );
}
3218
extern_class!(
    /// An object associated with a capture device that monitors the scene and suggests an optimal framing.
    ///
    /// A smart framing monitor observes its associated device for objects of interest entering and exiting the camera's field of view and recommends an optimal framing for good photographic composition. This framing recommendation consists of an aspect ratio and zoom factor. You may respond to the device's framing recommendation by calling ``AVCaptureDevice/setDynamicAspectRatio:completionHandler:`` and setting ``AVCaptureDevice/videoZoomFactor`` on the associated device in whatever order best matches your animation between old and new framings.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturesmartframingmonitor?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCaptureSmartFramingMonitor;
);

extern_conformance!(
    unsafe impl NSObjectProtocol for AVCaptureSmartFramingMonitor {}
);

impl AVCaptureSmartFramingMonitor {
    extern_methods!(
        // Plain NSObject initializers; instances are normally obtained from
        // ``AVCaptureDevice/smartFramingMonitor`` rather than constructed directly.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        /// An array of framings supported by the monitor in its current configuration.
        ///
        /// The monitor is capable of recommending any of the framings in this array. This property is key-value observable and may change as the target capture device's ``AVCaptureDevice/activeFormat`` property changes. This array contains the full set of framings supported by the monitor in the device's current configuration. You must tell the monitor which smart framings you are interested in having recommended to you by setting the ``enabledFramings`` property.
        #[unsafe(method(supportedFramings))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportedFramings(&self) -> Retained<NSArray<AVCaptureFraming>>;

        /// An array of framings that the monitor is allowed to suggest.
        ///
        /// The monitor is capable of recommending any of the framings in the ``supportedFramings`` array. This property contains the subset of ``supportedFramings`` you would like to have recommended to you. You may set this property at any time while running your ``AVCaptureSession``. This property's default value is the empty array.
        #[unsafe(method(enabledFramings))]
        #[unsafe(method_family = none)]
        pub unsafe fn enabledFramings(&self) -> Retained<NSArray<AVCaptureFraming>>;

        /// Setter for [`enabledFramings`][Self::enabledFramings].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        #[unsafe(method(setEnabledFramings:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setEnabledFramings(&self, enabled_framings: &NSArray<AVCaptureFraming>);

        /// The latest recommended framing from the monitor.
        ///
        /// While your ``AVCaptureSession`` is running, the monitor continuously observes its device's scene to recommend the best framing. This recommended framing is always one of the values in ``enabledFramings``. This property may return `nil` if smart framing isn't supported for the device in its current configuration. Its default value is `nil`. This property is key-value observable, and when you observe a change, you may respond to the new recommendation by calling ``AVCaptureDevice/setDynamicAspectRatio:completionHandler:`` and setting ``AVCaptureDevice/videoZoomFactor`` on the associated device in whatever order best matches your animation between old and new framings.
        #[unsafe(method(recommendedFraming))]
        #[unsafe(method_family = none)]
        pub unsafe fn recommendedFraming(&self) -> Option<Retained<AVCaptureFraming>>;

        /// Begins monitoring the device's active scene and making framing recommendations.
        ///
        /// - Parameter outError: A pointer to an ``NSError`` indicating why ``startMonitoringWithError:`` failed, or to a `nil` ``NSError`` on success.
        /// - Returns: `true` if successful, `false` if monitoring could not be started.
        ///
        /// The ObjC BOOL-plus-out-error pair is surfaced in Rust as `Result<(), Retained<NSError>>`.
        ///
        /// The monitor's ``recommendedFraming`` is `nil` when it is not actively running. Call this method to start monitoring. You may start monitoring before or after calling ``AVCaptureSession/startRunning``, and you may stop active monitoring without stopping the capture session by calling ``stopMonitoring`` at any time, but you must set ``enabledFramings`` before running your capture session so that the monitor is prepared for your desired framing recommendations. While the monitor is running, you may set ``enabledFramings`` at any time to change the framing choices the monitor should consider in its recommendations.
        #[unsafe(method(startMonitoringWithError:_))]
        #[unsafe(method_family = none)]
        pub unsafe fn startMonitoringWithError(&self) -> Result<(), Retained<NSError>>;

        /// Stops monitoring the device's active scene and making framing recommendations.
        ///
        /// The monitor's ``recommendedFraming`` is `nil` when it is not actively running. Call this method to stop actively monitoring the scene and making framing recommendations. You may start monitoring before or after calling ``AVCaptureSession/startRunning``, and may stop active monitoring without stopping the capture session by calling ``stopMonitoring`` at any time.
        #[unsafe(method(stopMonitoring))]
        #[unsafe(method_family = none)]
        pub unsafe fn stopMonitoring(&self);

        /// `true` when the receiver is actively monitoring.
        ///
        /// See ``startMonitoringWithError:`` and ``stopMonitoring``.
        #[unsafe(method(isMonitoring))]
        #[unsafe(method_family = none)]
        pub unsafe fn isMonitoring(&self) -> bool;
    );
}
3297
extern_class!(
    /// A framing, consisting of an aspect ratio and a zoom factor.
    ///
    /// An ``AVCaptureSmartFramingMonitor`` provides framing recommendations using this object.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureframing?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCaptureFraming;
);

extern_conformance!(
    unsafe impl NSObjectProtocol for AVCaptureFraming {}
);

impl AVCaptureFraming {
    extern_methods!(
        // Plain NSObject initializers; framings are normally obtained from an
        // ``AVCaptureSmartFramingMonitor`` rather than constructed directly.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        /// An aspect ratio.
        ///
        /// One of the enumerated aspect ratios suitable for use with the ``AVCaptureDevice`` dynamic aspect ratio APIs.
        #[unsafe(method(aspectRatio))]
        #[unsafe(method_family = none)]
        pub unsafe fn aspectRatio(&self) -> Retained<AVCaptureAspectRatio>;

        /// A zoom factor.
        ///
        /// Suitable for use with the ``AVCaptureDevice/videoZoomFactor`` property or ``AVCaptureDevice/rampToVideoZoomFactor:withRate:``.
        #[unsafe(method(zoomFactor))]
        #[unsafe(method_family = none)]
        pub unsafe fn zoomFactor(&self) -> c_float;
    );
}
3338
/// AVCaptureDeviceNominalFocalLengthIn35mmFilm.
///
/// Bindings for the `AVCaptureDeviceNominalFocalLengthIn35mmFilm` category on ``AVCaptureDevice``.
impl AVCaptureDevice {
    extern_methods!(
        /// The nominal 35mm equivalent focal length of the capture device's lens.
        ///
        /// This value represents a nominal measurement of the device's field of view, expressed as a 35mm equivalent focal length, measured diagonally. The value is similar to the `FocalLengthIn35mmFormat` EXIF entry (see
        /// <doc
        /// ://com.apple.documentation/documentation/imageio/kcgimagepropertyexiffocallenin35mmfilm>) for a photo captured using the device's format where ``AVCaptureDeviceFormat/highestPhotoQualitySupported`` is `true` or when you've configured the session with the ``AVCaptureSessionPresetPhoto`` preset.
        ///
        /// This property value is `0` for virtual devices and external cameras.
        #[unsafe(method(nominalFocalLengthIn35mmFilm))]
        #[unsafe(method_family = none)]
        pub unsafe fn nominalFocalLengthIn35mmFilm(&self) -> c_float;
    );
}
3354
extern_class!(
    /// The AVCaptureDeviceDiscoverySession allows clients to search for devices by certain criteria.
    ///
    ///
    /// This class allows clients to discover devices by providing certain search criteria. The objective of this class is to help find devices by device type and optionally by media type or position and allow you to key-value observe changes to the returned devices list.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicediscoverysession?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCaptureDeviceDiscoverySession;
);

extern_conformance!(
    unsafe impl NSObjectProtocol for AVCaptureDeviceDiscoverySession {}
);

impl AVCaptureDeviceDiscoverySession {
    extern_methods!(
        // Plain NSObject initializers; sessions are normally created via the
        // `discoverySessionWithDeviceTypes:mediaType:position:` factory below.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        #[cfg(feature = "AVMediaFormat")]
        /// Returns an AVCaptureDeviceDiscoverySession instance for the given device types, media type, and position.
        ///
        ///
        /// Parameter `deviceTypes`: An array specifying the device types to include in the list of discovered devices.
        ///
        /// Parameter `mediaType`: The media type, such as AVMediaTypeVideo, AVMediaTypeAudio, or AVMediaTypeMuxed, to include in the list of discovered devices. Pass nil to search for devices with any media type.
        ///
        /// Parameter `position`: The position to include in the list of discovered devices. Pass AVCaptureDevicePositionUnspecified to search for devices with any position.
        ///
        /// Returns: The AVCaptureDeviceDiscoverySession from which the list of devices can be obtained.
        ///
        ///
        /// The list of device types is mandatory. This is used to make sure that clients only get access to devices of types they expect. This prevents new device types from automatically being included in the list of devices.
        #[unsafe(method(discoverySessionWithDeviceTypes:mediaType:position:))]
        #[unsafe(method_family = none)]
        pub unsafe fn discoverySessionWithDeviceTypes_mediaType_position(
            device_types: &NSArray<AVCaptureDeviceType>,
            media_type: Option<&AVMediaType>,
            position: AVCaptureDevicePosition,
        ) -> Retained<Self>;

        /// The list of devices that comply to the search criteria specified on the discovery session.
        ///
        ///
        /// The returned array contains only devices that are available at the time the method is called. Applications can key-value observe this property to be notified when the list of available devices has changed. For apps linked against iOS 10, the devices returned are unsorted. For apps linked against iOS 11 or later, the devices are sorted by AVCaptureDeviceType, matching the order specified in the deviceTypes parameter of +[AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:mediaType:position:]. If a position of AVCaptureDevicePositionUnspecified is specified, the results are further ordered by position in the AVCaptureDevicePosition enum. Starting in Mac Catalyst 14.0, clients can key value observe the value of this property to be notified when the devices change.
        #[unsafe(method(devices))]
        #[unsafe(method_family = none)]
        pub unsafe fn devices(&self) -> Retained<NSArray<AVCaptureDevice>>;

        /// An array of sets of AVCaptureDevices that are allowed to be used simultaneously in an AVCaptureMultiCamSession.
        ///
        ///
        /// When using an AVCaptureMultiCamSession, multiple cameras may be used as device inputs to the session, so long as they are included in one of the supportedMultiCamDeviceSets. Starting in Mac Catalyst 14.0, clients can key value observe the value of this property to be notified when the device sets change.
        #[unsafe(method(supportedMultiCamDeviceSets))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportedMultiCamDeviceSets(
            &self,
        ) -> Retained<NSArray<NSSet<AVCaptureDevice>>>;
    );
}
3422
extern_class!(
    /// The AVCaptureDeviceRotationCoordinator allows clients to monitor rotations of a given AVCaptureDevice instance and be provided the video rotation angle that should be applied for horizon-level preview and capture relative to gravity.
    ///
    ///
    /// Each instance of AVCaptureDeviceRotationCoordinator allows a client to coordinate with changes to the rotation of an AVCaptureDevice to ensure the camera's video preview and captured output are horizon-level. The coordinator delivers key-value updates on the main queue.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedevicerotationcoordinator?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCaptureDeviceRotationCoordinator;
);

extern_conformance!(
    unsafe impl NSObjectProtocol for AVCaptureDeviceRotationCoordinator {}
);

impl AVCaptureDeviceRotationCoordinator {
    extern_methods!(
        // Plain NSObject initializers; coordinators are normally created via
        // `initWithDevice:previewLayer:` below.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        #[cfg(feature = "objc2-quartz-core")]
        #[cfg(not(target_os = "watchos"))]
        /// Returns an AVCaptureDeviceRotationCoordinator instance that provides updates to the amount of rotation that should be applied for horizon-level preview and capture relative to gravity.
        ///
        ///
        /// Parameter `device`: The device for which to monitor rotation.
        ///
        /// Parameter `previewLayer`: A layer displaying the camera's video preview. If nil, the coordinator will return 0 degrees of rotation for horizon-level preview.
        ///
        /// Returns: An AVCaptureDeviceRotationCoordinator from which rotation angles for preview and capture can be obtained.
        ///
        ///
        /// An AVCaptureDeviceRotationCoordinator is only applicable to video devices. The given device and layer determine the amount of rotation that should be applied for horizon-level preview and capture.
        #[unsafe(method(initWithDevice:previewLayer:))]
        #[unsafe(method_family = init)]
        pub unsafe fn initWithDevice_previewLayer(
            this: Allocated<Self>,
            device: &AVCaptureDevice,
            preview_layer: Option<&CALayer>,
        ) -> Retained<Self>;

        /// The device for which the coordinator provides video rotation angles.
        ///
        ///
        /// The value of this property is the AVCaptureDevice instance that was used to create the coordinator. The coordinator holds a weak reference to the device, which is why this getter returns an `Option`.
        #[unsafe(method(device))]
        #[unsafe(method_family = none)]
        pub unsafe fn device(&self) -> Option<Retained<AVCaptureDevice>>;

        #[cfg(feature = "objc2-quartz-core")]
        #[cfg(not(target_os = "watchos"))]
        /// The CALayer for which the coordinator calculates video rotation angles for horizon-level preview.
        ///
        ///
        /// The value of this property is the CALayer instance that was used to create the coordinator. Clients may specify an AVCaptureVideoPreviewLayer or other CALayer instance that displays a camera's video preview. The coordinator holds a weak reference to the layer. The coordinator will return 0 degrees of rotation from -videoRotationAngleForHorizonLevelPreview if a layer was not specified at initialization, the layer is not in a view hierarchy, or the layer has been deallocated.
        #[unsafe(method(previewLayer))]
        #[unsafe(method_family = none)]
        pub unsafe fn previewLayer(&self) -> Option<Retained<CALayer>>;

        #[cfg(feature = "objc2-core-foundation")]
        /// Returns a video rotation angle in degrees for displaying the camera's video preview in the given CALayer.
        ///
        ///
        /// The video rotation angle represents by how much the camera's video preview should be rotated for display in the CALayer to be horizon-level relative to gravity. An angle of 0 degrees means that video will be output in the camera's unrotated, native sensor orientation. The video rotation angle for preview may differ between cameras at different positions. For example when an iOS device is held in portrait orientation, the video preview for built-in cameras may need to be rotated by 90 degrees while the video preview for an external camera should not be rotated. External cameras return 0 degrees of rotation even if they physically rotate when their position in physical space is unknown. This property is key-value observable and delivers updates on the main queue.
        #[unsafe(method(videoRotationAngleForHorizonLevelPreview))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoRotationAngleForHorizonLevelPreview(&self) -> CGFloat;

        #[cfg(feature = "objc2-core-foundation")]
        /// Returns a video rotation angle in degrees for horizon-level capture from this camera.
        ///
        ///
        /// The video rotation angle represents by how much the photos or movies captured from the camera should be rotated to be horizon-level relative to gravity. A video rotation angle of 0 degrees means that the output will be in the camera's unrotated, native sensor orientation. The video rotation angle for capture may differ between cameras. For example when an iOS device is held in portrait orientation, photos and movies captured from built-in cameras may need to be rotated by 90 degrees while the photos and movies from an external camera should not be rotated. External cameras return 0 degrees of rotation even if they physically rotate when their position in physical space is unknown. The video rotation angle returned from this property is distinct from the angle returned by -videoRotationAngleForHorizonLevelPreview because in certain combinations of device and interface orientations, the video rotation angle needed for horizon-level preview may not match the amount of rotation needed for horizon-level capture. This property is key-value observable and delivers updates on the main queue.
        #[unsafe(method(videoRotationAngleForHorizonLevelCapture))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoRotationAngleForHorizonLevelCapture(&self) -> CGFloat;
    );
}
3507
extern_class!(
    /// An AVExposureBiasRange expresses an inclusive range of supported exposure bias values, in EV units.
    ///
    ///
    /// This is used by AVCaptureSystemExposureBiasSlider for the range the slider uses.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avexposurebiasrange?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVExposureBiasRange;
);

extern_conformance!(
    unsafe impl NSObjectProtocol for AVExposureBiasRange {}
);

impl AVExposureBiasRange {
    extern_methods!(
        // Plain NSObject initializers; ranges are normally vended by other
        // AVFoundation APIs rather than constructed directly.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        /// A float indicating the minimum exposure bias in EV units supported by this range.
        #[unsafe(method(minExposureBias))]
        #[unsafe(method_family = none)]
        pub unsafe fn minExposureBias(&self) -> c_float;

        /// A float indicating the maximum exposure bias in EV units supported by this range.
        #[unsafe(method(maxExposureBias))]
        #[unsafe(method_family = none)]
        pub unsafe fn maxExposureBias(&self) -> c_float;

        /// Tests if a given exposure bias in EV units is within the exposure bias range.
        ///
        ///
        /// Parameter `exposureBias`: The exposure bias to test.
        ///
        /// Returns: Returns YES if the given exposure bias is within the exposure bias range, NO otherwise.
        ///
        ///
        /// Note that the exposure bias ranges are inclusive.
        #[unsafe(method(containsExposureBias:))]
        #[unsafe(method_family = none)]
        pub unsafe fn containsExposureBias(&self, exposure_bias: c_float) -> bool;
    );
}
3558
extern_class!(
    /// An AVFrameRateRange expresses a range of valid frame rates as min and max rate and min and max duration.
    ///
    ///
    /// An AVCaptureDevice exposes an array of formats, and its current activeFormat may be queried. The payload for the formats property is an array of AVCaptureDeviceFormat objects and the activeFormat property payload is an AVCaptureDeviceFormat. AVCaptureDeviceFormat wraps a CMFormatDescription and expresses a range of valid video frame rates as an NSArray of AVFrameRateRange objects. AVFrameRateRange expresses min and max frame rate as a rate in frames per second and duration (CMTime). An AVFrameRateRange object is immutable. Its values do not change for the life of the object.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avframeraterange?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVFrameRateRange;
);

extern_conformance!(
    unsafe impl NSObjectProtocol for AVFrameRateRange {}
);

impl AVFrameRateRange {
    extern_methods!(
        // Plain NSObject initializers; ranges are normally obtained from an
        // AVCaptureDeviceFormat rather than constructed directly.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        /// A Float64 indicating the minimum frame rate supported by this range.
        ///
        ///
        /// This read-only property indicates the minimum frame rate supported by this range in frames per second.
        #[unsafe(method(minFrameRate))]
        #[unsafe(method_family = none)]
        pub unsafe fn minFrameRate(&self) -> f64;

        /// A Float64 indicating the maximum frame rate supported by this range.
        ///
        ///
        /// This read-only property indicates the maximum frame rate supported by this range in frames per second.
        #[unsafe(method(maxFrameRate))]
        #[unsafe(method_family = none)]
        pub unsafe fn maxFrameRate(&self) -> f64;

        #[cfg(feature = "objc2-core-media")]
        /// A CMTime indicating the maximum frame duration supported by this range.
        ///
        ///
        /// This read-only property indicates the maximum frame duration supported by this range. It is the reciprocal of minFrameRate, and expresses minFrameRate as a duration.
        #[unsafe(method(maxFrameDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn maxFrameDuration(&self) -> CMTime;

        #[cfg(feature = "objc2-core-media")]
        /// A CMTime indicating the minimum frame duration supported by this range.
        ///
        ///
        /// This read-only property indicates the minimum frame duration supported by this range. It is the reciprocal of maxFrameRate, and expresses maxFrameRate as a duration.
        #[unsafe(method(minFrameDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn minFrameDuration(&self) -> CMTime;
    );
}
3620
extern_class!(
    /// An AVZoomRange expresses an inclusive range of supported zoom factors.
    ///
    ///
    /// This is used by features that have requirements on zoom factors falling within certain ranges.
    ///
    /// Both endpoints of the range are inclusive (see `containsZoomFactor:`).
    /// Instances are typically obtained from other AVFoundation APIs, for
    /// example AVCaptureDeviceFormat's `systemRecommendedVideoZoomRange`.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avzoomrange?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVZoomRange;
);
3632
// Mirrors the Objective-C class's conformance to the root <NSObject> protocol.
extern_conformance!(
    unsafe impl NSObjectProtocol for AVZoomRange {}
);
3636
impl AVZoomRange {
    extern_methods!(
        /// Plain `-[NSObject init]`.
        ///
        /// NOTE(review): the range endpoints below are read-only, and ranges
        /// are normally obtained from other AVFoundation APIs (e.g.
        /// AVCaptureDeviceFormat's `systemRecommendedVideoZoomRange`) rather
        /// than constructed directly — confirm whether direct construction is
        /// ever useful.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Convenience `+[NSObject new]`; see `init`.
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        #[cfg(feature = "objc2-core-foundation")]
        /// A CGFloat indicating the minimum zoom factor supported by this range.
        #[unsafe(method(minZoomFactor))]
        #[unsafe(method_family = none)]
        pub unsafe fn minZoomFactor(&self) -> CGFloat;

        #[cfg(feature = "objc2-core-foundation")]
        /// A CGFloat indicating the maximum zoom factor supported by this range.
        #[unsafe(method(maxZoomFactor))]
        #[unsafe(method_family = none)]
        pub unsafe fn maxZoomFactor(&self) -> CGFloat;

        #[cfg(feature = "objc2-core-foundation")]
        /// Tests if a given zoom factor is within the zoom range.
        ///
        /// Parameter `zoomFactor`: The zoom factor to test.
        ///
        /// Returns: Returns YES if the given zoom factor is within the zoom range, NO otherwise.
        ///
        /// Note that the zoom ranges are inclusive.
        #[unsafe(method(containsZoomFactor:))]
        #[unsafe(method_family = none)]
        pub unsafe fn containsZoomFactor(&self, zoom_factor: CGFloat) -> bool;
    );
}
3672
/// Constants indicating the modes of video stabilization supported by the device's format.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturevideostabilizationmode?language=objc)
// NS_ENUM
// Modeled as a newtype over NSInteger rather than a Rust `enum` so that
// values introduced by future OS releases remain representable. Note that
// `Auto` is -1, so the derived `Ord` sorts it before `Off` (0).
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureVideoStabilizationMode(pub NSInteger);
3680impl AVCaptureVideoStabilizationMode {
3681    /// Indicates that video should not be stabilized.
3682    #[doc(alias = "AVCaptureVideoStabilizationModeOff")]
3683    pub const Off: Self = Self(0);
3684    /// Indicates that video should be stabilized using the standard video stabilization algorithm introduced with iOS 5.0. Standard video stabilization has a reduced field of view. Enabling video stabilization may introduce additional latency into the video capture pipeline.
3685    #[doc(alias = "AVCaptureVideoStabilizationModeStandard")]
3686    pub const Standard: Self = Self(1);
3687    /// Indicates that video should be stabilized using the cinematic stabilization algorithm for more dramatic results. Cinematic video stabilization has a reduced field of view compared to standard video stabilization. Enabling cinematic video stabilization introduces much more latency into the video capture pipeline than standard video stabilization and consumes significantly more system memory. Use narrow or identical min and max frame durations in conjunction with this mode.
3688    #[doc(alias = "AVCaptureVideoStabilizationModeCinematic")]
3689    pub const Cinematic: Self = Self(2);
3690    /// Indicates that the video should be stabilized using the extended cinematic stabilization algorithm. Enabling extended cinematic stabilization introduces longer latency into the video capture pipeline compared to the ``AVCaptureVideoStabilizationModeCinematic`` and consumes more memory, but yields improved stability. It is recommended to use identical or similar min and max frame durations in conjunction with this mode. Cinematic extended mode is face aware when enabled on a front-facing ultra wide camera on iPhone, and prioritizes stabilization of the subject of the frame rather than the background.
3691    #[doc(alias = "AVCaptureVideoStabilizationModeCinematicExtended")]
3692    pub const CinematicExtended: Self = Self(3);
3693    /// Indicates that video should be stabilized using the preview optimized stabilization algorithm. Preview stabilization is a low latency and low power algorithm which is supported only on connections which either have an associated preview layer or have a preview-sized ``AVCaptureVideoDataOutput``.
3694    #[doc(alias = "AVCaptureVideoStabilizationModePreviewOptimized")]
3695    pub const PreviewOptimized: Self = Self(4);
3696    /// Indicates that the video should be stabilized using the enhanced extended cinematic stabilization algorithm. Enhanced extended cinematic has a reduced field of view compared to extended cinematic, without any noticeable increase in latency, and it yields improved stability. It is recommended to use identical or similar min and max frame durations in conjunction with this mode. Cinematic extended enhanced mode is face aware when enabled on a front-facing ultra wide camera on iPhone, and prioritizes stabilization of the subject of the frame rather than the background.
3697    #[doc(alias = "AVCaptureVideoStabilizationModeCinematicExtendedEnhanced")]
3698    pub const CinematicExtendedEnhanced: Self = Self(5);
3699    /// Indicates that video should be stabilized using the low latency stabilization algorithm. Low Latency stabilization has a reduced field of view. Enabling low latency stabilization introduces no additional latency into the video capture pipeline.
3700    #[doc(alias = "AVCaptureVideoStabilizationModeLowLatency")]
3701    pub const LowLatency: Self = Self(6);
3702    /// Indicates that the most appropriate video stabilization mode for the device and format should be chosen.
3703    #[doc(alias = "AVCaptureVideoStabilizationModeAuto")]
3704    pub const Auto: Self = Self(-1);
3705}
3706
3707unsafe impl Encode for AVCaptureVideoStabilizationMode {
3708    const ENCODING: Encoding = NSInteger::ENCODING;
3709}
3710
3711unsafe impl RefEncode for AVCaptureVideoStabilizationMode {
3712    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
3713}
3714
/// Constants indicating the autofocus system.
///
///
/// Indicates that autofocus is not available.
///
/// Indicates that autofocus is achieved by contrast detection. Contrast detection performs a focus scan to find the optimal position.
///
/// Indicates that autofocus is achieved by phase detection. Phase detection has the ability to achieve focus in many cases without a focus scan. Phase detection autofocus is typically less visually intrusive than contrast detection autofocus.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureautofocussystem?language=objc)
// NS_ENUM
// The three paragraphs above describe, in order, the `None`,
// `ContrastDetection` and `PhaseDetection` constants declared in the `impl`
// below. Modeled as a newtype over NSInteger rather than a Rust `enum` so
// that values introduced by future OS releases remain representable.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureAutoFocusSystem(pub NSInteger);
3729impl AVCaptureAutoFocusSystem {
3730    #[doc(alias = "AVCaptureAutoFocusSystemNone")]
3731    pub const None: Self = Self(0);
3732    #[doc(alias = "AVCaptureAutoFocusSystemContrastDetection")]
3733    pub const ContrastDetection: Self = Self(1);
3734    #[doc(alias = "AVCaptureAutoFocusSystemPhaseDetection")]
3735    pub const PhaseDetection: Self = Self(2);
3736}
3737
3738unsafe impl Encode for AVCaptureAutoFocusSystem {
3739    const ENCODING: Encoding = NSInteger::ENCODING;
3740}
3741
3742unsafe impl RefEncode for AVCaptureAutoFocusSystem {
3743    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
3744}
3745
extern_class!(
    /// An AVCaptureDeviceFormat wraps a CMFormatDescription and other format-related information, such as min and max framerate.
    ///
    ///
    /// An AVCaptureDevice exposes an array of formats, and its current activeFormat may be queried. The payload for the formats property is an array of AVCaptureDeviceFormat objects and the activeFormat property payload is an AVCaptureDeviceFormat. AVCaptureDeviceFormat is a thin wrapper around a CMFormatDescription, and can carry associated device format information that doesn't go in a CMFormatDescription, such as min and max frame rate. An AVCaptureDeviceFormat object is immutable. Its values do not change for the life of the object.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedeviceformat?language=objc)
    #[unsafe(super(NSObject))]
    // NOTE(review): Debug/PartialEq/Eq/Hash presumably forward to the
    // Objective-C -description/-isEqual:/-hash via `extern_class!` — confirm
    // against the objc2 documentation.
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCaptureDeviceFormat;
);
3757
// Mirrors the Objective-C class's conformance to the root <NSObject> protocol.
extern_conformance!(
    unsafe impl NSObjectProtocol for AVCaptureDeviceFormat {}
);
3761
impl AVCaptureDeviceFormat {
    extern_methods!(
        /// Plain `-[NSObject init]`.
        ///
        /// NOTE(review): AVCaptureDeviceFormat is documented above as
        /// immutable, and formats are normally obtained from an
        /// AVCaptureDevice's formats array rather than created directly.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Convenience `+[NSObject new]`; see `init`.
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        #[cfg(feature = "AVMediaFormat")]
        /// An NSString describing the media type of an AVCaptureDevice active or supported format.
        ///
        ///
        /// Supported mediaTypes are listed in AVMediaFormat.h. This is a read-only property. The caller assumes no ownership of the returned value and should not CFRelease it.
        #[unsafe(method(mediaType))]
        #[unsafe(method_family = none)]
        pub unsafe fn mediaType(&self) -> Retained<AVMediaType>;

        #[cfg(feature = "objc2-core-media")]
        /// A CMFormatDescription describing an AVCaptureDevice active or supported format.
        ///
        ///
        /// A CMFormatDescription describing an AVCaptureDevice active or supported format. This is a read-only property. The caller assumes no ownership of the returned value and should not CFRelease it.
        #[unsafe(method(formatDescription))]
        #[unsafe(method_family = none)]
        pub unsafe fn formatDescription(&self) -> Retained<CMFormatDescription>;

        /// A property indicating the format's supported frame rate ranges.
        ///
        ///
        /// videoSupportedFrameRateRanges is an array of AVFrameRateRange objects, one for each of the format's supported video frame rate ranges.
        #[unsafe(method(videoSupportedFrameRateRanges))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoSupportedFrameRateRanges(&self) -> Retained<NSArray<AVFrameRateRange>>;

        /// A property indicating the format's horizontal field of view.
        ///
        ///
        /// videoFieldOfView is a float value indicating the receiver's field of view in degrees. If field of view is unknown, a value of 0 is returned.
        #[unsafe(method(videoFieldOfView))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoFieldOfView(&self) -> c_float;

        /// A property indicating whether the format is binned.
        ///
        ///
        /// videoBinned is a BOOL indicating whether the format is a binned format. Binning is a pixel-combining process which can result in greater low light sensitivity at the cost of reduced resolution.
        #[unsafe(method(isVideoBinned))]
        #[unsafe(method_family = none)]
        pub unsafe fn isVideoBinned(&self) -> bool;

        /// Returns whether the format supports the given video stabilization mode.
        ///
        ///
        /// Parameter `videoStabilizationMode`: An AVCaptureVideoStabilizationMode to be checked.
        ///
        ///
        /// isVideoStabilizationModeSupported: returns a boolean value indicating whether the format can be stabilized using the given mode with -[AVCaptureConnection setPreferredVideoStabilizationMode:].
        #[unsafe(method(isVideoStabilizationModeSupported:))]
        #[unsafe(method_family = none)]
        pub unsafe fn isVideoStabilizationModeSupported(
            &self,
            video_stabilization_mode: AVCaptureVideoStabilizationMode,
        ) -> bool;

        /// A property indicating whether the format supports video stabilization.
        ///
        ///
        /// videoStabilizationSupported is a BOOL indicating whether the format can be stabilized using AVCaptureConnection -setEnablesVideoStabilizationWhenAvailable. This property is deprecated. Use isVideoStabilizationModeSupported: instead.
        #[deprecated = "Use isVideoStabilizationModeSupported: instead."]
        #[unsafe(method(isVideoStabilizationSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isVideoStabilizationSupported(&self) -> bool;

        #[cfg(feature = "objc2-core-foundation")]
        /// Indicates the maximum zoom factor available for the AVCaptureDevice's videoZoomFactor property.
        ///
        ///
        /// If the device's videoZoomFactor property is assigned a larger value, an NSRangeException will be thrown. A maximum zoom factor of 1 indicates no zoom is available.
        #[unsafe(method(videoMaxZoomFactor))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoMaxZoomFactor(&self) -> CGFloat;

        #[cfg(feature = "objc2-core-foundation")]
        /// Indicates the value of AVCaptureDevice's videoZoomFactor property at which the image output begins to require upscaling.
        ///
        ///
        /// In some cases the image sensor's dimensions are larger than the dimensions reported by the video AVCaptureDeviceFormat. As long as the sensor crop is larger than the reported dimensions of the AVCaptureDeviceFormat, the image will be downscaled. Setting videoZoomFactor to the value of videoZoomFactorUpscaleThreshold will provide a center crop of the sensor image data without any scaling. If a greater zoom factor is used, then the sensor data will be upscaled to the device format's dimensions.
        #[unsafe(method(videoZoomFactorUpscaleThreshold))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoZoomFactorUpscaleThreshold(&self) -> CGFloat;

        /// Indicates the system's recommended zoom range for this device format.
        ///
        ///
        /// This property can be used to create a slider in your app's user interface to control the device's zoom with a system-recommended video zoom range. When a recommendation is not available, this property returns nil. Clients can key value observe AVCaptureDevice's minAvailableVideoZoomFactor and maxAvailableVideoZoomFactor properties to know when a device's supported zoom is restricted within the recommended zoom range.
        ///
        /// The value of this property is also used for the AVCaptureSystemZoomSlider's range.
        #[unsafe(method(systemRecommendedVideoZoomRange))]
        #[unsafe(method_family = none)]
        pub unsafe fn systemRecommendedVideoZoomRange(&self) -> Option<Retained<AVZoomRange>>;

        #[cfg(feature = "objc2-core-media")]
        /// A CMTime indicating the minimum supported exposure duration.
        ///
        ///
        /// This read-only property indicates the minimum supported exposure duration.
        #[unsafe(method(minExposureDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn minExposureDuration(&self) -> CMTime;

        #[cfg(feature = "objc2-core-media")]
        /// A CMTime indicating the maximum supported exposure duration.
        ///
        ///
        /// This read-only property indicates the maximum supported exposure duration.
        #[unsafe(method(maxExposureDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn maxExposureDuration(&self) -> CMTime;

        /// Indicates the system's recommended exposure bias range for this device format.
        ///
        ///
        /// This property can be used to create a slider in your app's user interface to control the device's exposure bias with a system-recommended exposure bias range. When a recommendation is not available, this property returns nil.
        ///
        /// The value of this property is also used for the AVCaptureSystemExposureBiasSlider's range.
        #[unsafe(method(systemRecommendedExposureBiasRange))]
        #[unsafe(method_family = none)]
        pub unsafe fn systemRecommendedExposureBiasRange(
            &self,
        ) -> Option<Retained<AVExposureBiasRange>>;

        /// A float indicating the minimum supported exposure ISO value.
        ///
        ///
        /// This read-only property indicates the minimum supported exposure ISO value.
        #[unsafe(method(minISO))]
        #[unsafe(method_family = none)]
        pub unsafe fn minISO(&self) -> c_float;

        /// A float indicating the maximum supported exposure ISO value.
        ///
        ///
        /// This read-only property indicates the maximum supported exposure ISO value.
        #[unsafe(method(maxISO))]
        #[unsafe(method_family = none)]
        pub unsafe fn maxISO(&self) -> c_float;

        /// A property indicating whether the format supports global tone mapping.
        ///
        ///
        /// globalToneMappingSupported is a BOOL indicating whether the format supports global tone mapping. See AVCaptureDevice's globalToneMappingEnabled property.
        #[unsafe(method(isGlobalToneMappingSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isGlobalToneMappingSupported(&self) -> bool;

        /// A property indicating whether the format supports high dynamic range streaming.
        ///
        ///
        /// videoHDRSupported is a BOOL indicating whether the format supports high dynamic range streaming, also known as Extended Dynamic Range (EDR). When enabled, the device streams at twice the published frame rate, capturing an under-exposed frame and correctly exposed frame for each frame time at the published rate. Portions of the under-exposed frame are combined with the correctly exposed frame to recover detail in darker areas of the scene. EDR is a separate and distinct feature from 10-bit HDR video (first seen in 2020 iPhones). 10-bit formats with HLG BT2020 color space have greater dynamic range by virtue of their expanded bit depth and HLG transfer function, and when captured in movies, contain Dolby Vision metadata. They are, in effect, "always on" HDR. And thus the videoHDRSupported property is always NO for 10-bit formats only supporting HLG BT2020 colorspace, since HDR cannot be enabled or disabled. To enable videoHDR (EDR), set the AVCaptureDevice.videoHDREnabled property.
        #[unsafe(method(isVideoHDRSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isVideoHDRSupported(&self) -> bool;

        #[cfg(feature = "objc2-core-media")]
        /// CMVideoDimensions indicating the highest resolution still image that can be produced by this format.
        ///
        ///
        /// By default, AVCapturePhotoOutput and AVCaptureStillImageOutput emit images with the same dimensions as their source AVCaptureDevice's activeFormat.formatDescription property. Some device formats support high resolution photo output. That is, they can stream video to an AVCaptureVideoDataOutput or AVCaptureMovieFileOutput at one resolution while outputting photos to AVCapturePhotoOutput at a higher resolution. You may query this property to discover a video format's supported high resolution still image dimensions. See -[AVCapturePhotoOutput highResolutionPhotoEnabled], -[AVCapturePhotoSettings highResolutionPhotoEnabled], and -[AVCaptureStillImageOutput highResolutionStillImageOutputEnabled].
        ///
        /// AVCaptureDeviceFormats of type AVMediaTypeDepthData may also support the delivery of a higher resolution depth data map to an AVCapturePhotoOutput. Chief differences are:
        /// - Depth data accompanying still images is not supported by AVCaptureStillImageOutput. You must use AVCapturePhotoOutput.
        /// - By opting in for depth data ( -[AVCapturePhotoSettings setDepthDataDeliveryEnabled:YES] ), you implicitly opt in for high resolution depth data if it's available. You may query the -[AVCaptureDevice activeDepthDataFormat]'s highResolutionStillImageDimensions to discover the depth data resolution that will be delivered with captured photos.
        #[deprecated = "Use supportedMaxPhotoDimensions instead."]
        #[unsafe(method(highResolutionStillImageDimensions))]
        #[unsafe(method_family = none)]
        pub unsafe fn highResolutionStillImageDimensions(&self) -> CMVideoDimensions;

        /// A boolean value specifying whether this format supports high photo quality when selecting an AVCapturePhotoQualityPrioritization of .balanced or .quality.
        ///
        ///
        /// If an AVCaptureDeviceFormat's highPhotoQualitySupported property is YES, the format produces higher image quality when selecting .balanced or .quality AVCapturePhotoQualityPrioritization compared to .speed. Such formats adhere to the following rules:
        /// - Photo requests with a prioritization of .speed produce the fastest image result (suitable for burst captures).
        /// - Photo requests with a prioritization of .balanced produce higher image quality without dropping frames if a video recording is underway.
        /// - Photo requests with a prioritization of .quality produce high image quality and may cause frame drops if a video recording is underway. For maximum backward compatibility, photo requests on high photo quality formats set to .quality only cause video frame drops if your app is linked on or after iOS 15.
        /// Formats that don't support high photo quality produce the same image quality whether you select .speed, .balanced, or .quality. Note that high photo quality is only attainable when using the AVCapturePhotoOutput with these supported formats.
        #[unsafe(method(isHighPhotoQualitySupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isHighPhotoQualitySupported(&self) -> bool;

        /// A boolean value specifying whether this format supports the highest possible photo quality that can be delivered on the current platform.
        ///
        ///
        /// Of the many formats supported by an AVCaptureDevice, only a few of them are designated as "photo" formats which can produce the highest possible quality, such as still image stabilization and Live Photos. If you intend to connect an AVCaptureDeviceInput to an AVCapturePhotoOutput and receive the best possible images, you should ensure that you are either using the AVCaptureSessionPresetPhoto as your preset, or if using the parallel AVCaptureDevice activeFormat API, select as your activeFormat one for which this property is YES.
        #[unsafe(method(isHighestPhotoQualitySupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isHighestPhotoQualitySupported(&self) -> bool;

        /// A property indicating the autofocus system.
        ///
        ///
        /// This read-only property indicates the autofocus system.
        #[unsafe(method(autoFocusSystem))]
        #[unsafe(method_family = none)]
        pub unsafe fn autoFocusSystem(&self) -> AVCaptureAutoFocusSystem;

        /// A property indicating the receiver's supported color spaces.
        ///
        ///
        /// This read-only property indicates the receiver's supported color spaces as an array of AVCaptureColorSpace constants sorted from narrow to wide color.
        #[unsafe(method(supportedColorSpaces))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportedColorSpaces(&self) -> Retained<NSArray<NSNumber>>;

        #[cfg(feature = "objc2-core-foundation")]
        /// A deprecated property. Please use supportedVideoZoomFactorsForDepthDataDelivery instead
        #[deprecated]
        #[unsafe(method(videoMinZoomFactorForDepthDataDelivery))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoMinZoomFactorForDepthDataDelivery(&self) -> CGFloat;

        #[cfg(feature = "objc2-core-foundation")]
        /// A deprecated property. Please use supportedVideoZoomFactorsForDepthDataDelivery instead
        #[deprecated]
        #[unsafe(method(videoMaxZoomFactorForDepthDataDelivery))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoMaxZoomFactorForDepthDataDelivery(&self) -> CGFloat;

        /// A deprecated property. Please use supportedVideoZoomRangesForDepthDataDelivery
        #[deprecated]
        #[unsafe(method(supportedVideoZoomFactorsForDepthDataDelivery))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportedVideoZoomFactorsForDepthDataDelivery(
            &self,
        ) -> Retained<NSArray<NSNumber>>;

        /// This property returns the zoom ranges within which depth data can be delivered.
        ///
        /// Virtual devices support limited zoom ranges when delivering depth data to any output. If this device format has no -supportedDepthDataFormats, this property returns an empty array.
        /// The presence of one or more ranges where the min and max zoom factors are not equal means that "continuous zoom" with depth is supported.
        /// For example:
        /// a) ranges:
        /// @
        /// [ [2..2], [4..4] ]
        /// only zoom factors 2 and 4 are allowed to be set when depthDataDelivery is enabled. Any other zoom factor results in an exception.
        /// b) ranges:
        /// @
        /// [ [2..5] ]
        /// depthDataDelivery is supported with zoom factors [2..5]. Zoom factors outside of this range may be set, but will result in loss of depthDataDelivery. Whenever zoom is set back to a value within the range of [2..5], depthDataDelivery will resume.
        ///
        /// When depth data delivery is enabled, the effective videoZoomFactorUpscaleThreshold will be 1.0, meaning that all zoom factors that are not native zoom factors (see AVCaptureDevice.virtualDeviceSwitchOverVideoZoomFactors and AVCaptureDevice.secondaryNativeResolutionZoomFactors) result in digital upscaling.
        #[unsafe(method(supportedVideoZoomRangesForDepthDataDelivery))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportedVideoZoomRangesForDepthDataDelivery(
            &self,
        ) -> Retained<NSArray<AVZoomRange>>;

        /// This property returns whether the format supports zoom factors outside of the supportedVideoZoomFactorRangesForDepthDataDelivery.
        ///
        /// When a zoom factor outside of the supportedVideoZoomFactorRangesForDepthDataDelivery is set, depth data delivery will be suspended until a zoom factor within the supportedVideoZoomFactorRangesForDepthDataDelivery is set.
        #[unsafe(method(zoomFactorsOutsideOfVideoZoomRangesForDepthDeliverySupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn zoomFactorsOutsideOfVideoZoomRangesForDepthDeliverySupported(&self) -> bool;

        /// Indicates this format's companion depth data formats.
        ///
        ///
        /// If no depth data formats are supported by the receiver, an empty array is returned. On virtual devices, the supportedDepthDataFormats list items always match the aspect ratio of their paired video format. When the receiver is set as the device's activeFormat, you may set the device's activeDepthDataFormat to one of these supported depth data formats.
        #[unsafe(method(supportedDepthDataFormats))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportedDepthDataFormats(&self) -> Retained<NSArray<AVCaptureDeviceFormat>>;

        /// A property indicating AVCaptureOutput subclasses the receiver does not support.
        ///
        ///
        /// As a rule, AVCaptureDeviceFormats of a given mediaType are available for use with all AVCaptureOutputs that accept that media type, but there are exceptions. For instance, on apps linked against iOS versions earlier than 12.0, the photo resolution video formats may not be used as sources for AVCaptureMovieFileOutput due to bandwidth limitations. On DualCamera devices, AVCaptureDepthDataOutput is not supported when outputting full resolution (i.e. 12 MP) video due to bandwidth limitations. In order to stream depth data plus video data from a photo format, ensure that your AVCaptureVideoDataOutput's deliversPreviewSizedOutputBuffers property is set to YES. Likewise, to stream depth data while capturing video to a movie file using AVCaptureMovieFileOutput, call -[AVCaptureSession setSessionPreset:AVCaptureSessionPresetPhoto]. When using the photo preset, video is captured at preview resolution rather than the full sensor resolution.
        #[unsafe(method(unsupportedCaptureOutputClasses))]
        #[unsafe(method_family = none)]
        pub unsafe fn unsupportedCaptureOutputClasses(&self) -> Retained<NSArray<AnyClass>>;

        /// This property lists all of the supported maximum photo dimensions for this format. The array contains CMVideoDimensions structs encoded as NSValues.
        ///
        /// Enumerate all supported resolution settings for which this format may be configured to capture photos. Use these values to set AVCapturePhotoOutput.maxPhotoDimensions and AVCapturePhotoSettings.maxPhotoDimensions.
        #[unsafe(method(supportedMaxPhotoDimensions))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportedMaxPhotoDimensions(&self) -> Retained<NSArray<NSValue>>;

        /// Indicates zoom factors at which this device transitions to secondary native resolution modes.
        ///
        /// Devices with this property have the means to switch their pixel sampling mode on the fly to produce a high-fidelity, non-upsampled images at a fixed zoom factor beyond 1.0x.
        #[unsafe(method(secondaryNativeResolutionZoomFactors))]
        #[unsafe(method_family = none)]
        pub unsafe fn secondaryNativeResolutionZoomFactors(&self) -> Retained<NSArray<NSNumber>>;

        /// Indicates whether the device format supports auto video frame rate.
        ///
        ///
        /// See -[AVCaptureDevice autoVideoFrameRateEnabled] (above) for a detailed description of the feature.
        #[unsafe(method(isAutoVideoFrameRateSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoVideoFrameRateSupported(&self) -> bool;
    );
}
4066
/// AVCaptureDeviceFormatDepthDataAdditions.
impl AVCaptureDeviceFormat {
    extern_methods!(
        /// A BOOL indicating whether this format supports delivering a
        /// portrait effects matte with still image capture (selector name;
        /// no further header documentation was emitted for this property).
        #[unsafe(method(isPortraitEffectsMatteStillImageDeliverySupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isPortraitEffectsMatteStillImageDeliverySupported(&self) -> bool;
    );
}
4075
/// AVCaptureDeviceFormatMultiCamAdditions.
///
/// Multi-camera session support query on [`AVCaptureDeviceFormat`].
impl AVCaptureDeviceFormat {
    extern_methods!(
        /// A property indicating whether this format is supported in an AVCaptureMultiCamSession.
        ///
        ///
        /// When using an AVCaptureSession (single camera capture), any of the formats in the device's -formats array may be set as the -activeFormat. However, when used with an AVCaptureMultiCamSession, the device's -activeFormat may only be set to one of the formats for which multiCamSupported answers YES. This limited subset of capture formats are known to run sustainably in a multi camera capture scenario.
        #[unsafe(method(isMultiCamSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isMultiCamSupported(&self) -> bool;
    );
}
4088
/// AVCaptureDeviceFormatSpatialVideoCapture.
///
/// Spatial video capture support query on [`AVCaptureDeviceFormat`].
impl AVCaptureDeviceFormat {
    extern_methods!(
        /// Returns whether or not the format supports capturing spatial video to a file.
        #[unsafe(method(isSpatialVideoCaptureSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isSpatialVideoCaptureSupported(&self) -> bool;
    );
}
4098
/// AVCaptureDeviceFormatGeometricDistortionCorrection.
///
/// Geometric distortion correction (GDC) field-of-view query on [`AVCaptureDeviceFormat`].
impl AVCaptureDeviceFormat {
    extern_methods!(
        /// A property indicating the format's horizontal field of view post geometric distortion correction.
        ///
        ///
        /// If the receiver's AVCaptureDevice does not support GDC, geometricDistortionCorrectedVideoFieldOfView matches the videoFieldOfView property.
        #[unsafe(method(geometricDistortionCorrectedVideoFieldOfView))]
        #[unsafe(method_family = none)]
        pub unsafe fn geometricDistortionCorrectedVideoFieldOfView(&self) -> c_float;
    );
}
4111
/// AVCaptureDeviceFormatCenterStage.
///
/// Center Stage support queries on [`AVCaptureDeviceFormat`]. The zoom-factor
/// accessors are gated on the `objc2-core-foundation` feature because their
/// return type is [`CGFloat`].
impl AVCaptureDeviceFormat {
    extern_methods!(
        /// Indicates whether the format supports the Center Stage feature.
        ///
        ///
        /// This property returns YES if the format supports "Center Stage", which automatically adjusts the camera to keep people optimally framed within the field of view. See +AVCaptureDevice.centerStageEnabled for a detailed discussion.
        #[unsafe(method(isCenterStageSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isCenterStageSupported(&self) -> bool;

        #[cfg(feature = "objc2-core-foundation")]
        /// Indicates the minimum zoom factor available for the AVCaptureDevice's videoZoomFactor property when centerStageActive is YES.
        ///
        ///
        /// Devices support a limited zoom range when Center Stage is active. If this device format does not support Center Stage, this property returns 1.0.
        #[unsafe(method(videoMinZoomFactorForCenterStage))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoMinZoomFactorForCenterStage(&self) -> CGFloat;

        #[cfg(feature = "objc2-core-foundation")]
        /// Indicates the maximum zoom factor available for the AVCaptureDevice's videoZoomFactor property when centerStageActive is YES.
        ///
        ///
        /// Devices support a limited zoom range when Center Stage is active. If this device format does not support Center Stage, this property returns videoMaxZoomFactor.
        #[unsafe(method(videoMaxZoomFactorForCenterStage))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoMaxZoomFactorForCenterStage(&self) -> CGFloat;

        /// Indicates the minimum / maximum frame rates available when centerStageActive is YES.
        ///
        ///
        /// Devices may support a limited frame rate range when Center Stage is active. If this device format does not support Center Stage, this property returns nil.
        #[unsafe(method(videoFrameRateRangeForCenterStage))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoFrameRateRangeForCenterStage(
            &self,
        ) -> Option<Retained<AVFrameRateRange>>;
    );
}
4152
/// AVCaptureDeviceFormatPortraitEffect.
///
/// Portrait Effect support queries on [`AVCaptureDeviceFormat`].
impl AVCaptureDeviceFormat {
    extern_methods!(
        /// Indicates whether the format supports the Portrait Effect feature.
        ///
        ///
        /// This property returns YES if the format supports Portrait Effect, the application of a shallow depth of field effect to objects in the background. See +AVCaptureDevice.portraitEffectEnabled for a detailed discussion.
        #[unsafe(method(isPortraitEffectSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isPortraitEffectSupported(&self) -> bool;

        /// Indicates the minimum / maximum frame rates available when portraitEffectActive is YES.
        ///
        ///
        /// Devices may support a limited frame rate range when Portrait Effect is active. If this device format does not support Portrait Effect, this property returns nil.
        #[unsafe(method(videoFrameRateRangeForPortraitEffect))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoFrameRateRangeForPortraitEffect(
            &self,
        ) -> Option<Retained<AVFrameRateRange>>;
    );
}
4175
/// AVCaptureDeviceStudioLight.
///
/// Studio Light state queries on [`AVCaptureDevice`].
impl AVCaptureDevice {
    extern_methods!(
        /// A class property indicating whether the Studio Light feature is currently enabled in Control Center.
        ///
        ///
        /// This property changes to reflect the Studio Light state in Control Center. It is key-value observable.  On iOS, Studio Light only applies to video conferencing apps by default (apps that use "voip" as one of their UIBackgroundModes). Non video conferencing apps may opt in for Studio Light by adding the following key to their Info.plist:
        /// <key
        /// >NSCameraStudioLightEnabled
        /// </key
        /// >
        /// <true
        /// />
        // Objective-C class property — bound as an associated function without `&self`.
        #[unsafe(method(isStudioLightEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isStudioLightEnabled() -> bool;

        /// Indicates whether Studio Light is currently active on a particular AVCaptureDevice.
        ///
        ///
        /// This readonly property returns YES when Studio Light is currently active on the receiver. When active, the subject's face is artificially lit to simulate the presence of a studio light near the camera.
        #[unsafe(method(isStudioLightActive))]
        #[unsafe(method_family = none)]
        pub unsafe fn isStudioLightActive(&self) -> bool;
    );
}
4202
/// AVCaptureDeviceFormatStudioLight.
///
/// Studio Light support queries on [`AVCaptureDeviceFormat`].
impl AVCaptureDeviceFormat {
    extern_methods!(
        /// Indicates whether the format supports the Studio Light feature.
        ///
        ///
        /// This property returns YES if the format supports Studio Light (artificial re-lighting of the subject's face). See +AVCaptureDevice.studioLightEnabled.
        #[unsafe(method(isStudioLightSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isStudioLightSupported(&self) -> bool;

        /// Indicates the minimum / maximum frame rates available when studioLight is YES.
        ///
        ///
        /// Devices may support a limited frame rate range when Studio Light is active. If this device format does not support Studio Light, this property returns nil.
        #[unsafe(method(videoFrameRateRangeForStudioLight))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoFrameRateRangeForStudioLight(
            &self,
        ) -> Option<Retained<AVFrameRateRange>>;
    );
}
4225
/// AVCaptureDeviceFormatReactionEffects.
///
/// Reaction Effects support queries on [`AVCaptureDeviceFormat`].
impl AVCaptureDeviceFormat {
    extern_methods!(
        /// Indicates whether the format supports the Reaction Effects feature.
        ///
        ///
        /// This property returns YES if the format supports Reaction Effects. See +AVCaptureDevice.reactionEffectsEnabled.
        // Note: the Objective-C getter is `reactionEffectsSupported` (no `is` prefix),
        // unlike the other `is*Supported` properties in this file.
        #[unsafe(method(reactionEffectsSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn reactionEffectsSupported(&self) -> bool;

        /// Indicates the minimum / maximum frame rates available when a reaction effect is running.
        ///
        ///
        /// Unlike the other video effects, enabling reaction effects does not limit the stream's frame rate because most of the time no rendering is being performed. The frame rate will only ramp down when a reaction is actually being rendered on the stream (see AVCaptureDevice.reactionEffectsInProgress)
        #[unsafe(method(videoFrameRateRangeForReactionEffectsInProgress))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoFrameRateRangeForReactionEffectsInProgress(
            &self,
        ) -> Option<Retained<AVFrameRateRange>>;
    );
}
4248
/// AVCaptureDeviceFormatBackgroundReplacement.
///
/// Background Replacement support queries on [`AVCaptureDeviceFormat`].
impl AVCaptureDeviceFormat {
    extern_methods!(
        /// Indicates whether the format supports the Background Replacement feature.
        ///
        ///
        /// This property returns YES if the format supports background replacement. See +AVCaptureDevice.backgroundReplacementEnabled.
        #[unsafe(method(isBackgroundReplacementSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isBackgroundReplacementSupported(&self) -> bool;

        /// Indicates the minimum / maximum frame rates available when background replacement is active.
        ///
        ///
        /// Devices may support a limited frame rate range when Background Replacement is active. If this device format does not support Background Replacement, this property returns nil.
        #[unsafe(method(videoFrameRateRangeForBackgroundReplacement))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoFrameRateRangeForBackgroundReplacement(
            &self,
        ) -> Option<Retained<AVFrameRateRange>>;
    );
}
4271
/// AVCaptureDeviceFormatCinematicVideoSupport.
///
/// Cinematic Video support queries on [`AVCaptureDeviceFormat`]: capture
/// support, simulated-aperture range, zoom-factor range, and frame-rate range.
/// The zoom-factor accessors are gated on the `objc2-core-foundation` feature
/// because their return type is [`CGFloat`].
impl AVCaptureDeviceFormat {
    extern_methods!(
        /// Indicates whether the format supports Cinematic Video capture.
        ///
        /// This property returns `true` if the format supports Cinematic Video that produces a controllable, simulated depth of field and adds beautiful focus transitions for a cinema-grade look.
        #[unsafe(method(isCinematicVideoCaptureSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isCinematicVideoCaptureSupported(&self) -> bool;

        /// Default shallow depth of field simulated aperture.
        ///
        /// This property returns a non-zero value on devices that support the shallow depth of field effect.
        #[unsafe(method(defaultSimulatedAperture))]
        #[unsafe(method_family = none)]
        pub unsafe fn defaultSimulatedAperture(&self) -> c_float;

        /// Minimum supported shallow depth of field simulated aperture.
        ///
        /// On devices that do not support changing the simulated aperture value, this returns a value of `0`.
        #[unsafe(method(minSimulatedAperture))]
        #[unsafe(method_family = none)]
        pub unsafe fn minSimulatedAperture(&self) -> c_float;

        /// Maximum supported shallow depth of field simulated aperture.
        ///
        /// On devices that do not support changing the simulated aperture value, this returns a value of `0`.
        #[unsafe(method(maxSimulatedAperture))]
        #[unsafe(method_family = none)]
        pub unsafe fn maxSimulatedAperture(&self) -> c_float;

        #[cfg(feature = "objc2-core-foundation")]
        /// Indicates the minimum zoom factor available for the ``AVCaptureDevice/videoZoomFactor`` property when Cinematic Video capture is enabled on the device input.
        ///
        /// Devices support a limited zoom range when Cinematic Video capture is active. If this device format does not support Cinematic Video capture, this property returns `1.0`.
        #[unsafe(method(videoMinZoomFactorForCinematicVideo))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoMinZoomFactorForCinematicVideo(&self) -> CGFloat;

        #[cfg(feature = "objc2-core-foundation")]
        /// Indicates the maximum zoom factor available for the ``AVCaptureDevice/videoZoomFactor`` property when Cinematic Video capture is enabled on the device input.
        ///
        /// Devices support a limited zoom range when Cinematic Video capture is active. If this device format does not support Cinematic Video capture, this property returns `1.0`.
        #[unsafe(method(videoMaxZoomFactorForCinematicVideo))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoMaxZoomFactorForCinematicVideo(&self) -> CGFloat;

        /// Indicates the minimum / maximum frame rates available when Cinematic Video capture is enabled on the device input.
        ///
        /// Devices may support a limited frame rate range when Cinematic Video capture is active. If this device format does not support Cinematic Video capture, this property returns `nil`.
        #[unsafe(method(videoFrameRateRangeForCinematicVideo))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoFrameRateRangeForCinematicVideo(
            &self,
        ) -> Option<Retained<AVFrameRateRange>>;
    );
}
4329
/// DynamicAspectRatio.
///
/// Dynamic aspect ratio queries on [`AVCaptureDeviceFormat`].
impl AVCaptureDeviceFormat {
    extern_methods!(
        /// Indicates the supported aspect ratios for the device format.
        ///
        /// An array that describes the aspect ratios that are supported for this format. If this device format does not support dynamic aspect ratio, this property returns an empty array.
        #[unsafe(method(supportedDynamicAspectRatios))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportedDynamicAspectRatios(
            &self,
        ) -> Retained<NSArray<AVCaptureAspectRatio>>;

        /// Indicates the horizontal field of view for an aspect ratio, either uncorrected or corrected for geometric distortion.
        ///
        /// A float indicating the field of view for the corresponding ``AVCaptureAspectRatio``. Set ``AVCaptureDevice/geometricDistortionCorrected`` to `true` to receive the field of view corrected for geometric distortion. If this device format does not support dynamic aspect ratio, this function returns `0`.
        #[unsafe(method(videoFieldOfViewForAspectRatio:geometricDistortionCorrected:))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoFieldOfViewForAspectRatio_geometricDistortionCorrected(
            &self,
            aspect_ratio: &AVCaptureAspectRatio,
            geometric_distortion_corrected: bool,
        ) -> c_float;
    );
}
4354
/// AVCaptureDeviceFormatSmartFraming.
///
/// Smart framing support query on [`AVCaptureDeviceFormat`].
impl AVCaptureDeviceFormat {
    extern_methods!(
        /// Returns `true` if smart framing is supported by the current format.
        ///
        /// An ultra wide camera device that supports dynamic aspect ratio configuration may also support "smart framing monitoring" on particular formats.
        #[unsafe(method(isSmartFramingSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isSmartFramingSupported(&self) -> bool;
    );
}
4366
/// CameraLensSmudgeDetection.
///
/// Lens smudge detection support query on [`AVCaptureDeviceFormat`]; the
/// corresponding enable/status accessors live on [`AVCaptureDevice`].
impl AVCaptureDeviceFormat {
    extern_methods!(
        /// Whether camera lens smudge detection is supported.
        ///
        /// This property returns `true` if the session's current configuration supports lens smudge detection. When switching cameras or formats, this property may change. When this property changes from `true` to `false`, ``AVCaptureDevice/cameraLensSmudgeDetectionEnabled`` also reverts to `false`. If you opt in for lens smudge detection and then change configurations, you should set ``AVCaptureDevice/cameraLensSmudgeDetectionEnabled`` to `true` again.
        #[unsafe(method(isCameraLensSmudgeDetectionSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isCameraLensSmudgeDetectionSupported(&self) -> bool;
    );
}
4378
/// Constants indicating the current camera lens smudge detection status.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturecameralenssmudgedetectionstatus?language=objc)
// NS_ENUM
// Modeled as a transparent newtype over `NSInteger` rather than a Rust enum,
// so integer values beyond the named constants remain representable.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureCameraLensSmudgeDetectionStatus(pub NSInteger);
impl AVCaptureCameraLensSmudgeDetectionStatus {
    /// Indicates that the detection is not enabled.
    #[doc(alias = "AVCaptureCameraLensSmudgeDetectionStatusDisabled")]
    pub const Disabled: Self = Self(0);
    /// Indicates that the most recent detection found no smudge on the camera lens.
    #[doc(alias = "AVCaptureCameraLensSmudgeDetectionStatusSmudgeNotDetected")]
    pub const SmudgeNotDetected: Self = Self(1);
    /// Indicates that the most recent detection found the camera lens to be smudged.
    #[doc(alias = "AVCaptureCameraLensSmudgeDetectionStatusSmudged")]
    pub const Smudged: Self = Self(2);
    /// Indicates that the detection result has not settled, commonly caused by excessive camera movement or the content of the scene.
    #[doc(alias = "AVCaptureCameraLensSmudgeDetectionStatusUnknown")]
    pub const Unknown: Self = Self(3);
}
4400
// SAFETY: the type is `repr(transparent)` over `NSInteger`, so it shares
// NSInteger's Objective-C type encoding when passed over FFI.
unsafe impl Encode for AVCaptureCameraLensSmudgeDetectionStatus {
    const ENCODING: Encoding = NSInteger::ENCODING;
}
4404
// SAFETY: a reference to the value encodes as a pointer to the value's own encoding.
unsafe impl RefEncode for AVCaptureCameraLensSmudgeDetectionStatus {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
4408
/// CameraLensSmudgeDetection.
///
/// Camera lens smudge detection control and status on [`AVCaptureDevice`].
/// The `CMTime`-taking/returning methods are gated on the `objc2-core-media`
/// feature.
impl AVCaptureDevice {
    extern_methods!(
        #[cfg(feature = "objc2-core-media")]
        /// Specify whether to enable camera lens smudge detection, and the interval time between each run of detections.
        ///
        /// - Parameter cameraLensSmudgeDetectionEnabled: Specify whether camera lens smudge detection should be enabled.
        /// - Parameter detectionInterval: The detection running interval if detection is enabled.
        ///
        /// Each run of detection processes frames over a short period, and produces one detection result. Use `detectionInterval` to specify the interval time between each run of detections. For example, when ``cameraLensSmudgeDetectionEnabled`` is set to `true` and `detectionInterval` is set to 1 minute, detection runs once per minute, and updates ``AVCaptureCameraLensSmudgeDetectionStatus``. If `detectionInterval` is set to ``kCMTimeInvalid``, detection runs only once after the session starts. If `detectionInterval` is set to ``kCMTimeZero``, detection runs continuously.
        ///
        /// ``AVCaptureDevice`` throws an `NSInvalidArgumentException` if the ``AVCaptureDeviceFormat/cameraLensSmudgeDetectionSupported`` property on the current active format returns `false`. Enabling detection requires a lengthy reconfiguration of the capture render pipeline, so you should enable detection before calling ``AVCaptureSession/startRunning`` or within ``AVCaptureSession/beginConfiguration`` and ``AVCaptureSession/commitConfiguration`` while running.
        #[unsafe(method(setCameraLensSmudgeDetectionEnabled:detectionInterval:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCameraLensSmudgeDetectionEnabled_detectionInterval(
            &self,
            camera_lens_smudge_detection_enabled: bool,
            detection_interval: CMTime,
        );

        /// Whether camera lens smudge detection is enabled.
        ///
        /// You enable lens smudge detection by calling ``setCameraLensSmudgeDetectionEnabled:detectionInterval:``. By default, this property returns `false`.
        #[unsafe(method(isCameraLensSmudgeDetectionEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isCameraLensSmudgeDetectionEnabled(&self) -> bool;

        #[cfg(feature = "objc2-core-media")]
        /// The camera lens smudge detection interval.
        ///
        /// ``cameraLensSmudgeDetectionInterval`` is set by calling ``setCameraLensSmudgeDetectionEnabled:detectionInterval:``. By default, this property returns `kCMTimeInvalid`.
        #[unsafe(method(cameraLensSmudgeDetectionInterval))]
        #[unsafe(method_family = none)]
        pub unsafe fn cameraLensSmudgeDetectionInterval(&self) -> CMTime;

        /// A value specifying the status of camera lens smudge detection.
        ///
        /// During initial detection execution, ``cameraLensSmudgeDetectionStatus`` returns ``AVCaptureCameraLensSmudgeDetectionStatusUnknown`` until the detection result settles. Once a detection result is produced, ``cameraLensSmudgeDetectionStatus`` returns the most recent detection result. This property can be key-value observed.
        #[unsafe(method(cameraLensSmudgeDetectionStatus))]
        #[unsafe(method_family = none)]
        pub unsafe fn cameraLensSmudgeDetectionStatus(
            &self,
        ) -> AVCaptureCameraLensSmudgeDetectionStatus;
    );
}
4454
extern_class!(
    /// An AVCaptureDeviceInputSource represents a distinct input source on an AVCaptureDevice object.
    ///
    ///
    /// An AVCaptureDevice may optionally present an array of inputSources, representing distinct mutually exclusive inputs to the device, for example, an audio AVCaptureDevice might have ADAT optical and analog input sources. A video AVCaptureDevice might have an HDMI input source, or a component input source.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedeviceinputsource?language=objc)
    // Bound as a subclass of `NSObject` via `#[unsafe(super(NSObject))]`.
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCaptureDeviceInputSource;
);
4466
// Declares that `AVCaptureDeviceInputSource` conforms to `NSObjectProtocol`.
extern_conformance!(
    unsafe impl NSObjectProtocol for AVCaptureDeviceInputSource {}
);
4470
impl AVCaptureDeviceInputSource {
    extern_methods!(
        // Standard NSObject-style initializers (`init` / `new`).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        /// An ID unique among the inputSources exposed by a given AVCaptureDevice.
        ///
        ///
        /// An AVCaptureDevice's inputSources array must contain AVCaptureInputSource objects with unique inputSourceIDs.
        #[unsafe(method(inputSourceID))]
        #[unsafe(method_family = none)]
        pub unsafe fn inputSourceID(&self) -> Retained<NSString>;

        /// A localized human-readable name for the receiver.
        ///
        ///
        /// This property can be used for displaying the name of the capture device input source in a user interface.
        #[unsafe(method(localizedName))]
        #[unsafe(method_family = none)]
        pub unsafe fn localizedName(&self) -> Retained<NSString>;
    );
}