// objc2_av_foundation/generated/AVCapturePhotoOutput.rs

1//! This file has been automatically generated by `objc2`'s `header-translator`.
2//! DO NOT EDIT
3use core::ffi::*;
4use core::ptr::NonNull;
5use objc2::__framework_prelude::*;
6#[cfg(feature = "objc2-core-graphics")]
7use objc2_core_graphics::*;
8#[cfg(feature = "objc2-core-media")]
9use objc2_core_media::*;
10#[cfg(feature = "objc2-core-video")]
11use objc2_core_video::*;
12use objc2_foundation::*;
13
14use crate::*;
15
16/// Constants indicating how photo quality should be prioritized against speed.
17///
18///
19/// Indicates that speed of photo delivery is most important, even at the expense of quality.
20///
21/// Indicates that photo quality and speed of delivery are balanced in priority.
22///
23/// Indicates that photo quality is paramount, even at the expense of shot-to-shot time.
24///
25/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturephotoqualityprioritization?language=objc)
26// NS_ENUM
27#[repr(transparent)]
28#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
29pub struct AVCapturePhotoQualityPrioritization(pub NSInteger);
30impl AVCapturePhotoQualityPrioritization {
31    #[doc(alias = "AVCapturePhotoQualityPrioritizationSpeed")]
32    pub const Speed: Self = Self(1);
33    #[doc(alias = "AVCapturePhotoQualityPrioritizationBalanced")]
34    pub const Balanced: Self = Self(2);
35    #[doc(alias = "AVCapturePhotoQualityPrioritizationQuality")]
36    pub const Quality: Self = Self(3);
37}
38
// SAFETY: `AVCapturePhotoQualityPrioritization` is `#[repr(transparent)]`
// over `NSInteger`, so it shares `NSInteger`'s Objective-C type encoding.
unsafe impl Encode for AVCapturePhotoQualityPrioritization {
    const ENCODING: Encoding = NSInteger::ENCODING;
}
42
// SAFETY: a reference to this type is encoded as a pointer to the value
// encoding (`Encoding::Pointer(&Self::ENCODING)`), mirroring the `Encode`
// impl above.
unsafe impl RefEncode for AVCapturePhotoQualityPrioritization {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
46
47/// Constants indicating whether the output is ready to receive capture requests.
48///
49///
50/// Indicates that the session is not running and the output is not ready to receive requests.
51///
52/// Indicates that the output is ready to receive new requests.
53///
54/// Indicates that the output is not ready to receive requests and may be ready shortly.
55///
56/// Indicates that the output is not ready to receive requests for a longer duration because it is busy capturing.
57///
58/// Indicates that the output is not ready to receive requests for a longer duration because it is busy processing.
59///
60/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturephotooutputcapturereadiness?language=objc)
61// NS_ENUM
62#[repr(transparent)]
63#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
64pub struct AVCapturePhotoOutputCaptureReadiness(pub NSInteger);
65impl AVCapturePhotoOutputCaptureReadiness {
66    #[doc(alias = "AVCapturePhotoOutputCaptureReadinessSessionNotRunning")]
67    pub const SessionNotRunning: Self = Self(0);
68    #[doc(alias = "AVCapturePhotoOutputCaptureReadinessReady")]
69    pub const Ready: Self = Self(1);
70    #[doc(alias = "AVCapturePhotoOutputCaptureReadinessNotReadyMomentarily")]
71    pub const NotReadyMomentarily: Self = Self(2);
72    #[doc(alias = "AVCapturePhotoOutputCaptureReadinessNotReadyWaitingForCapture")]
73    pub const NotReadyWaitingForCapture: Self = Self(3);
74    #[doc(alias = "AVCapturePhotoOutputCaptureReadinessNotReadyWaitingForProcessing")]
75    pub const NotReadyWaitingForProcessing: Self = Self(4);
76}
77
// SAFETY: `AVCapturePhotoOutputCaptureReadiness` is `#[repr(transparent)]`
// over `NSInteger`, so it shares `NSInteger`'s Objective-C type encoding.
unsafe impl Encode for AVCapturePhotoOutputCaptureReadiness {
    const ENCODING: Encoding = NSInteger::ENCODING;
}
81
// SAFETY: a reference to this type is encoded as a pointer to the value
// encoding (`Encoding::Pointer(&Self::ENCODING)`), mirroring the `Encode`
// impl above.
unsafe impl RefEncode for AVCapturePhotoOutputCaptureReadiness {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
85
// `extern_class!` generates the Rust-side type for the Objective-C class
// `AVCapturePhotoOutput`, with the superclass chain declared in
// `#[unsafe(super(AVCaptureOutput, NSObject))]` below. The token structure
// here is macro input from the generator and must not be hand-restyled.
extern_class!(
    /// AVCapturePhotoOutput is a concrete subclass of AVCaptureOutput that supersedes AVCaptureStillImageOutput as the preferred interface for capturing photos. In addition to capturing all flavors of still image supported by AVCaptureStillImageOutput, it supports Live Photo capture, preview-sized image delivery, wide color, RAW, RAW+JPG and RAW+DNG formats.
    ///
    ///
    /// Taking a photo is multi-step process. Clients wishing to build a responsive UI need to know about the progress of a photo capture request as it advances from capture to processing to finished delivery. AVCapturePhotoOutput informs clients of photo capture progress through a delegate protocol. To take a picture, a client instantiates and configures an AVCapturePhotoSettings object, then calls AVCapturePhotoOutput's -capturePhotoWithSettings:delegate:, passing a delegate to be informed when events relating to the photo capture occur (e.g., the photo is about to be captured, the photo has been captured but not processed yet, the Live Photo movie is ready, etc.).
    ///
    /// Some AVCapturePhotoSettings properties can be set to "Auto", such as flashMode. When set to AVCaptureFlashModeAuto, the photo output decides at capture time whether the current scene and lighting conditions require use of the flash. Thus the client doesn't know with certainty which features will be enabled when making the capture request. With the first and each subsequent delegate callback, the client is provided an AVCaptureResolvedPhotoSettings instance that indicates the settings that were applied to the capture. All "Auto" features have now been resolved to on or off. The AVCaptureResolvedPhotoSettings object passed in the client's delegate callbacks has a uniqueID identical to the AVCapturePhotoSettings request. This uniqueID allows clients to pair unresolved and resolved settings objects. See AVCapturePhotoCaptureDelegate below for a detailed discussion of the delegate callbacks.
    ///
    /// Enabling certain photo features (Live Photo capture and high resolution capture) requires a reconfiguration of the capture render pipeline. Clients wishing to opt in for these features should call -setLivePhotoCaptureEnabled: and/or -setHighResolutionCaptureEnabled: before calling -startRunning on the AVCaptureSession. Changing any of these properties while the session is running requires a disruptive reconfiguration of the capture render pipeline. Live Photo captures in progress will be ended immediately; unfulfilled photo requests will be aborted; video preview will temporarily freeze. If you wish to capture Live Photos containing sound, you must add an audio AVCaptureDeviceInput to your AVCaptureSession.
    ///
    /// Simultaneous Live Photo capture and MovieFileOutput capture is not supported. If an AVCaptureMovieFileOutput is added to your session, AVCapturePhotoOutput's livePhotoCaptureSupported property returns NO. Note that simultaneous Live Photo capture and AVCaptureVideoDataOutput is supported.
    ///
    /// AVCaptureStillImageOutput and AVCapturePhotoOutput may not both be added to a capture session. You must use one or the other. If you add both to a session, a NSInvalidArgumentException is thrown.
    ///
    /// AVCapturePhotoOutput implicitly supports wide color photo capture, following the activeColorSpace of the source AVCaptureDevice. If the source device's activeColorSpace is AVCaptureColorSpace_P3_D65, photos are encoded with wide color information, unless you've specified an output format of '420v', which does not support wide color.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturephotooutput?language=objc)
    #[unsafe(super(AVCaptureOutput, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "AVCaptureOutputBase")]
    pub struct AVCapturePhotoOutput;
);
108
// SAFETY: `AVCapturePhotoOutput` is declared via `extern_class!` with
// `NSObject` in its superclass chain, so conformance to `NSObjectProtocol`
// holds for every instance.
#[cfg(feature = "AVCaptureOutputBase")]
extern_conformance!(
    unsafe impl NSObjectProtocol for AVCapturePhotoOutput {}
);
113
114#[cfg(feature = "AVCaptureOutputBase")]
115impl AVCapturePhotoOutput {
116    extern_methods!(
117        #[unsafe(method(init))]
118        #[unsafe(method_family = init)]
119        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;
120
121        #[unsafe(method(new))]
122        #[unsafe(method_family = new)]
123        pub unsafe fn new() -> Retained<Self>;
124
125        /// Method for initiating a photo capture request with progress monitoring through the supplied delegate.
126        ///
127        ///
128        /// Parameter `settings`: An AVCapturePhotoSettings object you have configured. May not be nil.
129        ///
130        /// Parameter `delegate`: An object conforming to the AVCapturePhotoCaptureDelegate protocol. This object's delegate methods are called back as the photo advances from capture to processing to finished delivery. May not be nil.
131        ///
132        ///
133        /// This method initiates a photo capture. The receiver copies your provided settings to prevent unintentional mutation. It is illegal to re-use settings. The receiver throws an NSInvalidArgumentException if your settings.uniqueID matches that of any previously used settings. This method is used to initiate all flavors of photo capture: single photo, RAW capture with or without a processed image (such as a JPEG), bracketed capture, and Live Photo.
134        ///
135        /// Clients need not wait for a capture photo request to complete before issuing another request. This is true for single photo captures as well as Live Photos, where movie complements of adjacent photo captures are allowed to overlap.
136        ///
137        /// This method validates your settings and enforces the following rules in order to ensure deterministic behavior. If any of these rules are violated, a NSInvalidArgumentException is thrown.
138        /// RAW rules:
139        /// See +isBayerRAWPixelFormat: and +isAppleProRAWPixelFormat: on the difference between Bayer RAW and Apple ProRAW pixel formats.
140        /// Common RAW rules:
141        /// - If rawPhotoPixelFormatType is non-zero, it must be present in the receiver's -availableRawPhotoPixelFormatTypes array.
142        /// - If rawPhotoPixelFormatType is non-zero, your delegate must respond to -captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:.
143        /// - If rawPhotoPixelFormatType is non-zero, highResolutionPhotoEnabled may be YES or NO, but the setting only applies to the processed image, if you've specified one.
144        /// - If rawPhotoPixelFormatType is non-zero, constantColorEnabled must be set to NO.
145        /// - If rawFileType is specified, it must be present in -availableRawPhotoFileTypes and must support the rawPhotoPixelFormatType specified using -supportedRawPhotoPixelFormatTypesForFileType:.
146        /// Bayer RAW rules (isBayerRAWPixelFormat: returns yes for rawPhotoPixelFormatType):
147        /// - photoQualityPrioritization must be set to AVCapturePhotoQualityPrioritizationSpeed (deprecated autoStillImageStabilizationEnabled must be set to NO).
148        /// - the videoZoomFactor of the source device and the videoScaleAndCropFactor of the photo output's video connection must both be 1.0. Ensure no zoom is applied before requesting a RAW capture, and don't change the zoom during RAW capture.
149        /// Apple ProRAW rules (isAppleProRAWPixelFormat: returns yes for rawPhotoPixelFormatType):
150        /// - livePhotoMovieFileURL must be nil in AVCapturePhotoSettings settings
151        /// - autoContentAwareDistortionCorrectionEnabled will automatically be disabled in AVCapturePhotoSettings
152        /// - autoRedEyeReductionEnabled will automatically be disabled in AVCapturePhotoSettings
153        /// - portraitEffectsMatteDeliveryEnabled will automatically be disabled in AVCapturePhotoSettings
154        /// - enabledSemanticSegmentationMatteTypes will automatically be cleared in AVCapturePhotoSettings
155        /// Processed Format rules:
156        /// - If format is non-nil, a kCVPixelBufferPixelFormatTypeKey or AVVideoCodecKey must be present. You cannot specify both.
157        /// - If format has a kCVPixelBufferPixelFormatTypeKey, its value must be present in the receiver's -availablePhotoPixelFormatTypes array.
158        /// - If format has an AVVideoCodecKey, its value must be present in the receiver's -availablePhotoCodecTypes array.
159        /// - If format is non-nil, your delegate must respond to -captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:.
160        /// - If processedFileType is specified, it must be present in -availablePhotoFileTypes and must support the format's specified kCVPixelBufferPixelFormatTypeKey (using -supportedPhotoPixelFormatTypesForFileType:) or AVVideoCodecKey (using -supportedPhotoCodecTypesForFileType:).
161        /// - The photoQualityPrioritization you specify may not be a greater number than the photo output's maxPhotoQualityPrioritization. You must set your AVCapturePhotoOutput maxPhotoQualityPrioritization up front.
162        /// Flash rules:
163        /// - The specified flashMode must be present in the receiver's -supportedFlashModes array.
164        /// Live Photo rules:
165        /// - The receiver's livePhotoCaptureEnabled must be YES if settings.livePhotoMovieURL is non-nil.
166        /// - If settings.livePhotoMovieURL is non-nil, the receiver's livePhotoCaptureSuspended property must be set to NO.
167        /// - If settings.livePhotoMovieURL is non-nil, it must be a file URL that's accessible to your app's sandbox.
168        /// - If settings.livePhotoMovieURL is non-nil, your delegate must respond to -captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error:.
169        /// Bracketed capture rules:
170        /// - bracketedSettings.count must be
171        /// <
172        /// = the receiver's maxBracketedCapturePhotoCount property.
173        /// - For manual exposure brackets, ISO value must be within the source device activeFormat's minISO and maxISO values.
174        /// - For manual exposure brackets, exposureDuration value must be within the source device activeFormat's minExposureDuration and maxExposureDuration values.
175        /// - For auto exposure brackets, exposureTargetBias value must be within the source device's minExposureTargetBias and maxExposureTargetBias values.
176        /// Deferred Photo Delivery rules:
177        /// - If the receiver's autoDeferredPhotoDeliveryEnabled is YES, your delegate must respond to -captureOutput:didFinishCapturingDeferredPhotoProxy:error:.
178        /// - The maxPhotoDimensions setting for 24MP (5712, 4284), when supported, is only serviced as 24MP via deferred photo delivery.
179        /// Color space rules:
180        /// - Photo capture is not supported when AVCaptureDevice has selected AVCaptureColorSpace_AppleLog or AVCaptureColorSpace_AppleLog2 as color space.
181        #[unsafe(method(capturePhotoWithSettings:delegate:))]
182        #[unsafe(method_family = none)]
183        pub unsafe fn capturePhotoWithSettings_delegate(
184            &self,
185            settings: &AVCapturePhotoSettings,
186            delegate: &ProtocolObject<dyn AVCapturePhotoCaptureDelegate>,
187        );
188
189        /// An array of AVCapturePhotoSettings instances for which the receiver is prepared to capture.
190        ///
191        ///
192        /// See also setPreparedPhotoSettingsArray:completionHandler:
193        /// Some types of photo capture, such as bracketed captures and RAW captures, require the receiver to allocate additional buffers or prepare other resources. To prevent photo capture requests from executing slowly due to lazy resource allocation, you may call -setPreparedPhotoSettingsArray:completionHandler: with an array of settings objects representative of the types of capture you will be performing (e.g., settings for a bracketed capture, RAW capture, and/or still image stabilization capture). By default, the receiver prepares sufficient resources to capture photos with default settings, +[AVCapturePhotoSettings photoSettings].
194        #[unsafe(method(preparedPhotoSettingsArray))]
195        #[unsafe(method_family = none)]
196        pub unsafe fn preparedPhotoSettingsArray(
197            &self,
198        ) -> Retained<NSArray<AVCapturePhotoSettings>>;
199
200        #[cfg(feature = "block2")]
201        /// Method allowing the receiver to prepare resources in advance for future -capturePhotoWithSettings:delegate: requests.
202        ///
203        ///
204        /// Parameter `preparedPhotoSettingsArray`: An array of AVCapturePhotoSettings instances indicating the types of capture for which the receiver should prepare resources.
205        ///
206        /// Parameter `completionHandler`: A completion block to be fired on a serial dispatch queue once the receiver has finished preparing. You may pass nil to indicate you do not wish to be called back when preparation is complete.
207        ///
208        ///
209        /// Some types of photo capture, such as bracketed captures and RAW captures, require the receiver to allocate additional buffers or prepare other resources. To prevent photo capture requests from executing slowly due to lazy resource allocation, you may call this method with an array of settings objects representative of the types of capture you will be performing (e.g., settings for a bracketed capture, RAW capture, and/or still image stabilization capture). You may call this method even before calling -[AVCaptureSession startRunning] in order to hint the receiver up front which features you'll be utilizing. Each time you call this method with an array of settings, the receiver evaluates what additional resources it needs to allocate, as well as existing resources that can be reclaimed, and calls back your completionHandler when it has finished preparing (and possibly reclaiming) needed resources. By default, the receiver prepares sufficient resources to capture photos with default settings, +[AVCapturePhotoSettings photoSettings]. If you wish to reclaim all possible resources, you may call this method with an empty array.
210        ///
211        /// Preparation for photo capture is always optional. You may call -capturePhotoWithSettings:delegate: without first calling -setPreparedPhotoSettingsArray:completionHandler:, but be advised that some of your photo captures may execute slowly as additional resources are allocated just-in-time.
212        ///
213        /// If you call this method while your AVCaptureSession is not running, your completionHandler does not fire immediately. It only fires once you've called -[AVCaptureSession startRunning], and the needed resources have actually been prepared. If you call -setPreparedPhotoSettingsArray:completionHandler: with an array of settings, and then call it a second time, your first prepare call's completionHandler fires immediately with prepared == NO.
214        ///
215        /// Prepared settings persist across session starts/stops and committed configuration changes. This property participates in -[AVCaptureSession beginConfiguration] / -[AVCaptureSession commitConfiguration] deferred work behavior. That is, if you call -[AVCaptureSession beginConfiguration], change your session's input/output topology, and call this method, preparation is deferred until you call -[AVCaptureSession commitConfiguration], enabling you to atomically commit a new configuration as well as prepare to take photos in that new configuration.
216        #[unsafe(method(setPreparedPhotoSettingsArray:completionHandler:))]
217        #[unsafe(method_family = none)]
218        pub unsafe fn setPreparedPhotoSettingsArray_completionHandler(
219            &self,
220            prepared_photo_settings_array: &NSArray<AVCapturePhotoSettings>,
221            completion_handler: Option<&block2::DynBlock<dyn Fn(Bool, *mut NSError)>>,
222        );
223
224        /// An array of kCVPixelBufferPixelFormatTypeKey values that are currently supported by the receiver.
225        ///
226        ///
227        /// If you wish to capture a photo in an uncompressed format, such as 420f, 420v, or BGRA, you must ensure that the format you want is present in the receiver's availablePhotoPixelFormatTypes array. If you've not yet added your receiver to an AVCaptureSession with a video source, no pixel format types are available. This property is key-value observable.
228        #[unsafe(method(availablePhotoPixelFormatTypes))]
229        #[unsafe(method_family = none)]
230        pub unsafe fn availablePhotoPixelFormatTypes(&self) -> Retained<NSArray<NSNumber>>;
231
232        #[cfg(feature = "AVVideoSettings")]
233        /// An array of AVVideoCodecKey values that are currently supported by the receiver.
234        ///
235        ///
236        /// If you wish to capture a photo in a compressed format, such as JPEG, you must ensure that the format you want is present in the receiver's availablePhotoCodecTypes array. If you've not yet added your receiver to an AVCaptureSession with a video source, no codec types are available. This property is key-value observable.
237        #[unsafe(method(availablePhotoCodecTypes))]
238        #[unsafe(method_family = none)]
239        pub unsafe fn availablePhotoCodecTypes(&self) -> Retained<NSArray<AVVideoCodecType>>;
240
241        #[cfg(feature = "AVVideoSettings")]
242        /// An array of available AVVideoCodecType values that may be used for the raw photo.
243        ///
244        ///
245        /// Not all codecs can be used for all rawPixelFormatType values and this call will show all of the possible codecs available. To check if a codec is available for a specific rawPixelFormatType and rawFileType, one should use supportedRawPhotoCodecTypesForRawPhotoPixelFormatType:fileType:.
246        #[unsafe(method(availableRawPhotoCodecTypes))]
247        #[unsafe(method_family = none)]
248        pub unsafe fn availableRawPhotoCodecTypes(&self) -> Retained<NSArray<AVVideoCodecType>>;
249
250        /// Indicates whether the current configuration supports Apple ProRAW pixel formats.
251        ///
252        ///
253        /// The AVCapturePhotoSettings appleProRAWEnabled property may only be set to YES if this property returns YES. This property is key-value observable.
254        #[unsafe(method(isAppleProRAWSupported))]
255        #[unsafe(method_family = none)]
256        pub unsafe fn isAppleProRAWSupported(&self) -> bool;
257
258        /// Indicates whether the photo output is configured for delivery of Apple ProRAW pixel formats as well as Bayer RAW formats.
259        ///
260        ///
261        /// Setting this property to YES will enable support for taking photos in Apple ProRAW pixel formats. These formats will be added to -availableRawPhotoPixelFormatTypes after any existing Bayer RAW formats. Compared to photos taken with a Bayer RAW format, these photos will be demosaiced and partially processed. They are still scene-referred, and allow capturing RAW photos in modes where there is no traditional sensor/Bayer RAW available. Examples are any modes that rely on fusion of multiple captures. Use +isBayerRAWPixelFormat: to determine if a pixel format in -availableRawPhotoPixelFormatTypes is a Bayer RAW format, and +isAppleProRAWPixelFormat: to determine if it is an Apple ProRAW format. When writing an Apple ProRAW buffer to a DNG file, the resulting file is known as "Linear DNG". Apple ProRAW formats are not supported on all platforms and devices. This property may only be set to YES if appleProRAWSupported returns YES. This property is key-value observable.
262        ///
263        /// Enabling this property requires a lengthy reconfiguration of the capture render pipeline, so you should set this property to YES before calling -[AVCaptureSession startRunning].
264        #[unsafe(method(isAppleProRAWEnabled))]
265        #[unsafe(method_family = none)]
266        pub unsafe fn isAppleProRAWEnabled(&self) -> bool;
267
268        /// Setter for [`isAppleProRAWEnabled`][Self::isAppleProRAWEnabled].
269        #[unsafe(method(setAppleProRAWEnabled:))]
270        #[unsafe(method_family = none)]
271        pub unsafe fn setAppleProRAWEnabled(&self, apple_pro_raw_enabled: bool);
272
273        /// Returns YES if the given pixel format is a Bayer RAW format.
274        ///
275        ///
276        /// May be used to distinguish Bayer RAW from Apple ProRAW pixel formats in -availableRawPhotoPixelFormatTypes once appleProRAWEnabled has been set to YES.
277        #[unsafe(method(isBayerRAWPixelFormat:))]
278        #[unsafe(method_family = none)]
279        pub unsafe fn isBayerRAWPixelFormat(pixel_format: OSType) -> bool;
280
281        /// Returns YES if the given pixel format is an Apple ProRAW format.
282        ///
283        ///
284        /// May be used to distinguish Bayer RAW from Apple ProRAW pixel formats in -availableRawPhotoPixelFormatTypes once appleProRAWEnabled has been set to YES.
285        ///
286        /// See appleProRAWEnabled for more information on Apple ProRAW.
287        #[unsafe(method(isAppleProRAWPixelFormat:))]
288        #[unsafe(method_family = none)]
289        pub unsafe fn isAppleProRAWPixelFormat(pixel_format: OSType) -> bool;
290
291        /// An array of RAW CVPixelBufferPixelFormatTypeKey values that are currently supported by the receiver.
292        ///
293        ///
294        /// If you wish to capture a RAW photo, you must ensure that the RAW format you want is present in the receiver's availableRawPhotoPixelFormatTypes array. If you've not yet added your receiver to an AVCaptureSession with a video source, no RAW formats are available. See AVCapturePhotoOutput.appleProRAWEnabled on how to enable support for partially processed RAW formats. This property is key-value observable. RAW capture is not supported on all platforms.
295        #[unsafe(method(availableRawPhotoPixelFormatTypes))]
296        #[unsafe(method_family = none)]
297        pub unsafe fn availableRawPhotoPixelFormatTypes(&self) -> Retained<NSArray<NSNumber>>;
298
299        #[cfg(feature = "AVMediaFormat")]
300        /// An array of AVFileType values that are currently supported by the receiver.
301        ///
302        ///
303        /// If you wish to capture a photo that is formatted for a particular file container, such as HEIF or DICOM, you must ensure that the fileType you desire is present in the receiver's availablePhotoFileTypes array. If you've not yet added your receiver to an AVCaptureSession with a video source, no file types are available. This property is key-value observable.
304        #[unsafe(method(availablePhotoFileTypes))]
305        #[unsafe(method_family = none)]
306        pub unsafe fn availablePhotoFileTypes(&self) -> Retained<NSArray<AVFileType>>;
307
308        #[cfg(feature = "AVMediaFormat")]
309        /// An array of AVFileType values that are currently supported by the receiver for RAW capture.
310        ///
311        ///
312        /// If you wish to capture a RAW photo that is formatted for a particular file container, such as DNG, you must ensure that the fileType you desire is present in the receiver's availableRawPhotoFileTypes array. If you've not yet added your receiver to an AVCaptureSession with a video source, no file types are available. This property is key-value observable.
313        #[unsafe(method(availableRawPhotoFileTypes))]
314        #[unsafe(method_family = none)]
315        pub unsafe fn availableRawPhotoFileTypes(&self) -> Retained<NSArray<AVFileType>>;
316
317        #[cfg(feature = "AVMediaFormat")]
318        /// An array of pixel format type values that are currently supported by the receiver for a particular file container.
319        ///
320        ///
321        /// Parameter `fileType`: The AVFileType container type intended for storage of a photo.
322        ///
323        /// Returns: An array of CVPixelBufferPixelFormatTypeKey values supported by the receiver for the file type in question.
324        ///
325        ///
326        /// If you wish to capture a photo for storage in a particular file container, such as TIFF, you must ensure that the photo pixel format type you request is valid for that file type. If no pixel format types are supported for a given fileType, an empty array is returned. If you've not yet added your receiver to an AVCaptureSession with a video source, no pixel format types are supported.
327        #[unsafe(method(supportedPhotoPixelFormatTypesForFileType:))]
328        #[unsafe(method_family = none)]
329        pub unsafe fn supportedPhotoPixelFormatTypesForFileType(
330            &self,
331            file_type: &AVFileType,
332        ) -> Retained<NSArray<NSNumber>>;
333
334        #[cfg(all(feature = "AVMediaFormat", feature = "AVVideoSettings"))]
335        /// An array of AVVideoCodecKey values that are currently supported by the receiver for a particular file container.
336        ///
337        ///
338        /// Parameter `fileType`: The AVFileType container type intended for storage of a photo.
339        ///
340        /// Returns: An array of AVVideoCodecKey values supported by the receiver for the file type in question.
341        ///
342        ///
343        /// If you wish to capture a photo for storage in a particular file container, such as HEIF, you must ensure that the photo codec type you request is valid for that file type. If no codec types are supported for a given fileType, an empty array is returned. If you've not yet added your receiver to an AVCaptureSession with a video source, no codec types are supported.
344        #[unsafe(method(supportedPhotoCodecTypesForFileType:))]
345        #[unsafe(method_family = none)]
346        pub unsafe fn supportedPhotoCodecTypesForFileType(
347            &self,
348            file_type: &AVFileType,
349        ) -> Retained<NSArray<AVVideoCodecType>>;
350
351        #[cfg(all(feature = "AVMediaFormat", feature = "AVVideoSettings"))]
352        /// An array of AVVideoCodecType values that are currently supported by the receiver for a particular file container and raw pixel format.
353        ///
354        ///
355        /// Parameter `pixelFormatType`: A Bayer RAW or Apple ProRAW pixel format OSType (defined in CVPixelBuffer.h).
356        ///
357        /// Parameter `fileType`: The AVFileType container type intended for storage of a photo which can be retrieved from -availableRawPhotoFileTypes.
358        ///
359        /// Returns: An array of AVVideoCodecType values supported by the receiver for the file type and and raw pixel format in question.
360        ///
361        ///
362        /// If you wish to capture a raw photo for storage using a Bayer RAW or Apple ProRAW pixel format and to be stored in a file container, such as DNG, you must ensure that the codec type you request is valid for that file and pixel format type. If no RAW codec types are supported for a given file type and/or pixel format type, an empty array is returned. If you have not yet added your receiver to an AVCaptureSession with a video source, an empty array is returned.
363        #[unsafe(method(supportedRawPhotoCodecTypesForRawPhotoPixelFormatType:fileType:))]
364        #[unsafe(method_family = none)]
365        pub unsafe fn supportedRawPhotoCodecTypesForRawPhotoPixelFormatType_fileType(
366            &self,
367            pixel_format_type: OSType,
368            file_type: &AVFileType,
369        ) -> Retained<NSArray<AVVideoCodecType>>;
370
        #[cfg(feature = "AVMediaFormat")]
        /// An array of CVPixelBufferPixelFormatType values that are currently supported by the receiver for a particular file container.
        ///
        /// Parameter `fileType`: The AVFileType container type intended for storage of a photo.
        ///
        /// Returns: An array of CVPixelBufferPixelFormatType values supported by the receiver for the file type in question.
        ///
        /// If you wish to capture a photo for storage in a particular file container, such as DNG, you must ensure that the RAW pixel format type you request is valid for that file type. If no RAW pixel format types are supported for a given fileType, an empty array is returned. If you've not yet added your receiver to an AVCaptureSession with a video source, no pixel format types are supported.
        #[unsafe(method(supportedRawPhotoPixelFormatTypesForFileType:))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportedRawPhotoPixelFormatTypesForFileType(
            &self,
            file_type: &AVFileType,
        ) -> Retained<NSArray<NSNumber>>;
387
        /// Indicates the highest quality the receiver should be prepared to output on a capture-by-capture basis.
        ///
        /// Default value is AVCapturePhotoQualityPrioritizationBalanced when attached to an AVCaptureSession, and AVCapturePhotoQualityPrioritizationSpeed when attached to an AVCaptureMultiCamSession. The AVCapturePhotoOutput is capable of applying a variety of techniques to improve photo quality (reduce noise, preserve detail in low light, freeze motion, etc). Some techniques improve image quality at the expense of speed (shot-to-shot time). Before starting your session, you may set this property to indicate the highest quality prioritization you intend to request when calling -capturePhotoWithSettings:delegate:. When configuring an AVCapturePhotoSettings object, you may not exceed this quality prioritization level, but you may select a lower (speedier) prioritization level.
        ///
        /// Changing the maxPhotoQualityPrioritization while the session is running can result in a lengthy rebuild of the session in which video preview is disrupted.
        ///
        /// Setting the maxPhotoQualityPrioritization to .quality will turn on optical image stabilization if the -isHighPhotoQualitySupported of the source device's -activeFormat is true.
        #[unsafe(method(maxPhotoQualityPrioritization))]
        #[unsafe(method_family = none)]
        pub unsafe fn maxPhotoQualityPrioritization(&self) -> AVCapturePhotoQualityPrioritization;

        /// Setter for [`maxPhotoQualityPrioritization`][Self::maxPhotoQualityPrioritization].
        #[unsafe(method(setMaxPhotoQualityPrioritization:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMaxPhotoQualityPrioritization(
            &self,
            max_photo_quality_prioritization: AVCapturePhotoQualityPrioritization,
        );
407
        /// Specifies whether fast capture prioritization is supported.
        ///
        /// Fast capture prioritization allows capture quality to be automatically reduced from the selected AVCapturePhotoQualityPrioritization to ensure the photo output can keep up when captures are requested in rapid succession. Fast capture prioritization is only supported for certain AVCaptureSession sessionPresets and AVCaptureDevice activeFormats and only when responsiveCaptureEnabled is YES. When switching cameras or formats this property may change. When this property changes from YES to NO, fastCapturePrioritizationEnabled also reverts to NO. If you've previously opted in for fast capture prioritization and then change configurations, you may need to set fastCapturePrioritizationEnabled = YES again.
        #[unsafe(method(isFastCapturePrioritizationSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isFastCapturePrioritizationSupported(&self) -> bool;

        /// Setter for [`isFastCapturePrioritizationSupported`][Self::isFastCapturePrioritizationSupported].
        #[unsafe(method(setFastCapturePrioritizationSupported:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setFastCapturePrioritizationSupported(
            &self,
            fast_capture_prioritization_supported: bool,
        );
423
        /// Specifies whether fast capture prioritization is enabled.
        ///
        /// This property defaults to NO. This property may only be set to YES if fastCapturePrioritizationSupported is YES, otherwise an NSInvalidArgumentException is thrown. By setting this property to YES, the photo output prepares itself to automatically reduce capture quality from the selected AVCapturePhotoQualityPrioritization when needed to keep up with rapid capture requests. In many cases the slightly reduced quality is preferable to missing the moment entirely. If you intend to use fast capture prioritization, you should set this property to YES before calling -[AVCaptureSession startRunning] or within -[AVCaptureSession beginConfiguration] and -[AVCaptureSession commitConfiguration] while running.
        #[unsafe(method(isFastCapturePrioritizationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isFastCapturePrioritizationEnabled(&self) -> bool;

        /// Setter for [`isFastCapturePrioritizationEnabled`][Self::isFastCapturePrioritizationEnabled].
        #[unsafe(method(setFastCapturePrioritizationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setFastCapturePrioritizationEnabled(
            &self,
            fast_capture_prioritization_enabled: bool,
        );
439
        /// Indicates whether the deferred photo delivery feature is supported by the receiver.
        ///
        /// This property may change as the session's -sessionPreset or source device's -activeFormat change. When deferred photo delivery is not supported, your capture requests always resolve their AVCaptureResolvedPhotoSettings.deferredPhotoProxyDimensions to { 0, 0 }. This property is key-value observable.
        ///
        /// Automatic deferred photo delivery can produce a lightweight photo representation, called a "proxy", at the time of capture that can later be processed to completion while improving camera responsiveness.  When it's appropriate for the receiver to deliver a photo proxy for deferred processing, the delegate callback -captureOutput:didFinishCapturingDeferredPhotoProxy:error: will be invoked instead of -captureOutput:didFinishProcessingPhoto:error:.  See the documentation for AVCaptureDeferredPhotoProxy for more details.
        #[unsafe(method(isAutoDeferredPhotoDeliverySupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoDeferredPhotoDeliverySupported(&self) -> bool;
449
        /// Specifies whether automatic deferred photo delivery is enabled.
        ///
        /// Setting this value to either YES or NO requires a lengthy reconfiguration of the capture pipeline, so you should set this property before calling -[AVCaptureSession startRunning].  Setting this property to YES throws an NSInvalidArgumentException if autoDeferredPhotoDeliverySupported is NO.
        #[unsafe(method(isAutoDeferredPhotoDeliveryEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoDeferredPhotoDeliveryEnabled(&self) -> bool;

        /// Setter for [`isAutoDeferredPhotoDeliveryEnabled`][Self::isAutoDeferredPhotoDeliveryEnabled].
        #[unsafe(method(setAutoDeferredPhotoDeliveryEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoDeferredPhotoDeliveryEnabled(
            &self,
            auto_deferred_photo_delivery_enabled: bool,
        );
465
        /// Indicates whether the still image stabilization feature is supported by the receiver.
        ///
        /// This property may change as the session's -sessionPreset or source device's -activeFormat change. When still image stabilization is not supported, your capture requests always resolve stillImageStabilizationEnabled to NO. This property is key-value observable.
        ///
        /// As of iOS 13 hardware, the AVCapturePhotoOutput is capable of applying a variety of multi-image fusion techniques to improve photo quality (reduce noise, preserve detail in low light, freeze motion, etc), all of which have been previously lumped under the stillImageStabilization moniker. This property should no longer be used as it no longer provides meaningful information about the techniques used to improve quality in a photo capture. Instead, you should use -maxPhotoQualityPrioritization to indicate the highest quality prioritization level you might request in a photo capture, understanding that the higher the quality, the longer the potential wait. You may also use AVCapturePhotoSettings' photoQualityPrioritization property to specify a prioritization level for a particular photo capture, and then query the AVCaptureResolvedPhotoSettings photoProcessingTimeRange property to find out how long it might take to receive the resulting photo in your delegate callback.
        #[deprecated]
        #[unsafe(method(isStillImageStabilizationSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isStillImageStabilizationSupported(&self) -> bool;
476
        /// Indicates whether the current scene is dark enough to warrant use of still image stabilization.
        ///
        /// This property reports whether the current scene being previewed by the camera is dark enough to benefit from still image stabilization. You can influence this property's answers by setting the photoSettingsForSceneMonitoring property, indicating whether autoStillImageStabilization monitoring should be on or off. If you set autoStillImageStabilization to NO, isStillImageStabilizationScene always reports NO. If you set it to YES, this property returns YES or NO depending on the current scene's lighting conditions. Note that some very dark scenes do not benefit from still image stabilization, but do benefit from flash. By default, this property always returns NO unless you set photoSettingsForSceneMonitoring to a non-nil value. This property may be key-value observed.
        ///
        /// As of iOS 13 hardware, the AVCapturePhotoOutput is capable of applying a variety of multi-image fusion techniques to improve photo quality (reduce noise, preserve detail in low light, freeze motion, etc), all of which have been previously lumped under the stillImageStabilization moniker. This property should no longer be used as it no longer provides meaningful information about the techniques used to improve quality in a photo capture. Instead, you should use -maxPhotoQualityPrioritization to indicate the highest quality prioritization level you might request in a photo capture, understanding that the higher the quality, the longer the potential wait. You may also use AVCapturePhotoSettings' photoQualityPrioritization property to specify a prioritization level for a particular photo capture, and then query the AVCaptureResolvedPhotoSettings photoProcessingTimeRange property to find out how long it might take to receive the resulting photo in your delegate callback.
        #[deprecated]
        #[unsafe(method(isStillImageStabilizationScene))]
        #[unsafe(method_family = none)]
        pub unsafe fn isStillImageStabilizationScene(&self) -> bool;
487
        /// Indicates whether the virtual device image fusion feature is supported by the receiver.
        ///
        /// This property may change as the session's -sessionPreset or source device's -activeFormat change. When using a virtual AVCaptureDevice, its constituent camera images can be fused together to improve image quality when this property answers YES. When virtual device fusion is not supported by the current configuration, your capture requests always resolve virtualDeviceFusionEnabled to NO. This property is key-value observable.
        #[unsafe(method(isVirtualDeviceFusionSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isVirtualDeviceFusionSupported(&self) -> bool;
495
        /// Indicates whether the DualCamera image fusion feature is supported by the receiver.
        ///
        /// This property may change as the session's -sessionPreset or source device's -activeFormat change. When using the AVCaptureDevice with deviceType AVCaptureDeviceTypeBuiltInDualCamera, the wide-angle and telephoto camera images can be fused together to improve image quality in some configurations. When DualCamera image fusion is not supported by the current configuration, your capture requests always resolve dualCameraFusionEnabled to NO. This property is key-value observable. As of iOS 13, this property is deprecated in favor of virtualDeviceFusionSupported.
        #[deprecated]
        #[unsafe(method(isDualCameraFusionSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isDualCameraFusionSupported(&self) -> bool;
504
        /// Specifies whether the photo output's current configuration supports delivery of photos from constituent cameras of a virtual device.
        ///
        /// Virtual device constituent photo delivery is only supported for certain AVCaptureSession sessionPresets and AVCaptureDevice activeFormats. When switching cameras or formats this property may change. When this property changes from YES to NO, virtualDeviceConstituentPhotoDeliveryEnabled also reverts to NO. If you've previously opted in for virtual device constituent photo delivery and then change configurations, you may need to set virtualDeviceConstituentPhotoDeliveryEnabled = YES again. This property is key-value observable.
        #[unsafe(method(isVirtualDeviceConstituentPhotoDeliverySupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isVirtualDeviceConstituentPhotoDeliverySupported(&self) -> bool;
512
        /// Specifies whether the photo output's current configuration supports delivery of both telephoto and wide images from the DualCamera.
        ///
        /// DualCamera dual photo delivery is only supported for certain AVCaptureSession sessionPresets and AVCaptureDevice activeFormats. When switching cameras or formats this property may change. When this property changes from YES to NO, dualCameraDualPhotoDeliveryEnabled also reverts to NO. If you've previously opted in for DualCamera dual photo delivery and then change configurations, you may need to set dualCameraDualPhotoDeliveryEnabled = YES again. This property is key-value observable. As of iOS 13, this property is deprecated in favor of virtualDeviceConstituentPhotoDeliverySupported.
        #[deprecated]
        #[unsafe(method(isDualCameraDualPhotoDeliverySupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isDualCameraDualPhotoDeliverySupported(&self) -> bool;
521
        /// Indicates whether the photo output is configured for delivery of photos from constituent cameras of a virtual device.
        ///
        /// Default value is NO. This property may only be set to YES if virtualDeviceConstituentPhotoDeliverySupported is YES. Virtual device constituent photo delivery requires a lengthy reconfiguration of the capture render pipeline, so if you intend to do any constituent photo delivery captures, you should set this property to YES before calling -[AVCaptureSession startRunning]. See also -[AVCapturePhotoSettings virtualDeviceConstituentPhotoDeliveryEnabledDevices].
        #[unsafe(method(isVirtualDeviceConstituentPhotoDeliveryEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isVirtualDeviceConstituentPhotoDeliveryEnabled(&self) -> bool;

        /// Setter for [`isVirtualDeviceConstituentPhotoDeliveryEnabled`][Self::isVirtualDeviceConstituentPhotoDeliveryEnabled].
        #[unsafe(method(setVirtualDeviceConstituentPhotoDeliveryEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setVirtualDeviceConstituentPhotoDeliveryEnabled(
            &self,
            virtual_device_constituent_photo_delivery_enabled: bool,
        );
537
        /// Indicates whether the photo output is configured for delivery of both the telephoto and wide images from the DualCamera.
        ///
        /// Default value is NO. This property may only be set to YES if dualCameraDualPhotoDeliverySupported is YES. DualCamera dual photo delivery requires a lengthy reconfiguration of the capture render pipeline, so if you intend to do any dual photo delivery captures, you should set this property to YES before calling -[AVCaptureSession startRunning]. See also -[AVCapturePhotoSettings dualCameraDualPhotoDeliveryEnabled]. As of iOS 13, this property is deprecated in favor of virtualDeviceConstituentPhotoDeliveryEnabled.
        #[deprecated]
        #[unsafe(method(isDualCameraDualPhotoDeliveryEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isDualCameraDualPhotoDeliveryEnabled(&self) -> bool;

        /// Setter for [`isDualCameraDualPhotoDeliveryEnabled`][Self::isDualCameraDualPhotoDeliveryEnabled].
        #[deprecated]
        #[unsafe(method(setDualCameraDualPhotoDeliveryEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDualCameraDualPhotoDeliveryEnabled(
            &self,
            dual_camera_dual_photo_delivery_enabled: bool,
        );
555
        /// Specifies whether the photo output's current configuration supports delivery of AVCameraCalibrationData in the resultant AVCapturePhoto.
        ///
        /// Camera calibration data delivery (intrinsics, extrinsics, lens distortion characteristics, etc.) is only supported if virtualDeviceConstituentPhotoDeliveryEnabled is YES and contentAwareDistortionCorrectionEnabled is NO and the source device's geometricDistortionCorrectionEnabled property is set to NO. This property is key-value observable.
        #[unsafe(method(isCameraCalibrationDataDeliverySupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isCameraCalibrationDataDeliverySupported(&self) -> bool;
563
        /// An array of AVCaptureFlashMode constants for the current capture session configuration.
        ///
        /// This property supersedes AVCaptureDevice's isFlashModeSupported: It returns an array of AVCaptureFlashMode constants. To test whether a particular flash mode is supported, use NSArray's containsObject API: `[photoOutput.supportedFlashModes containsObject:@(AVCaptureFlashModeAuto)]`. This property is key-value observable.
        #[unsafe(method(supportedFlashModes))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportedFlashModes(&self) -> Retained<NSArray<NSNumber>>;
573
        /// Indicates whether the receiver supports automatic red-eye reduction for flash captures.
        ///
        /// Flash images may cause subjects' eyes to appear red, golden, or white. Automatic red-eye reduction detects and corrects for reflected light in eyes, at the cost of additional processing time per image. This property may change as the session's -sessionPreset or source device's -activeFormat change. When red-eye reduction is not supported, your capture requests always resolve redEyeReductionEnabled to NO. This property is key-value observable.
        #[unsafe(method(isAutoRedEyeReductionSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoRedEyeReductionSupported(&self) -> bool;
581
        /// Indicates whether the current scene is dark enough to warrant use of the flash.
        ///
        /// This property reports whether the current scene being previewed by the camera is dark enough to need the flash. If -supportedFlashModes only contains AVCaptureFlashModeOff, isFlashScene always reports NO. You can influence this property's answers by setting the photoSettingsForSceneMonitoring property, indicating the flashMode you wish to monitor. If you set flashMode to AVCaptureFlashModeOff, isFlashScene always reports NO. If you set it to AVCaptureFlashModeAuto or AVCaptureFlashModeOn, isFlashScene answers YES or NO based on the current scene's lighting conditions. By default, this property always returns NO unless you set photoSettingsForSceneMonitoring to a non-nil value. Note that there is some overlap in the light level ranges that benefit from still image stabilization and flash. If your photoSettingsForSceneMonitoring indicate that both still image stabilization and flash scenes should be monitored, still image stabilization takes precedence, and isFlashScene becomes YES at lower overall light levels. This property may be key-value observed.
        #[unsafe(method(isFlashScene))]
        #[unsafe(method_family = none)]
        pub unsafe fn isFlashScene(&self) -> bool;
589
        /// Settings that govern the behavior of isFlashScene and isStillImageStabilizationScene.
        ///
        /// You can influence the return values of isFlashScene and isStillImageStabilizationScene by setting this property, indicating the flashMode and photoQualityPrioritization values that should be considered for scene monitoring. For instance, if you set flashMode to AVCaptureFlashModeOff, isFlashScene always reports NO. If you set it to AVCaptureFlashModeAuto or AVCaptureFlashModeOn, isFlashScene answers YES or NO based on the current scene's lighting conditions. Note that there is some overlap in the light level ranges that benefit from still image stabilization and flash. If your photoSettingsForSceneMonitoring indicate that both still image stabilization and flash scenes should be monitored, still image stabilization takes precedence, and isFlashScene becomes YES at lower overall light levels. The default value for this property is nil. See isStillImageStabilizationScene and isFlashScene for further discussion.
        #[unsafe(method(photoSettingsForSceneMonitoring))]
        #[unsafe(method_family = none)]
        pub unsafe fn photoSettingsForSceneMonitoring(
            &self,
        ) -> Option<Retained<AVCapturePhotoSettings>>;

        /// Setter for [`photoSettingsForSceneMonitoring`][Self::photoSettingsForSceneMonitoring].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        #[unsafe(method(setPhotoSettingsForSceneMonitoring:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPhotoSettingsForSceneMonitoring(
            &self,
            photo_settings_for_scene_monitoring: Option<&AVCapturePhotoSettings>,
        );
609
        /// Indicates whether the photo render pipeline should be configured to deliver high resolution still images.
        ///
        /// Some AVCaptureDeviceFormats support outputting higher resolution stills than their streaming resolution (See AVCaptureDeviceFormat.highResolutionStillImageDimensions). Under some conditions, AVCaptureSession needs to set up the photo render pipeline differently to support high resolution still image capture. If you intend to take high resolution still images at all, you should set this property to YES before calling -[AVCaptureSession startRunning]. Once you've opted in for high resolution capture, you are free to issue photo capture requests with or without highResolutionCaptureEnabled in the AVCapturePhotoSettings. If you have not set this property to YES and call capturePhotoWithSettings:delegate: with settings.highResolutionCaptureEnabled set to YES, an NSInvalidArgumentException will be thrown.
        #[deprecated = "Use maxPhotoDimensions instead."]
        #[unsafe(method(isHighResolutionCaptureEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isHighResolutionCaptureEnabled(&self) -> bool;

        /// Setter for [`isHighResolutionCaptureEnabled`][Self::isHighResolutionCaptureEnabled].
        #[deprecated = "Use maxPhotoDimensions instead."]
        #[unsafe(method(setHighResolutionCaptureEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setHighResolutionCaptureEnabled(&self, high_resolution_capture_enabled: bool);
624
        #[cfg(feature = "objc2-core-media")]
        /// Indicates the maximum resolution of the requested photo.
        ///
        /// Set this property to enable requesting of images up to as large as the specified dimensions. Images returned by AVCapturePhotoOutput may be smaller than these dimensions but will never be larger. Once set, images can be requested with any valid maximum photo dimensions by setting AVCapturePhotoSettings.maxPhotoDimensions on a per photo basis. The dimensions set must match one of the dimensions returned by AVCaptureDeviceFormat.supportedMaxPhotoDimensions for the current active format. Changing this property may trigger a lengthy reconfiguration of the capture render pipeline so it is recommended that this is set before calling -[AVCaptureSession startRunning].
        /// Note: When supported, the 24MP setting (5712, 4284) is only serviced as 24MP when opted-in to autoDeferredPhotoDeliveryEnabled.
        #[unsafe(method(maxPhotoDimensions))]
        #[unsafe(method_family = none)]
        pub unsafe fn maxPhotoDimensions(&self) -> CMVideoDimensions;

        #[cfg(feature = "objc2-core-media")]
        /// Setter for [`maxPhotoDimensions`][Self::maxPhotoDimensions].
        #[unsafe(method(setMaxPhotoDimensions:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMaxPhotoDimensions(&self, max_photo_dimensions: CMVideoDimensions);
640
        /// Specifies the maximum number of photos that may be taken in a single bracket.
        ///
        /// AVCapturePhotoOutput can only satisfy a limited number of image requests in a single bracket without exhausting system resources. The maximum number of photos that may be taken in a single bracket depends on the size and format of the images being captured, and consequently may vary with AVCaptureSession -sessionPreset and AVCaptureDevice -activeFormat. Some formats do not support bracketed capture at all, and thus this property may return a value of 0. This read-only property is key-value observable. If you call -capturePhotoWithSettings:delegate: with a bracketedSettings whose count exceeds -maxBracketedCapturePhotoCount, an NSInvalidArgumentException is thrown.
        #[unsafe(method(maxBracketedCapturePhotoCount))]
        #[unsafe(method_family = none)]
        pub unsafe fn maxBracketedCapturePhotoCount(&self) -> NSUInteger;
648
        /// Indicates whether the receiver supports lens stabilization during bracketed captures.
        ///
        /// The AVCapturePhotoBracketSettings lensStabilizationEnabled property may only be set if this property returns YES. Its value may change as the session's -sessionPreset or input device's -activeFormat changes. This read-only property is key-value observable.
        #[unsafe(method(isLensStabilizationDuringBracketedCaptureSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isLensStabilizationDuringBracketedCaptureSupported(&self) -> bool;
656
        /// Indicates whether the receiver supports Live Photo capture.
        ///
        /// Live Photo capture is only supported for certain AVCaptureSession sessionPresets and AVCaptureDevice activeFormats. When switching cameras or formats this property may change. When this property changes from YES to NO, livePhotoCaptureEnabled also reverts to NO. If you've previously opted in for Live Photo capture and then change configurations, you may need to set livePhotoCaptureEnabled = YES again.
        #[unsafe(method(isLivePhotoCaptureSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isLivePhotoCaptureSupported(&self) -> bool;
664
        /// Indicates whether the receiver is configured for Live Photo capture.
        ///
        /// Default value is NO. This property may only be set to YES if livePhotoCaptureSupported is YES. Live Photo capture requires a lengthy reconfiguration of the capture render pipeline, so if you intend to do any Live Photo captures at all, you should set livePhotoCaptureEnabled to YES before calling -[AVCaptureSession startRunning].
        #[unsafe(method(isLivePhotoCaptureEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isLivePhotoCaptureEnabled(&self) -> bool;

        /// Setter for [`isLivePhotoCaptureEnabled`][Self::isLivePhotoCaptureEnabled].
        #[unsafe(method(setLivePhotoCaptureEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setLivePhotoCaptureEnabled(&self, live_photo_capture_enabled: bool);
677
        /// Indicates whether Live Photo capture is enabled, but currently suspended.
        ///
        /// This property allows you to cut current Live Photo movie captures short (for instance, if you suddenly need to do something that you don't want to show up in the Live Photo movie, such as take a non Live Photo capture that makes a shutter sound). By default, livePhotoCaptureSuspended is NO. When you set livePhotoCaptureSuspended = YES, any Live Photo movie captures in progress are trimmed to the current time. Likewise, when you toggle livePhotoCaptureSuspended from YES to NO, subsequent Live Photo movie captures will not contain any samples earlier than the time you un-suspended Live Photo capture. Setting this property to YES throws an NSInvalidArgumentException if livePhotoCaptureEnabled is NO. By default, this property resets to NO when the AVCaptureSession stops. This behavior can be prevented by setting preservesLivePhotoCaptureSuspendedOnSessionStop to YES before stopping the session.
        #[unsafe(method(isLivePhotoCaptureSuspended))]
        #[unsafe(method_family = none)]
        pub unsafe fn isLivePhotoCaptureSuspended(&self) -> bool;

        /// Setter for [`isLivePhotoCaptureSuspended`][Self::isLivePhotoCaptureSuspended].
        #[unsafe(method(setLivePhotoCaptureSuspended:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setLivePhotoCaptureSuspended(&self, live_photo_capture_suspended: bool);
690
        /// By default, Live Photo capture is resumed when the session stops. This property allows clients to opt out of this and preserve the value of livePhotoCaptureSuspended.
        ///
        /// Defaults to NO.
        #[unsafe(method(preservesLivePhotoCaptureSuspendedOnSessionStop))]
        #[unsafe(method_family = none)]
        pub unsafe fn preservesLivePhotoCaptureSuspendedOnSessionStop(&self) -> bool;

        /// Setter for [`preservesLivePhotoCaptureSuspendedOnSessionStop`][Self::preservesLivePhotoCaptureSuspendedOnSessionStop].
        #[unsafe(method(setPreservesLivePhotoCaptureSuspendedOnSessionStop:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPreservesLivePhotoCaptureSuspendedOnSessionStop(
            &self,
            preserves_live_photo_capture_suspended_on_session_stop: bool,
        );
706
        /// Indicates whether Live Photo movies are trimmed in real time to avoid excessive movement.
        ///
        /// This property defaults to YES when livePhotoCaptureSupported is YES. Changing this property's value while your session is running will cause a lengthy reconfiguration of the session. You should set livePhotoAutoTrimmingEnabled to YES or NO before calling -[AVCaptureSession startRunning]. When set to YES, Live Photo movies are analyzed in real time and trimmed if there's excessive movement before or after the photo is taken. Nominally, Live Photos are approximately 3 seconds long. With trimming enabled, they may be shorter, depending on movement. This feature prevents common problems such as Live Photo movies containing shoe or pocket shots.
        #[unsafe(method(isLivePhotoAutoTrimmingEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isLivePhotoAutoTrimmingEnabled(&self) -> bool;

        /// Setter for [`isLivePhotoAutoTrimmingEnabled`][Self::isLivePhotoAutoTrimmingEnabled].
        #[unsafe(method(setLivePhotoAutoTrimmingEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setLivePhotoAutoTrimmingEnabled(
            &self,
            live_photo_auto_trimming_enabled: bool,
        );
722
723        #[cfg(feature = "AVVideoSettings")]
724        /// An array of AVVideoCodecKey values that are currently supported by the receiver for use in the movie complement of a Live Photo.
725        ///
726        ///
727        /// Prior to iOS 11, all Live Photo movie video tracks are compressed using H.264. Beginning in iOS 11, you can select the Live Photo movie video compression format using one of the AVVideoCodecKey strings presented in this property. The system's default (preferred) video codec is always presented first in the list. If you've not yet added your receiver to an AVCaptureSession with a video source, no codecs are available. This property is key-value observable.
728        #[unsafe(method(availableLivePhotoVideoCodecTypes))]
729        #[unsafe(method_family = none)]
730        pub unsafe fn availableLivePhotoVideoCodecTypes(
731            &self,
732        ) -> Retained<NSArray<AVVideoCodecType>>;
733
        #[cfg(feature = "objc2-core-media")]
        /// A class method that writes a JPEG sample buffer to an NSData in the JPEG file format.
        ///
        ///
        /// Parameter `JPEGSampleBuffer`: A CMSampleBuffer containing JPEG compressed data.
        ///
        /// Parameter `previewPhotoSampleBuffer`: An optional CMSampleBuffer containing pixel buffer image data to be written as a thumbnail image.
        ///
        /// Returns: An NSData containing bits in the JPEG file format. May return nil if the re-packaging process fails.
        ///
        ///
        /// AVCapturePhotoOutput's deprecated -captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error: callback delivers JPEG photos to clients as CMSampleBuffers. To re-package these buffers in a data format suitable for writing to a JPEG file, you may call this class method, optionally inserting your own metadata into the JPEG CMSampleBuffer first, and optionally passing a preview image to be written to the JPEG file format as a thumbnail image.
        // Class method: note the absence of `&self` in the signature.
        #[deprecated]
        #[unsafe(method(JPEGPhotoDataRepresentationForJPEGSampleBuffer:previewPhotoSampleBuffer:))]
        #[unsafe(method_family = none)]
        pub unsafe fn JPEGPhotoDataRepresentationForJPEGSampleBuffer_previewPhotoSampleBuffer(
            jpeg_sample_buffer: &CMSampleBuffer,
            preview_photo_sample_buffer: Option<&CMSampleBuffer>,
        ) -> Option<Retained<NSData>>;

        #[cfg(feature = "objc2-core-media")]
        /// A class method that writes a RAW sample buffer to an NSData containing bits in the DNG file format.
        ///
        ///
        /// Parameter `rawSampleBuffer`: A CMSampleBuffer containing Bayer RAW data.
        ///
        /// Parameter `previewPhotoSampleBuffer`: An optional CMSampleBuffer containing pixel buffer image data to be written as a thumbnail image.
        ///
        /// Returns: An NSData containing bits in the DNG file format. May return nil if the re-packaging process fails.
        ///
        ///
        /// AVCapturePhotoOutput's deprecated -captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error: callback delivers RAW photos to clients as CMSampleBuffers. To re-package these buffers in a data format suitable for writing to a DNG file, you may call this class method, optionally inserting your own metadata into the RAW CMSampleBuffer first, and optionally passing a preview image to be written to the DNG file format as a thumbnail image. Only RAW images from Apple built-in cameras are supported.
        // Class method: note the absence of `&self` in the signature.
        #[deprecated]
        #[unsafe(method(DNGPhotoDataRepresentationForRawSampleBuffer:previewPhotoSampleBuffer:))]
        #[unsafe(method_family = none)]
        pub unsafe fn DNGPhotoDataRepresentationForRawSampleBuffer_previewPhotoSampleBuffer(
            raw_sample_buffer: &CMSampleBuffer,
            preview_photo_sample_buffer: Option<&CMSampleBuffer>,
        ) -> Option<Retained<NSData>>;
773
        /// A BOOL value specifying whether content aware distortion correction is supported.
        ///
        ///
        /// The rectilinear model used in optical design and by geometric distortion correction only preserves lines but not area, angles, or distance. Thus the wider the field of view of a lens, the greater the areal distortion at the edges of images. Content aware distortion correction, when enabled, intelligently corrects distortions by taking content into consideration, such as faces near the edges of the image. This property returns YES if the session's current configuration allows photos to be captured with content aware distortion correction. When switching cameras or formats or enabling depth data delivery this property may change. When this property changes from YES to NO, contentAwareDistortionCorrectionEnabled also reverts to NO. This property is key-value observable.
        #[unsafe(method(isContentAwareDistortionCorrectionSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isContentAwareDistortionCorrectionSupported(&self) -> bool;

        /// A BOOL value specifying whether the photo render pipeline is set up to perform content aware distortion correction.
        ///
        ///
        /// Default is NO. Set to YES if you wish content aware distortion correction to be performed on your AVCapturePhotos. This property may only be set to YES if contentAwareDistortionCorrectionSupported is YES. Note that warping the photos to preserve more natural looking content may result in a small change in field of view compared to what you see in the AVCaptureVideoPreviewLayer. The amount of field of view lost or gained is content specific and may vary from photo to photo. Enabling this property requires a lengthy reconfiguration of the capture render pipeline, so you should set this property to YES before calling -[AVCaptureSession startRunning].
        #[unsafe(method(isContentAwareDistortionCorrectionEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isContentAwareDistortionCorrectionEnabled(&self) -> bool;

        /// Setter for [`isContentAwareDistortionCorrectionEnabled`][Self::isContentAwareDistortionCorrectionEnabled].
        ///
        /// May only be set to YES when [`isContentAwareDistortionCorrectionSupported`][Self::isContentAwareDistortionCorrectionSupported] returns YES (see the getter's discussion).
        #[unsafe(method(setContentAwareDistortionCorrectionEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setContentAwareDistortionCorrectionEnabled(
            &self,
            content_aware_distortion_correction_enabled: bool,
        );

        /// A BOOL value specifying whether zero shutter lag is supported.
        ///
        ///
        /// This property returns YES if the session's current configuration allows zero shutter lag. When switching cameras or formats, setting depthDataDeliveryEnabled, or setting virtualDeviceConstituentPhotoDeliveryEnabled this property may change. When this property changes from YES to NO, zeroShutterLagEnabled also reverts to NO. This property is key-value observable.
        #[unsafe(method(isZeroShutterLagSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isZeroShutterLagSupported(&self) -> bool;

        /// A BOOL value specifying whether the output is set up to support zero shutter lag.
        ///
        ///
        /// This property may only be set to YES if zeroShutterLagSupported is YES, otherwise an NSInvalidArgumentException is thrown. For apps linked on or after iOS 17 zero shutter lag is automatically enabled when supported. Enabling zero shutter lag reduces or eliminates shutter lag when using AVCapturePhotoQualityPrioritizationBalanced or Quality at the cost of additional memory usage by the photo output. The timestamp of the AVCapturePhoto may be slightly earlier than when -capturePhotoWithSettings:delegate: was called. To minimize camera shake from the user's tapping gesture it is recommended that -capturePhotoWithSettings:delegate: be called as early as possible when handling the touch down event. Zero shutter lag isn't available when using manual exposure or bracketed capture. Changing this property requires a lengthy reconfiguration of the capture render pipeline, so you should set this property to YES before calling -[AVCaptureSession startRunning] or within -[AVCaptureSession beginConfiguration] and -[AVCaptureSession commitConfiguration] while running.
        #[unsafe(method(isZeroShutterLagEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isZeroShutterLagEnabled(&self) -> bool;

        /// Setter for [`isZeroShutterLagEnabled`][Self::isZeroShutterLagEnabled].
        ///
        /// May only be set to YES when [`isZeroShutterLagSupported`][Self::isZeroShutterLagSupported] returns YES; otherwise an NSInvalidArgumentException is thrown (see the getter's discussion).
        #[unsafe(method(setZeroShutterLagEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setZeroShutterLagEnabled(&self, zero_shutter_lag_enabled: bool);
818
        /// A BOOL value specifying whether responsive capture is supported.
        ///
        ///
        /// Enabling responsive capture increases peak and sustained capture rates, and reduces shutter lag at the cost of additional memory usage by the photo output. This property returns YES if the session's current configuration allows responsive capture. When switching cameras or formats, enabling depth data delivery, or enabling zero shutter lag this property may change. Responsive capture is only supported when zero shutter lag is enabled. When this property changes from YES to NO, responsiveCaptureEnabled also reverts to NO. This property is key-value observable.
        #[unsafe(method(isResponsiveCaptureSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isResponsiveCaptureSupported(&self) -> bool;

        /// A BOOL value specifying whether the photo output is set up to support responsive capture.
        ///
        ///
        /// This property may only be set to YES if responsiveCaptureSupported is YES, otherwise an NSInvalidArgumentException is thrown. When responsiveCaptureEnabled is YES the captureReadiness property should be used to determine whether new capture requests can be serviced in a reasonable time and whether the shutter control should be available to the user. Responsive capture adds buffering between the capture and photo processing stages which allows a new capture to start before processing has completed for the previous capture, so be prepared to handle -captureOutput:willBeginCaptureForResolvedSettings: being called before the -captureOutput:didFinishProcessingPhoto: for the prior requests. Processed photos continue to be delivered in the order they were captured. To minimize camera shake from the user's tapping gesture it is recommended that -capturePhotoWithSettings:delegate: be called as early as possible when handling the touch down event. Enabling responsive capture allows the fast capture prioritization feature to be used, which further increases capture rates and reduces preview and recording disruptions. See the fastCapturePrioritizationEnabled property. When requesting uncompressed output using kCVPixelBufferPixelFormatTypeKey in AVCapturePhotoSetting.format the AVCapturePhoto's pixelBuffer is allocated from a pool with enough capacity for that request only, and overlap between capture and processing is disabled. The client must release the AVCapturePhoto and references to the pixelBuffer before capturing again and the pixelBuffer's IOSurface must also no longer be in use. Changing this property requires a lengthy reconfiguration of the capture render pipeline, so you should set this property to YES before calling -[AVCaptureSession startRunning] or within -[AVCaptureSession beginConfiguration] and -[AVCaptureSession commitConfiguration] while running.
        #[unsafe(method(isResponsiveCaptureEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isResponsiveCaptureEnabled(&self) -> bool;

        /// Setter for [`isResponsiveCaptureEnabled`][Self::isResponsiveCaptureEnabled].
        ///
        /// May only be set to YES when [`isResponsiveCaptureSupported`][Self::isResponsiveCaptureSupported] returns YES; otherwise an NSInvalidArgumentException is thrown (see the getter's discussion).
        #[unsafe(method(setResponsiveCaptureEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setResponsiveCaptureEnabled(&self, responsive_capture_enabled: bool);

        /// A value specifying whether the photo output is ready to respond to new capture requests in a timely manner.
        ///
        ///
        /// This property can be key-value observed to enable and disable shutter button UI depending on whether the output is ready to capture, which is especially important when the responsiveCaptureEnabled property is YES. When interacting with AVCapturePhotoOutput on a background queue AVCapturePhotoOutputReadinessCoordinator should instead be used to observe readiness changes and perform UI updates. Capturing only when the output is ready limits the number of requests inflight to minimize shutter lag while maintaining the fastest shot to shot time. When the property returns a value other than Ready the output is not ready to capture and the shutter button should be disabled to prevent the user from initiating new requests. The output continues to accept requests when the captureReadiness property returns a value other than Ready, but the request may not be serviced for a longer period. The visual presentation of the shutter button can be customized based on the readiness value. When the user rapidly taps the shutter button the property may transition to NotReadyMomentarily for a brief period. Although the shutter button should be disabled during this period it is short lived enough that dimming or changing the appearance of the shutter is not recommended as it would be visually distracting to the user. Longer running capture types like flash or captures with AVCapturePhotoQualityPrioritizationQuality may prevent the output from capturing for an extended period, indicated by NotReadyWaitingForCapture or NotReadyWaitingForProcessing, which is appropriate to show by dimming or disabling the shutter button. For NotReadyWaitingForProcessing it is also appropriate to show a spinner or other indication that the shutter is busy.
        #[unsafe(method(captureReadiness))]
        #[unsafe(method_family = none)]
        pub unsafe fn captureReadiness(&self) -> AVCapturePhotoOutputCaptureReadiness;
847
        /// A BOOL value specifying whether constant color capture is supported.
        ///
        ///
        /// An object's color in a photograph is affected by the light sources illuminating the scene, so the color of the same object photographed in warm light might look markedly different than in colder light. In some use cases, such ambient light induced color variation is undesirable, and the user may prefer an estimate of what these materials would look like under a standard light such as daylight (D65), regardless of the lighting conditions at the time the photograph was taken. Some devices are capable of producing such constant color photos.
        ///
        /// Constant color captures require the flash to be fired and may require pre-flash sequence to determine the correct focus and exposure, therefore it might take several seconds to acquire a constant color photo. Due to this flash requirement, a constant color capture can only be taken with AVCaptureFlashModeAuto or AVCaptureFlashModeOn as the flash mode, otherwise an exception is thrown.
        ///
        /// Constant color can only be achieved when the flash has a discernible effect on the scene so it may not perform well in bright conditions such as direct sunlight. Use the constantColorConfidenceMap property to examine the confidence level, and therefore the usefulness, of each region of a constant color photo.
        ///
        /// Constant color should not be used in conjunction with locked or manual white balance.
        ///
        /// This property returns YES if the session's current configuration allows photos to be captured with constant color. When switching cameras or formats this property may change. When this property changes from YES to NO, constantColorEnabled also reverts to NO. If you've previously opted in for constant color and then change configurations, you may need to set constantColorEnabled = YES again. This property is key-value observable.
        #[unsafe(method(isConstantColorSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isConstantColorSupported(&self) -> bool;

        /// A BOOL value specifying whether the photo render pipeline is set up to perform constant color captures.
        ///
        ///
        /// Default is NO. Set to YES to enable support for taking constant color photos. This property may only be set to YES if constantColorSupported is YES. Enabling constant color requires a lengthy reconfiguration of the capture render pipeline, so if you intend to capture constant color photos, you should set this property to YES before calling -[AVCaptureSession startRunning] or within -[AVCaptureSession beginConfiguration] and -[AVCaptureSession commitConfiguration] while running.
        #[unsafe(method(isConstantColorEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isConstantColorEnabled(&self) -> bool;

        /// Setter for [`isConstantColorEnabled`][Self::isConstantColorEnabled].
        ///
        /// May only be set to YES when [`isConstantColorSupported`][Self::isConstantColorSupported] returns YES (see the getter's discussion).
        #[unsafe(method(setConstantColorEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setConstantColorEnabled(&self, constant_color_enabled: bool);
876
        /// Specifies whether suppressing the shutter sound is supported.
        ///
        ///
        /// On iOS, this property returns NO in jurisdictions where shutter sound production cannot be disabled. On all other platforms, it always returns NO.
        #[unsafe(method(isShutterSoundSuppressionSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isShutterSoundSuppressionSupported(&self) -> bool;

        /// A read-only BOOL value indicating whether still image buffers may be rotated to match the sensor orientation of earlier generation hardware.
        ///
        /// Value is YES for camera configurations which support compensation for the sensor orientation, which is applied to HEIC, JPEG, and uncompressed processed photos only; compensation is never applied to Bayer RAW or Apple ProRaw captures.
        #[unsafe(method(isCameraSensorOrientationCompensationSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isCameraSensorOrientationCompensationSupported(&self) -> bool;

        /// A BOOL value indicating that still image buffers will be rotated to match the sensor orientation of earlier generation hardware.
        ///
        /// Default is YES when cameraSensorOrientationCompensationSupported is YES. Set to NO if your app does not require sensor orientation compensation.
        #[unsafe(method(isCameraSensorOrientationCompensationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isCameraSensorOrientationCompensationEnabled(&self) -> bool;

        /// Setter for [`isCameraSensorOrientationCompensationEnabled`][Self::isCameraSensorOrientationCompensationEnabled].
        ///
        /// Set to NO if your app does not require sensor orientation compensation (see the getter's discussion).
        #[unsafe(method(setCameraSensorOrientationCompensationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCameraSensorOrientationCompensationEnabled(
            &self,
            camera_sensor_orientation_compensation_enabled: bool,
        );
906    );
907}
908
extern_class!(
    /// AVCapturePhotoOutputReadinessCoordinator notifies its delegate of changes in an AVCapturePhotoOutput's captureReadiness property and can be used to coordinate UI updates on the main queue with use of AVCapturePhotoOutput on a background queue.
    ///
    ///
    /// AVCapturePhotoOutputReadinessCoordinator tracks its output's captureReadiness and incorporates additional requests registered via -startTrackingCaptureRequestUsingPhotoSettings:. This allows clients to synchronously update shutter button availability and appearance on the main thread while calling -[AVCapturePhotoOutput capturePhotoWithSettings:delegate:] asynchronously on a background queue.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturephotooutputreadinesscoordinator?language=objc)
    // Direct subclass of NSObject (no intermediate AVFoundation superclass).
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCapturePhotoOutputReadinessCoordinator;
);
920
// Conformance emitted by objc2's header-translator from the AVFoundation headers.
extern_conformance!(
    unsafe impl NSObjectProtocol for AVCapturePhotoOutputReadinessCoordinator {}
);
924
impl AVCapturePhotoOutputReadinessCoordinator {
    extern_methods!(
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        #[cfg(feature = "AVCaptureOutputBase")]
        #[unsafe(method(initWithPhotoOutput:))]
        #[unsafe(method_family = init)]
        pub unsafe fn initWithPhotoOutput(
            this: Allocated<Self>,
            photo_output: &AVCapturePhotoOutput,
        ) -> Retained<Self>;

        /// The receiver's delegate, called on the main queue.
        ///
        ///
        /// The value of this property is an object conforming to the AVCapturePhotoOutputReadinessCoordinatorDelegate protocol that will receive a callback when the captureReadiness property changes. Callbacks are delivered on the main queue, allowing UI updates to be done directly in the callback. A callback with the initial value of captureReadiness is delivered when delegate is set.
        #[unsafe(method(delegate))]
        #[unsafe(method_family = none)]
        pub unsafe fn delegate(
            &self,
        ) -> Option<Retained<ProtocolObject<dyn AVCapturePhotoOutputReadinessCoordinatorDelegate>>>;

        /// Setter for [`delegate`][Self::delegate].
        ///
        /// This is a [weak property][objc2::topics::weak_property].
        #[unsafe(method(setDelegate:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDelegate(
            &self,
            delegate: Option<&ProtocolObject<dyn AVCapturePhotoOutputReadinessCoordinatorDelegate>>,
        );

        /// A value specifying whether the coordinator's photo output is ready to respond to new capture requests in a timely manner.
        ///
        ///
        /// The value incorporates the photo output's captureReadiness and any requests registered using -startTrackingCaptureRequestUsingPhotoSettings:. The value is updated before calling the -readinessCoordinator:captureReadinessDidChange: callback. See AVCapturePhotoOutput's captureReadiness documentation for a discussion of how to update shutter availability and appearance based on the captureReadiness value. This property is key-value observable and all change notifications are delivered on the main queue, allowing UI updates to be done directly in the callback.
        #[unsafe(method(captureReadiness))]
        #[unsafe(method_family = none)]
        pub unsafe fn captureReadiness(&self) -> AVCapturePhotoOutputCaptureReadiness;

        /// Track the capture request represented by the specified photo settings until it is enqueued to the photo output and update captureReadiness to include this request.
        ///
        ///
        /// Parameter `settings`: The AVCapturePhotoSettings which will be passed to -[AVCapturePhotoOutput capturePhotoWithSettings:delegate:] for this capture request.
        ///
        ///
        /// The captureReadiness property is updated to include the tracked request until the photo output receives a settings object with the same or a newer uniqueID. It is recommended that the same photo settings be passed to -[AVCapturePhotoOutput capturePhotoWithSettings:delegate:] to ensure the captureReadiness value is consistent once the capture begins. When called on the main queue the delegate callback is invoked synchronously before returning to ensure shutter availability is updated immediately and prevent queued touch events from initiating unwanted captures. The -startTrackingCaptureRequestUsingPhotoSettings: method can be called while in the SessionNotRunning state to allow the shutter button to be interactive while the session is being started on a background queue. An NSInvalidArgumentException is thrown if the photo settings are invalid.
        #[unsafe(method(startTrackingCaptureRequestUsingPhotoSettings:))]
        #[unsafe(method_family = none)]
        pub unsafe fn startTrackingCaptureRequestUsingPhotoSettings(
            &self,
            settings: &AVCapturePhotoSettings,
        );

        /// Stop tracking the capture request represented by the specified photo settings uniqueID and update captureReadiness to no longer include this request.
        ///
        ///
        /// Parameter `settingsUniqueID`: The AVCapturePhotoSettings.uniqueID of the settings passed to -startTrackingCaptureRequestUsingPhotoSettings:.
        ///
        ///
        /// Tracking automatically stops when -[AVCapturePhotoOutput capturePhotoWithSettings:delegate:] is called with a photo settings object with the same or a newer uniqueID, but in cases where an error or other condition prevents calling -capturePhotoWithSettings:delegate: tracking should be explicitly stopped to ensure the captureReadiness value is up to date. When called on the main queue the delegate callback is invoked synchronously before returning to ensure shutter availability is updated immediately.
        #[unsafe(method(stopTrackingCaptureRequestUsingPhotoSettingsUniqueID:))]
        #[unsafe(method_family = none)]
        pub unsafe fn stopTrackingCaptureRequestUsingPhotoSettingsUniqueID(
            &self,
            settings_unique_id: i64,
        );
    );
}
1000
extern_protocol!(
    /// [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturephotooutputreadinesscoordinatordelegate?language=objc)
    pub unsafe trait AVCapturePhotoOutputReadinessCoordinatorDelegate:
        NSObjectProtocol
    {
        /// A callback delivered on the main queue whenever the captureReadiness property changes.
        ///
        ///
        /// Parameter `coordinator`: The calling instance of AVCapturePhotoOutputReadinessCoordinator.
        ///
        /// Parameter `captureReadiness`: The updated captureReadiness value which can be used to update shutter button availability and appearance.
        ///
        ///
        /// This callback is always delivered on the main queue and is suitable for updating shutter button availability and appearance.
        // Marked optional: conforming Objective-C delegates are not required to implement this method.
        #[optional]
        #[unsafe(method(readinessCoordinator:captureReadinessDidChange:))]
        #[unsafe(method_family = none)]
        unsafe fn readinessCoordinator_captureReadinessDidChange(
            &self,
            coordinator: &AVCapturePhotoOutputReadinessCoordinator,
            capture_readiness: AVCapturePhotoOutputCaptureReadiness,
        );
    }
);
1025
1026/// AVCapturePhotoOutputDepthDataDeliverySupport.
1027#[cfg(feature = "AVCaptureOutputBase")]
1028impl AVCapturePhotoOutput {
1029    extern_methods!(
        /// A BOOL value specifying whether depth data delivery is supported.
        ///
        ///
        /// Some cameras and configurations support the delivery of depth data (e.g. disparity maps) along with the photo. This property returns YES if the session's current configuration allows photos to be captured with depth data, from which depth-related filters may be applied. When switching cameras or formats this property may change. When this property changes from YES to NO, depthDataDeliveryEnabled also reverts to NO. If you've previously opted in for depth data delivery and then change configurations, you may need to set depthDataDeliveryEnabled = YES again. This property is key-value observable.
        #[unsafe(method(isDepthDataDeliverySupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isDepthDataDeliverySupported(&self) -> bool;

        /// A BOOL specifying whether the photo render pipeline is prepared for depth data delivery.
        ///
        ///
        /// Default is NO. Set to YES if you wish depth data to be delivered with your AVCapturePhotos. This property may only be set to YES if depthDataDeliverySupported is YES. Enabling depth data delivery requires a lengthy reconfiguration of the capture render pipeline, so if you intend to capture depth data, you should set this property to YES before calling -[AVCaptureSession startRunning].
        #[unsafe(method(isDepthDataDeliveryEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isDepthDataDeliveryEnabled(&self) -> bool;

        /// Setter for [`isDepthDataDeliveryEnabled`][Self::isDepthDataDeliveryEnabled].
        ///
        /// May only be set to YES when [`isDepthDataDeliverySupported`][Self::isDepthDataDeliverySupported] returns YES (see the getter's discussion).
        #[unsafe(method(setDepthDataDeliveryEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDepthDataDeliveryEnabled(&self, depth_data_delivery_enabled: bool);

        /// A BOOL value specifying whether portrait effects matte delivery is supported.
        ///
        ///
        /// Some cameras and configurations support the delivery of a matting image to augment depth data and aid in high quality portrait effect rendering (see AVPortraitEffectsMatte.h). This property returns YES if the session's current configuration allows photos to be captured with a portrait effects matte. When switching cameras or formats this property may change. When this property changes from YES to NO, portraitEffectsMatteDeliveryEnabled also reverts to NO. If you've previously opted in for portrait effects matte delivery and then change configurations, you may need to set portraitEffectsMatteDeliveryEnabled = YES again. This property is key-value observable.
        #[unsafe(method(isPortraitEffectsMatteDeliverySupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isPortraitEffectsMatteDeliverySupported(&self) -> bool;

        /// A BOOL specifying whether the photo render pipeline is prepared for portrait effects matte delivery.
        ///
        ///
        /// Default is NO. Set to YES if you wish portrait effects mattes to be delivered with your AVCapturePhotos. This property may only be set to YES if portraitEffectsMatteDeliverySupported is YES. Portrait effects matte generation requires depth to be present, so when enabling portrait effects matte delivery, you must also set depthDataDeliveryEnabled to YES. Enabling portrait effects matte delivery requires a lengthy reconfiguration of the capture render pipeline, so if you intend to capture portrait effects mattes, you should set this property to YES before calling -[AVCaptureSession startRunning].
        #[unsafe(method(isPortraitEffectsMatteDeliveryEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isPortraitEffectsMatteDeliveryEnabled(&self) -> bool;

        /// Setter for [`isPortraitEffectsMatteDeliveryEnabled`][Self::isPortraitEffectsMatteDeliveryEnabled].
        ///
        /// May only be set to YES when [`isPortraitEffectsMatteDeliverySupported`][Self::isPortraitEffectsMatteDeliverySupported] returns YES, and depthDataDeliveryEnabled must also be YES (see the getter's discussion).
        #[unsafe(method(setPortraitEffectsMatteDeliveryEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPortraitEffectsMatteDeliveryEnabled(
            &self,
            portrait_effects_matte_delivery_enabled: bool,
        );

        #[cfg(feature = "AVSemanticSegmentationMatte")]
        /// An array of supported semantic segmentation matte types that may be captured and delivered along with your AVCapturePhotos.
        ///
        ///
        /// Some cameras and configurations support the delivery of semantic segmentation matting images (e.g. segmentations of the hair, skin, or teeth in the photo). This property returns an array of AVSemanticSegmentationMatteTypes available given the session's current configuration. When switching cameras or formats this property may change. When this property changes, enabledSemanticSegmentationMatteTypes reverts to an empty array. If you've previously opted in for delivery of one or more semantic segmentation mattes and then change configurations, you need to set up your enabledSemanticSegmentationMatteTypes again. This property is key-value observable.
        #[unsafe(method(availableSemanticSegmentationMatteTypes))]
        #[unsafe(method_family = none)]
        pub unsafe fn availableSemanticSegmentationMatteTypes(
            &self,
        ) -> Retained<NSArray<AVSemanticSegmentationMatteType>>;
1085
1086        #[cfg(feature = "AVSemanticSegmentationMatte")]
1087        /// An array of semantic segmentation matte types which the photo render pipeline is prepared to deliver.
1088        ///
1089        ///
1090        /// Default is empty array. You may set this to the array of matte types you'd like to be delivered with your AVCapturePhotos. The array may only contain values present in availableSemanticSegmentationMatteTypes. Enabling semantic segmentation matte delivery requires a lengthy reconfiguration of the capture render pipeline, so if you intend to capture semantic segmentation mattes, you should set this property to YES before calling -[AVCaptureSession startRunning].
1091        #[unsafe(method(enabledSemanticSegmentationMatteTypes))]
1092        #[unsafe(method_family = none)]
1093        pub unsafe fn enabledSemanticSegmentationMatteTypes(
1094            &self,
1095        ) -> Retained<NSArray<AVSemanticSegmentationMatteType>>;
1096
1097        #[cfg(feature = "AVSemanticSegmentationMatte")]
1098        /// Setter for [`enabledSemanticSegmentationMatteTypes`][Self::enabledSemanticSegmentationMatteTypes].
1099        #[unsafe(method(setEnabledSemanticSegmentationMatteTypes:))]
1100        #[unsafe(method_family = none)]
1101        pub unsafe fn setEnabledSemanticSegmentationMatteTypes(
1102            &self,
1103            enabled_semantic_segmentation_matte_types: &NSArray<AVSemanticSegmentationMatteType>,
1104        );
1105    );
1106}
1107
extern_protocol!(
    /// A set of delegate callbacks to be implemented by a client who calls AVCapturePhotoOutput's -capturePhotoWithSettings:delegate.
    ///
    ///
    /// AVCapturePhotoOutput invokes the AVCapturePhotoCaptureDelegate callbacks on a common dispatch queue — not necessarily the main queue. While the -captureOutput:willBeginCaptureForResolvedSettings: callback always comes first and the -captureOutput:didFinishCaptureForResolvedSettings: callback always comes last, none of the other callbacks can be assumed to come in any particular order. The AVCaptureResolvedPhotoSettings instance passed to the client with each callback has the same uniqueID as the AVCapturePhotoSettings instance passed in -capturePhotoWithSettings:delegate:. All callbacks are marked optional, but depending on the features you've specified in your AVCapturePhotoSettings, some callbacks become mandatory and are validated in -capturePhotoWithSettings:delegate:. If your delegate does not implement the mandatory callbacks, an NSInvalidArgumentException is thrown.
    ///
    /// - If you initialize your photo settings with a format dictionary, or use one of the default constructors (that is, if you're not requesting a RAW-only capture), your delegate must respond to either - captureOutput:didFinishProcessingPhoto:error: or the deprecated -captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:. If your delegate responds to both of these callbacks, only the undeprecated variant will be called.
    /// - If you initialize your photo settings with a rawPhotoPixelFormatType, your delegate must respond to either -captureOutput:didFinishProcessingPhoto:error: or the deprecated -captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:. If your delegate responds to both of these callbacks, only the undeprecated variant will be called.
    /// - If you set livePhotoMovieFileURL to non-nil, your delegate must respond to -captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error:.
    ///
    /// In the event of an error, all expected callbacks are fired with an appropriate error.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturephotocapturedelegate?language=objc)
    // NOTE(review): every method below is #[optional] at the Objective-C level, but per the
    // trait docs above some become mandatory depending on the AVCapturePhotoSettings used.
    pub unsafe trait AVCapturePhotoCaptureDelegate: NSObjectProtocol {
        #[cfg(feature = "AVCaptureOutputBase")]
        /// A callback fired as soon as the capture settings have been resolved.
        ///
        ///
        /// Parameter `output`: The calling instance of AVCapturePhotoOutput.
        ///
        /// Parameter `resolvedSettings`: An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
        ///
        ///
        /// This callback is always delivered first for a particular capture request. It is delivered as soon as possible after you call -capturePhotoWithSettings:delegate:, so you can know what to expect in the remainder of your callbacks.
        #[optional]
        #[unsafe(method(captureOutput:willBeginCaptureForResolvedSettings:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_willBeginCaptureForResolvedSettings(
            &self,
            output: &AVCapturePhotoOutput,
            resolved_settings: &AVCaptureResolvedPhotoSettings,
        );

        #[cfg(feature = "AVCaptureOutputBase")]
        /// A callback fired just as the photo is being taken.
        ///
        ///
        /// Parameter `output`: The calling instance of AVCapturePhotoOutput.
        ///
        /// Parameter `resolvedSettings`: An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
        ///
        ///
        /// The timing of this callback is analogous to AVCaptureStillImageOutput's capturingStillImage property changing from NO to YES. The callback is delivered right after the shutter sound is heard (note that shutter sounds are suppressed when Live Photos are being captured).
        #[optional]
        #[unsafe(method(captureOutput:willCapturePhotoForResolvedSettings:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_willCapturePhotoForResolvedSettings(
            &self,
            output: &AVCapturePhotoOutput,
            resolved_settings: &AVCaptureResolvedPhotoSettings,
        );

        #[cfg(feature = "AVCaptureOutputBase")]
        /// A callback fired just after the photo is taken.
        ///
        ///
        /// Parameter `output`: The calling instance of AVCapturePhotoOutput.
        ///
        /// Parameter `resolvedSettings`: An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
        ///
        ///
        /// The timing of this callback is analogous to AVCaptureStillImageOutput's capturingStillImage property changing from YES to NO.
        #[optional]
        #[unsafe(method(captureOutput:didCapturePhotoForResolvedSettings:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_didCapturePhotoForResolvedSettings(
            &self,
            output: &AVCapturePhotoOutput,
            resolved_settings: &AVCaptureResolvedPhotoSettings,
        );

        #[cfg(feature = "AVCaptureOutputBase")]
        /// A callback fired when photos are ready to be delivered to you (RAW or processed).
        ///
        ///
        /// Parameter `output`: The calling instance of AVCapturePhotoOutput.
        ///
        /// Parameter `photo`: An instance of AVCapturePhoto.
        ///
        /// Parameter `error`: An error indicating what went wrong. If the photo was processed successfully, nil is returned.
        ///
        ///
        /// This callback fires resolvedSettings.expectedPhotoCount number of times for a given capture request. Note that the photo parameter is always non nil, even if an error is returned. The delivered AVCapturePhoto's rawPhoto property can be queried to know if it's a RAW image or processed image.
        // This is the modern delivery callback; the two sample-buffer variants further
        // down are the deprecated equivalents (per the trait-level documentation).
        #[optional]
        #[unsafe(method(captureOutput:didFinishProcessingPhoto:error:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_didFinishProcessingPhoto_error(
            &self,
            output: &AVCapturePhotoOutput,
            photo: &AVCapturePhoto,
            error: Option<&NSError>,
        );

        #[cfg(feature = "AVCaptureOutputBase")]
        /// A callback fired just after the photo proxy has been taken.
        ///
        ///
        /// Parameter `output`: The calling instance of AVCapturePhotoOutput.
        ///
        /// Parameter `deferredPhotoProxy`: The AVCaptureDeferredPhotoProxy instance which contains a proxy CVPixelBuffer as a placeholder for the final image.  The fileDataRepresentation from this object may be used with PHAssetCreation to eventually produce the final, processed photo into the user's Photo Library.  The in-memory proxy fileDataRepresentation should be added to the photo library as quickly as possible after receipt to ensure that the photo library can begin background processing and also so that the intermediates are not removed by a periodic clean-up job looking for abandoned intermediates produced by using the deferred photo processing APIs.
        ///
        ///
        /// Parameter `error`: An error indicating what went wrong if the photo proxy or any of the underlying intermediate files couldn't be created.
        ///
        ///
        /// Delegates are required to implement this method if they opt in for deferred photo processing, otherwise an NSInvalidArgumentException will be thrown from the -[AVCapturePhotoOutput capturePhotoWithSettings:delegate:] method.
        #[optional]
        #[unsafe(method(captureOutput:didFinishCapturingDeferredPhotoProxy:error:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_didFinishCapturingDeferredPhotoProxy_error(
            &self,
            output: &AVCapturePhotoOutput,
            // Nullable: the proxy may be absent when an error occurred (see `error` docs).
            deferred_photo_proxy: Option<&AVCaptureDeferredPhotoProxy>,
            error: Option<&NSError>,
        );

        #[cfg(all(
            feature = "AVCaptureOutputBase",
            feature = "AVCaptureStillImageOutput",
            feature = "objc2-core-media"
        ))]
        /// A callback fired when the primary processed photo or photos are done.
        ///
        ///
        /// Parameter `output`: The calling instance of AVCapturePhotoOutput.
        ///
        /// Parameter `photoSampleBuffer`: A CMSampleBuffer containing an uncompressed pixel buffer or compressed data, along with timing information and metadata. May be nil if there was an error.
        ///
        /// Parameter `previewPhotoSampleBuffer`: An optional CMSampleBuffer containing an uncompressed, down-scaled preview pixel buffer. Note that the preview sample buffer contains no metadata. Refer to the photoSampleBuffer for metadata (e.g., the orientation). May be nil.
        ///
        /// Parameter `resolvedSettings`: An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
        ///
        /// Parameter `bracketSettings`: If this image is being delivered as part of a bracketed capture, the bracketSettings corresponding to this image. Otherwise nil.
        ///
        /// Parameter `error`: An error indicating what went wrong if photoSampleBuffer is nil.
        ///
        ///
        /// If you've requested a single processed image (uncompressed or compressed) capture, the photo is delivered here. If you've requested a bracketed capture, this callback is fired bracketedSettings.count times (once for each photo in the bracket).
        // Deprecated: superseded by captureOutput:didFinishProcessingPhoto:error:. If a
        // delegate implements both, only the undeprecated variant is called (trait docs).
        #[deprecated]
        #[optional]
        #[unsafe(method(captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_didFinishProcessingPhotoSampleBuffer_previewPhotoSampleBuffer_resolvedSettings_bracketSettings_error(
            &self,
            output: &AVCapturePhotoOutput,
            photo_sample_buffer: Option<&CMSampleBuffer>,
            preview_photo_sample_buffer: Option<&CMSampleBuffer>,
            resolved_settings: &AVCaptureResolvedPhotoSettings,
            bracket_settings: Option<&AVCaptureBracketedStillImageSettings>,
            error: Option<&NSError>,
        );

        #[cfg(all(
            feature = "AVCaptureOutputBase",
            feature = "AVCaptureStillImageOutput",
            feature = "objc2-core-media"
        ))]
        /// A callback fired when the RAW photo or photos are done.
        ///
        ///
        /// Parameter `output`: The calling instance of AVCapturePhotoOutput.
        ///
        /// Parameter `rawSampleBuffer`: A CMSampleBuffer containing Bayer RAW pixel data, along with timing information and metadata. May be nil if there was an error.
        ///
        /// Parameter `previewPhotoSampleBuffer`: An optional CMSampleBuffer containing an uncompressed, down-scaled preview pixel buffer. Note that the preview sample buffer contains no metadata. Refer to the rawSampleBuffer for metadata (e.g., the orientation). May be nil.
        ///
        /// Parameter `resolvedSettings`: An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
        ///
        /// Parameter `bracketSettings`: If this image is being delivered as part of a bracketed capture, the bracketSettings corresponding to this image. Otherwise nil.
        ///
        /// Parameter `error`: An error indicating what went wrong if rawSampleBuffer is nil.
        ///
        ///
        /// Single RAW image and bracketed RAW photos are delivered here. If you've requested a RAW bracketed capture, this callback is fired bracketedSettings.count times (once for each photo in the bracket).
        // Deprecated: superseded by captureOutput:didFinishProcessingPhoto:error:. If a
        // delegate implements both, only the undeprecated variant is called (trait docs).
        #[deprecated]
        #[optional]
        #[unsafe(method(captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_didFinishProcessingRawPhotoSampleBuffer_previewPhotoSampleBuffer_resolvedSettings_bracketSettings_error(
            &self,
            output: &AVCapturePhotoOutput,
            raw_sample_buffer: Option<&CMSampleBuffer>,
            preview_photo_sample_buffer: Option<&CMSampleBuffer>,
            resolved_settings: &AVCaptureResolvedPhotoSettings,
            bracket_settings: Option<&AVCaptureBracketedStillImageSettings>,
            error: Option<&NSError>,
        );

        #[cfg(feature = "AVCaptureOutputBase")]
        /// A callback fired when the Live Photo movie has captured all its media data, though all media has not yet been written to file.
        ///
        ///
        /// Parameter `output`: The calling instance of AVCapturePhotoOutput.
        ///
        /// Parameter `outputFileURL`: The URL to which the movie file will be written. This URL is equal to your AVCapturePhotoSettings.livePhotoMovieURL.
        ///
        /// Parameter `resolvedSettings`: An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
        ///
        ///
        /// When this callback fires, no new media is being written to the file. If you are displaying a "Live" badge, this is an appropriate time to dismiss it. The movie file itself is not done being written until the -captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error: callback fires.
        #[optional]
        #[unsafe(method(captureOutput:didFinishRecordingLivePhotoMovieForEventualFileAtURL:resolvedSettings:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_didFinishRecordingLivePhotoMovieForEventualFileAtURL_resolvedSettings(
            &self,
            output: &AVCapturePhotoOutput,
            output_file_url: &NSURL,
            resolved_settings: &AVCaptureResolvedPhotoSettings,
        );

        #[cfg(all(feature = "AVCaptureOutputBase", feature = "objc2-core-media"))]
        /// A callback fired when the Live Photo movie is finished being written to disk.
        ///
        ///
        /// Parameter `output`: The calling instance of AVCapturePhotoOutput.
        ///
        /// Parameter `outputFileURL`: The URL where the movie file resides. This URL is equal to your AVCapturePhotoSettings.livePhotoMovieURL.
        ///
        /// Parameter `duration`: A CMTime indicating the duration of the movie file.
        ///
        /// Parameter `photoDisplayTime`: A CMTime indicating the time in the movie at which the still photo should be displayed.
        ///
        /// Parameter `resolvedSettings`: An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
        ///
        /// Parameter `error`: An error indicating what went wrong if the outputFileURL is damaged.
        ///
        ///
        /// When this callback fires, the movie on disk is fully finished and ready for consumption.
        #[optional]
        #[unsafe(method(captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_didFinishProcessingLivePhotoToMovieFileAtURL_duration_photoDisplayTime_resolvedSettings_error(
            &self,
            output: &AVCapturePhotoOutput,
            output_file_url: &NSURL,
            duration: CMTime,
            photo_display_time: CMTime,
            resolved_settings: &AVCaptureResolvedPhotoSettings,
            error: Option<&NSError>,
        );

        #[cfg(feature = "AVCaptureOutputBase")]
        /// A callback fired when the photo capture is completed and no more callbacks will be fired.
        ///
        ///
        /// Parameter `output`: The calling instance of AVCapturePhotoOutput.
        ///
        /// Parameter `resolvedSettings`: An instance of AVCaptureResolvedPhotoSettings indicating which capture features were selected.
        ///
        /// Parameter `error`: An error indicating whether the capture was unsuccessful. Nil if there were no problems.
        ///
        ///
        /// This callback always fires last and when it does, you may clean up any state relating to this photo capture.
        #[optional]
        #[unsafe(method(captureOutput:didFinishCaptureForResolvedSettings:error:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_didFinishCaptureForResolvedSettings_error(
            &self,
            output: &AVCapturePhotoOutput,
            resolved_settings: &AVCaptureResolvedPhotoSettings,
            error: Option<&NSError>,
        );
    }
);
1372
extern_class!(
    /// A mutable settings object encapsulating all the desired properties of a photo capture.
    ///
    ///
    /// To take a picture, a client instantiates and configures an AVCapturePhotoSettings object, then calls AVCapturePhotoOutput's -capturePhotoWithSettings:delegate:, passing the settings and a delegate to be informed when events relating to the photo capture occur. Since AVCapturePhotoSettings has no reference to the AVCapturePhotoOutput instance with which it will be used, minimal validation occurs while you configure an AVCapturePhotoSettings instance. The bulk of the validation is executed when you call AVCapturePhotoOutput's -capturePhotoWithSettings:delegate:.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturephotosettings?language=objc)
    // Declared with NSObject as the Objective-C superclass; the struct itself carries no
    // Rust fields — it is a type-safe handle to the Objective-C class of the same name.
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCapturePhotoSettings;
);
1384
// AVCapturePhotoSettings conforms to NSCopying (it can be copied via `-copy`).
extern_conformance!(
    unsafe impl NSCopying for AVCapturePhotoSettings {}
);
1388
// Tells the NSCopying machinery that copying an AVCapturePhotoSettings yields
// another AVCapturePhotoSettings (`Result = Self`), not some other class.
unsafe impl CopyingHelper for AVCapturePhotoSettings {
    type Result = Self;
}
1392
// AVCapturePhotoSettings conforms to the base NSObject protocol.
extern_conformance!(
    unsafe impl NSObjectProtocol for AVCapturePhotoSettings {}
);
1396
1397impl AVCapturePhotoSettings {
1398    extern_methods!(
1399        /// Creates a default instance of AVCapturePhotoSettings.
1400        ///
1401        ///
1402        /// Returns: An instance of AVCapturePhotoSettings.
1403        ///
1404        ///
1405        /// A default AVCapturePhotoSettings object has a format of AVVideoCodecTypeJPEG, a fileType of AVFileTypeJPEG, and photoQualityPrioritization set to AVCapturePhotoQualityPrioritizationBalanced.
1406        #[unsafe(method(photoSettings))]
1407        #[unsafe(method_family = none)]
1408        pub unsafe fn photoSettings() -> Retained<Self>;
1409
1410        /// Creates an instance of AVCapturePhotoSettings with a user-specified output format.
1411        ///
1412        ///
1413        /// Parameter `format`: A dictionary of Core Video pixel buffer attributes or AVVideoSettings, analogous to AVCaptureStillImageOutput's outputSettings property.
1414        ///
1415        /// Returns: An instance of AVCapturePhotoSettings.
1416        ///
1417        ///
1418        /// If you wish an uncompressed format, your dictionary must contain kCVPixelBufferPixelFormatTypeKey, and the format specified must be present in AVCapturePhotoOutput's -availablePhotoPixelFormatTypes array. kCVPixelBufferPixelFormatTypeKey is the only supported key when expressing uncompressed output. If you wish a compressed format, your dictionary must contain AVVideoCodecKey and the codec specified must be present in AVCapturePhotoOutput's -availablePhotoCodecTypes array. If you are specifying a compressed format, the AVVideoCompressionPropertiesKey is also supported, with a payload dictionary containing a single AVVideoQualityKey. Passing a nil format dictionary is analogous to calling +photoSettings.
1419        ///
1420        /// # Safety
1421        ///
1422        /// `format` generic should be of the correct type.
1423        #[unsafe(method(photoSettingsWithFormat:))]
1424        #[unsafe(method_family = none)]
1425        pub unsafe fn photoSettingsWithFormat(
1426            format: Option<&NSDictionary<NSString, AnyObject>>,
1427        ) -> Retained<Self>;
1428
1429        /// Creates an instance of AVCapturePhotoSettings specifying RAW only output.
1430        ///
1431        ///
1432        /// Parameter `rawPixelFormatType`: A Bayer RAW or Apple ProRAW pixel format OSType (defined in CVPixelBuffer.h).
1433        ///
1434        /// Returns: An instance of AVCapturePhotoSettings.
1435        ///
1436        ///
1437        /// rawPixelFormatType must be one of the OSTypes contained in AVCapturePhotoOutput's -availableRawPhotoPixelFormatTypes array. See AVCapturePhotoOutput's -capturePhotoWithSettings:delegate: inline documentation for a discussion of restrictions on AVCapturePhotoSettings when requesting RAW capture.
1438        #[unsafe(method(photoSettingsWithRawPixelFormatType:))]
1439        #[unsafe(method_family = none)]
1440        pub unsafe fn photoSettingsWithRawPixelFormatType(
1441            raw_pixel_format_type: OSType,
1442        ) -> Retained<Self>;
1443
1444        /// Creates an instance of AVCapturePhotoSettings specifying RAW + a processed format (such as JPEG).
1445        ///
1446        ///
1447        /// Parameter `rawPixelFormatType`: A Bayer RAW or Apple ProRAW pixel format OSType (defined in CVPixelBuffer.h).
1448        ///
1449        /// Parameter `processedFormat`: A dictionary of Core Video pixel buffer attributes or AVVideoSettings, analogous to AVCaptureStillImageOutput's outputSettings property.
1450        ///
1451        /// Returns: An instance of AVCapturePhotoSettings.
1452        ///
1453        ///
1454        /// rawPixelFormatType must be one of the OSTypes contained in AVCapturePhotoOutput's -availableRawPhotoPixelFormatTypes array. If you wish an uncompressed processedFormat, your dictionary must contain kCVPixelBufferPixelFormatTypeKey, and the processedFormat specified must be present in AVCapturePhotoOutput's -availablePhotoPixelFormatTypes array. kCVPixelBufferPixelFormatTypeKey is the only supported key when expressing uncompressed processedFormat. If you wish a compressed format, your dictionary must contain AVVideoCodecKey and the codec specified must be present in AVCapturePhotoOutput's -availablePhotoCodecTypes array. If you are specifying a compressed format, the AVVideoCompressionPropertiesKey is also supported, with a payload dictionary containing a single AVVideoQualityKey. Passing a nil processedFormat dictionary is analogous to calling +photoSettingsWithRawPixelFormatType:. See AVCapturePhotoOutput's -capturePhotoWithSettings:delegate: inline documentation for a discussion of restrictions on AVCapturePhotoSettings when requesting RAW capture.
1455        ///
1456        /// # Safety
1457        ///
1458        /// `processed_format` generic should be of the correct type.
1459        #[unsafe(method(photoSettingsWithRawPixelFormatType:processedFormat:))]
1460        #[unsafe(method_family = none)]
1461        pub unsafe fn photoSettingsWithRawPixelFormatType_processedFormat(
1462            raw_pixel_format_type: OSType,
1463            processed_format: Option<&NSDictionary<NSString, AnyObject>>,
1464        ) -> Retained<Self>;
1465
1466        #[cfg(feature = "AVMediaFormat")]
1467        /// Creates an instance of AVCapturePhotoSettings specifying RAW + a processed format (such as JPEG) and a file container to which it will be written.
1468        ///
1469        ///
1470        /// Parameter `rawPixelFormatType`: A Bayer RAW or Apple ProRAW pixel format OSType (defined in CVPixelBuffer.h). Pass 0 if you do not desire a RAW photo callback.
1471        ///
1472        /// Parameter `rawFileType`: The file container for which the RAW image should be formatted to be written. Pass nil if you have no preferred file container. A default container will be chosen for you.
1473        ///
1474        /// Parameter `processedFormat`: A dictionary of Core Video pixel buffer attributes or AVVideoSettings, analogous to AVCaptureStillImageOutput's outputSettings property. Pass nil if you do not desire a processed photo callback.
1475        ///
1476        /// Parameter `processedFileType`: The file container for which the processed image should be formatted to be written. Pass nil if you have no preferred file container. A default container will be chosen for you.
1477        ///
1478        /// Returns: An instance of AVCapturePhotoSettings.
1479        ///
1480        ///
1481        /// rawPixelFormatType must be one of the OSTypes contained in AVCapturePhotoOutput's -availableRawPhotoPixelFormatTypes array. Set rawPixelFormatType to 0 if you do not desire a RAW photo callback. If you are specifying a rawFileType, it must be present in AVCapturePhotoOutput's -availableRawPhotoFileTypes array. If you wish an uncompressed processedFormat, your dictionary must contain kCVPixelBufferPixelFormatTypeKey, and the processedFormat specified must be present in AVCapturePhotoOutput's -availablePhotoPixelFormatTypes array. kCVPixelBufferPixelFormatTypeKey is the only supported key when expressing uncompressed processedFormat. If you wish a compressed format, your dictionary must contain AVVideoCodecKey and the codec specified must be present in AVCapturePhotoOutput's -availablePhotoCodecTypes array. If you are specifying a compressed format, the AVVideoCompressionPropertiesKey is also supported, with a payload dictionary containing a single AVVideoQualityKey. If you are specifying a processedFileType (such as AVFileTypeJPEG, AVFileTypeHEIC or AVFileTypeDICOM), it must be present in AVCapturePhotoOutput's -availablePhotoFileTypes array. Pass a nil processedFormat dictionary if you only desire a RAW photo capture. See AVCapturePhotoOutput's -capturePhotoWithSettings:delegate: inline documentation for a discussion of restrictions on AVCapturePhotoSettings when requesting RAW capture.
1482        ///
1483        /// # Safety
1484        ///
1485        /// `processed_format` generic should be of the correct type.
1486        #[unsafe(method(photoSettingsWithRawPixelFormatType:rawFileType:processedFormat:processedFileType:))]
1487        #[unsafe(method_family = none)]
1488        pub unsafe fn photoSettingsWithRawPixelFormatType_rawFileType_processedFormat_processedFileType(
1489            raw_pixel_format_type: OSType,
1490            raw_file_type: Option<&AVFileType>,
1491            processed_format: Option<&NSDictionary<NSString, AnyObject>>,
1492            processed_file_type: Option<&AVFileType>,
1493        ) -> Retained<Self>;
1494
        /// Creates an instance of AVCapturePhotoSettings with a new uniqueID from an existing instance of AVCapturePhotoSettings.
        ///
        ///
        /// Parameter `photoSettings`: An existing AVCapturePhotoSettings instance.
        ///
        /// Returns: A new instance of AVCapturePhotoSettings with a new uniqueID.
        ///
        ///
        /// Use this factory method to create a clone of an existing photo settings instance, but with a new uniqueID that can safely be passed to AVCapturePhotoOutput -capturePhotoWithSettings:delegate:.
        #[unsafe(method(photoSettingsFromPhotoSettings:))]
        #[unsafe(method_family = none)]
        pub unsafe fn photoSettingsFromPhotoSettings(
            photo_settings: &AVCapturePhotoSettings,
        ) -> Retained<Self>;

        /// A 64-bit number that uniquely identifies this instance.
        ///
        ///
        /// When you create an instance of AVCapturePhotoSettings, a uniqueID is generated automatically. This uniqueID is guaranteed to be unique for the lifetime of your process.
        #[unsafe(method(uniqueID))]
        #[unsafe(method_family = none)]
        pub unsafe fn uniqueID(&self) -> i64;

        /// A dictionary of Core Video pixel buffer attributes or AVVideoSettings, analogous to AVCaptureStillImageOutput's outputSettings property.
        ///
        ///
        /// The format dictionary you passed to one of the creation methods. May be nil if you've specified RAW-only capture.
        #[unsafe(method(format))]
        #[unsafe(method_family = none)]
        pub unsafe fn format(&self) -> Option<Retained<NSDictionary<NSString, AnyObject>>>;
1525
        /// A dictionary of AVVideoSettings keys specifying the RAW file format to be used for the RAW photo.
        ///
        ///
        /// One can specify desired format properties of the RAW file that will be created. Currently only the key AVVideoAppleProRAWBitDepthKey is allowed and the value to which it can be set should be from 8-16. The AVVideoCodecKey must be present in the receiver's -availableRawPhotoCodecTypes array as well as in -supportedRawPhotoCodecTypesForRawPhotoPixelFormatType:fileType:. AVVideoQualityKey (NSNumber in range [0.0,1.0]) can be optionally set and a value between [0.0,1.0] will use lossy compression with lower values being more lossy resulting in smaller file sizes but lower image quality, while a value of 1.0 will use lossless compression resulting in the largest file size but also the best quality.
        #[unsafe(method(rawFileFormat))]
        #[unsafe(method_family = none)]
        pub unsafe fn rawFileFormat(&self) -> Option<Retained<NSDictionary<NSString, AnyObject>>>;

        /// Setter for [`rawFileFormat`][Self::rawFileFormat].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        ///
        /// # Safety
        ///
        /// `raw_file_format` generic should be of the correct type.
        #[unsafe(method(setRawFileFormat:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setRawFileFormat(
            &self,
            raw_file_format: Option<&NSDictionary<NSString, AnyObject>>,
        );

        #[cfg(feature = "AVMediaFormat")]
        /// The file container for which the processed photo is formatted to be stored.
        ///
        ///
        /// The formatting of data within a photo buffer is often dependent on the file format intended for storage. For instance, a JPEG encoded photo buffer intended for storage in a JPEG (JPEG File Interchange Format) file differs from JPEG to be stored in HEIF. The HEIF-containerized JPEG buffer is tiled for readback efficiency and partitioned into the box structure dictated by the HEIF file format. Some codecs are only supported by AVCapturePhotoOutput if containerized. For instance, the AVVideoCodecTypeHEVC is only supported with AVFileTypeHEIF and AVFileTypeHEIC formatting. To discover which photo pixel format types and video codecs are supported for a given file type, you may query AVCapturePhotoOutput's -supportedPhotoPixelFormatTypesForFileType:, or -supportedPhotoCodecTypesForFileType: respectively.
        #[unsafe(method(processedFileType))]
        #[unsafe(method_family = none)]
        pub unsafe fn processedFileType(&self) -> Option<Retained<AVFileType>>;

        /// A Bayer RAW or Apple ProRAW pixel format OSType (defined in CVPixelBuffer.h).
        ///
        ///
        /// The rawPixelFormatType you specified in one of the creation methods. Returns 0 if you did not specify RAW capture. See AVCapturePhotoOutput's -capturePhotoWithSettings:delegate: inline documentation for a discussion of restrictions on AVCapturePhotoSettings when requesting RAW capture.
        #[unsafe(method(rawPhotoPixelFormatType))]
        #[unsafe(method_family = none)]
        pub unsafe fn rawPhotoPixelFormatType(&self) -> OSType;
1563
        #[cfg(feature = "AVMediaFormat")]
        /// The file container for which the RAW photo is formatted to be stored.
        ///
        ///
        /// The formatting of data within a RAW photo buffer may be dependent on the file format intended for storage. To discover which RAW photo pixel format types are supported for a given file type, you may query AVCapturePhotoOutput's -supportedRawPhotoPixelFormatTypesForFileType:.
        #[unsafe(method(rawFileType))]
        #[unsafe(method_family = none)]
        pub unsafe fn rawFileType(&self) -> Option<Retained<AVFileType>>;

        #[cfg(feature = "AVCaptureDevice")]
        /// Specifies whether the flash should be on, off, or chosen automatically by AVCapturePhotoOutput.
        ///
        ///
        /// flashMode takes the place of the deprecated AVCaptureDevice -flashMode API. Setting AVCaptureDevice.flashMode has no effect on AVCapturePhotoOutput, which only pays attention to the flashMode specified in your AVCapturePhotoSettings. The default value is AVCaptureFlashModeOff. Flash modes are defined in AVCaptureDevice.h. If you specify a flashMode of AVCaptureFlashModeOn, it wins over autoStillImageStabilizationEnabled=YES. When the device becomes very hot, the flash becomes temporarily unavailable until the device cools down (see AVCaptureDevice's -flashAvailable). While the flash is unavailable, AVCapturePhotoOutput's -supportedFlashModes property still reports AVCaptureFlashModeOn and AVCaptureFlashModeAuto as being available, thus allowing you to specify a flashMode of AVCaptureFlashModeOn. You should always check the AVCaptureResolvedPhotoSettings provided to you in the AVCapturePhotoCaptureDelegate callbacks, as the resolved flashEnabled property will tell you definitively if the flash is being used.
        #[unsafe(method(flashMode))]
        #[unsafe(method_family = none)]
        pub unsafe fn flashMode(&self) -> AVCaptureFlashMode;

        #[cfg(feature = "AVCaptureDevice")]
        /// Setter for [`flashMode`][Self::flashMode].
        #[unsafe(method(setFlashMode:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setFlashMode(&self, flash_mode: AVCaptureFlashMode);
1587
        /// Specifies whether red-eye reduction should be applied automatically on flash captures.
        ///
        ///
        /// Default is YES on platforms that support automatic red-eye reduction unless you are capturing a bracket using AVCapturePhotoBracketSettings or a RAW photo without a processed photo. For RAW photos with a processed photo the red-eye reduction will be applied to the processed photo only (RAW photos by definition are not processed). When set to YES, red-eye reduction is applied as needed for flash captures if the photo output's autoRedEyeReductionSupported property returns YES.
        #[unsafe(method(isAutoRedEyeReductionEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoRedEyeReductionEnabled(&self) -> bool;

        /// Setter for [`isAutoRedEyeReductionEnabled`][Self::isAutoRedEyeReductionEnabled].
        #[unsafe(method(setAutoRedEyeReductionEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoRedEyeReductionEnabled(&self, auto_red_eye_reduction_enabled: bool);

        /// Indicates how photo quality should be prioritized against speed of photo delivery.
        ///
        ///
        /// Default value is AVCapturePhotoQualityPrioritizationBalanced. The AVCapturePhotoOutput is capable of applying a variety of techniques to improve photo quality (reduce noise, preserve detail in low light, freeze motion, etc), depending on the source device's activeFormat. Some of these techniques can take significant processing time before the photo is returned to your delegate callback. The photoQualityPrioritization property allows you to specify your preferred quality vs speed of delivery. By default, speed and quality are considered to be of equal importance. When you specify AVCapturePhotoQualityPrioritizationSpeed, you indicate that speed should be prioritized at the expense of quality. Likewise, when you choose AVCapturePhotoQualityPrioritizationQuality, you signal your willingness to prioritize the very best quality at the expense of speed, and your readiness to wait (perhaps significantly) longer for the photo to be returned to your delegate.
        #[unsafe(method(photoQualityPrioritization))]
        #[unsafe(method_family = none)]
        pub unsafe fn photoQualityPrioritization(&self) -> AVCapturePhotoQualityPrioritization;

        /// Setter for [`photoQualityPrioritization`][Self::photoQualityPrioritization].
        #[unsafe(method(setPhotoQualityPrioritization:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPhotoQualityPrioritization(
            &self,
            photo_quality_prioritization: AVCapturePhotoQualityPrioritization,
        );
1616
        /// Specifies whether still image stabilization should be used automatically.
        ///
        ///
        /// Default is YES unless you are capturing a Bayer RAW photo (Bayer RAW photos may not be processed by definition) or a bracket using AVCapturePhotoBracketSettings. When set to YES, still image stabilization is applied automatically in low light to counteract hand shake. If the device has optical image stabilization, autoStillImageStabilizationEnabled makes use of lens stabilization as well.
        ///
        /// As of iOS 13 hardware, the AVCapturePhotoOutput is capable of applying a variety of multi-image fusion techniques to improve photo quality (reduce noise, preserve detail in low light, freeze motion, etc), all of which have been previously lumped under the stillImageStabilization moniker. This property should no longer be used as it no longer provides meaningful information about the techniques used to improve quality in a photo capture. Instead, you should use -photoQualityPrioritization to indicate your preferred quality vs speed.
        ///
        /// Deprecated: use [`photoQualityPrioritization`][Self::photoQualityPrioritization] instead.
        #[deprecated]
        #[unsafe(method(isAutoStillImageStabilizationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoStillImageStabilizationEnabled(&self) -> bool;

        /// Setter for [`isAutoStillImageStabilizationEnabled`][Self::isAutoStillImageStabilizationEnabled].
        ///
        /// Deprecated: use [`setPhotoQualityPrioritization`][Self::setPhotoQualityPrioritization] instead.
        #[deprecated]
        #[unsafe(method(setAutoStillImageStabilizationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoStillImageStabilizationEnabled(
            &self,
            auto_still_image_stabilization_enabled: bool,
        );

        /// Specifies whether virtual device image fusion should be used automatically.
        ///
        ///
        /// Default is YES unless you are capturing a RAW photo (RAW photos may not be processed by definition) or a bracket using AVCapturePhotoBracketSettings. When set to YES, and -[AVCapturePhotoOutput isVirtualDeviceFusionSupported] is also YES, constituent camera images of a virtual device may be fused to improve still image quality, depending on the current zoom factor, light levels, and focus position. You may determine whether virtual device fusion is enabled for a particular capture request by inspecting the virtualDeviceFusionEnabled property of the AVCaptureResolvedPhotoSettings. Note that when using the deprecated AVCaptureStillImageOutput interface with a virtual device, autoVirtualDeviceFusionEnabled fusion is always enabled if supported, and may not be turned off.
        #[unsafe(method(isAutoVirtualDeviceFusionEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoVirtualDeviceFusionEnabled(&self) -> bool;

        /// Setter for [`isAutoVirtualDeviceFusionEnabled`][Self::isAutoVirtualDeviceFusionEnabled].
        #[unsafe(method(setAutoVirtualDeviceFusionEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoVirtualDeviceFusionEnabled(
            &self,
            auto_virtual_device_fusion_enabled: bool,
        );

        /// Specifies whether DualCamera image fusion should be used automatically.
        ///
        ///
        /// Default is YES unless you are capturing a RAW photo (RAW photos may not be processed by definition) or a bracket using AVCapturePhotoBracketSettings. When set to YES, and -[AVCapturePhotoOutput isDualCameraFusionSupported] is also YES, wide-angle and telephoto images may be fused to improve still image quality, depending on the current zoom factor, light levels, and focus position. You may determine whether DualCamera fusion is enabled for a particular capture request by inspecting the dualCameraFusionEnabled property of the AVCaptureResolvedPhotoSettings. Note that when using the deprecated AVCaptureStillImageOutput interface with the DualCamera, auto DualCamera fusion is always enabled and may not be turned off. As of iOS 13, this property is deprecated in favor of autoVirtualDeviceFusionEnabled.
        ///
        /// Deprecated: use [`isAutoVirtualDeviceFusionEnabled`][Self::isAutoVirtualDeviceFusionEnabled] instead.
        #[deprecated]
        #[unsafe(method(isAutoDualCameraFusionEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoDualCameraFusionEnabled(&self) -> bool;

        /// Setter for [`isAutoDualCameraFusionEnabled`][Self::isAutoDualCameraFusionEnabled].
        ///
        /// Deprecated: use [`setAutoVirtualDeviceFusionEnabled`][Self::setAutoVirtualDeviceFusionEnabled] instead.
        #[deprecated]
        #[unsafe(method(setAutoDualCameraFusionEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoDualCameraFusionEnabled(&self, auto_dual_camera_fusion_enabled: bool);
1667
        #[cfg(feature = "AVCaptureDevice")]
        /// Specifies the constituent devices for which the virtual device should deliver photos.
        ///
        ///
        /// Default is empty array. To opt in for constituent device photo delivery, you may set this property to any subset of 2 or more of the devices in virtualDevice.constituentDevices. Your captureOutput:didFinishProcessingPhoto:error: callback will be called n times -- one for each of the devices you include in the array. You may only set this property to a non-nil array if you've set your AVCapturePhotoOutput's virtualDeviceConstituentPhotoDeliveryEnabled property to YES, and your delegate responds to the captureOutput:didFinishProcessingPhoto:error: selector.
        #[unsafe(method(virtualDeviceConstituentPhotoDeliveryEnabledDevices))]
        #[unsafe(method_family = none)]
        pub unsafe fn virtualDeviceConstituentPhotoDeliveryEnabledDevices(
            &self,
        ) -> Retained<NSArray<AVCaptureDevice>>;

        #[cfg(feature = "AVCaptureDevice")]
        /// Setter for [`virtualDeviceConstituentPhotoDeliveryEnabledDevices`][Self::virtualDeviceConstituentPhotoDeliveryEnabledDevices].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        #[unsafe(method(setVirtualDeviceConstituentPhotoDeliveryEnabledDevices:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setVirtualDeviceConstituentPhotoDeliveryEnabledDevices(
            &self,
            virtual_device_constituent_photo_delivery_enabled_devices: &NSArray<AVCaptureDevice>,
        );

        /// Specifies whether the DualCamera should return both the telephoto and wide image.
        ///
        ///
        /// Default is NO. When set to YES, your captureOutput:didFinishProcessingPhoto:error: callback will receive twice the number of callbacks, as both the telephoto image(s) and wide-angle image(s) are delivered. You may only set this property to YES if you've set your AVCapturePhotoOutput's dualCameraDualPhotoDeliveryEnabled property to YES, and your delegate responds to the captureOutput:didFinishProcessingPhoto:error: selector. As of iOS 13, this property is deprecated in favor of virtualDeviceConstituentPhotoDeliveryEnabledDevices.
        #[deprecated]
        #[unsafe(method(isDualCameraDualPhotoDeliveryEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isDualCameraDualPhotoDeliveryEnabled(&self) -> bool;

        /// Setter for [`isDualCameraDualPhotoDeliveryEnabled`][Self::isDualCameraDualPhotoDeliveryEnabled].
        #[deprecated]
        #[unsafe(method(setDualCameraDualPhotoDeliveryEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDualCameraDualPhotoDeliveryEnabled(
            &self,
            dual_camera_dual_photo_delivery_enabled: bool,
        );

        /// Specifies whether photos should be captured at the highest resolution supported by the source AVCaptureDevice's activeFormat.
        ///
        ///
        /// Default is NO. By default, AVCapturePhotoOutput emits images with the same dimensions as its source AVCaptureDevice's activeFormat.formatDescription. However, if you set this property to YES, the AVCapturePhotoOutput emits images at its source AVCaptureDevice's activeFormat.highResolutionStillImageDimensions. Note that if you enable video stabilization (see AVCaptureConnection's preferredVideoStabilizationMode) for any output, the high resolution photos emitted by AVCapturePhotoOutput may be smaller by 10 or more percent. You may inspect your AVCaptureResolvedPhotoSettings in the delegate callbacks to discover the exact dimensions of the capture photo(s).
        ///
        /// Starting in iOS 14.5 if you disable geometric distortion correction, the high resolution photo emitted by AVCapturePhotoOutput may be smaller depending on the format.
        #[deprecated = "Use maxPhotoDimensions instead."]
        #[unsafe(method(isHighResolutionPhotoEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isHighResolutionPhotoEnabled(&self) -> bool;

        /// Setter for [`isHighResolutionPhotoEnabled`][Self::isHighResolutionPhotoEnabled].
        #[deprecated = "Use maxPhotoDimensions instead."]
        #[unsafe(method(setHighResolutionPhotoEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setHighResolutionPhotoEnabled(&self, high_resolution_photo_enabled: bool);
1724
        #[cfg(feature = "objc2-core-media")]
        /// Indicates the maximum resolution photo that will be captured.
        ///
        ///
        /// By setting this property you are requesting an image that may be up to as large as the specified dimensions, but no larger. The dimensions set must match one of the dimensions returned by AVCaptureDeviceFormat.supportedMaxPhotoDimensions for the currently configured format and be equal to or smaller than the value of AVCapturePhotoOutput.maxPhotoDimensions. This property defaults to the smallest dimensions returned by AVCaptureDeviceFormat.supportedMaxPhotoDimensions.
        #[unsafe(method(maxPhotoDimensions))]
        #[unsafe(method_family = none)]
        pub unsafe fn maxPhotoDimensions(&self) -> CMVideoDimensions;

        #[cfg(feature = "objc2-core-media")]
        /// Setter for [`maxPhotoDimensions`][Self::maxPhotoDimensions].
        ///
        /// The dimensions set must match one of the dimensions returned by AVCaptureDeviceFormat.supportedMaxPhotoDimensions for the currently configured format and be equal to or smaller than the value of AVCapturePhotoOutput.maxPhotoDimensions.
        #[unsafe(method(setMaxPhotoDimensions:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMaxPhotoDimensions(&self, max_photo_dimensions: CMVideoDimensions);

        /// Specifies whether AVDepthData should be captured along with the photo.
        ///
        ///
        /// Default is NO. Set to YES if you wish to receive depth data with your photo. Throws an exception if -[AVCapturePhotoOutput depthDataDeliveryEnabled] is not set to YES or your delegate does not respond to the captureOutput:didFinishProcessingPhoto:error: selector. Note that setting this property to YES may add significant processing time to the delivery of your didFinishProcessingPhoto: callback.
        ///
        /// For best rendering results in Apple's Photos.app, portrait photos should be captured with both embedded depth data and a portrait effects matte (see portraitEffectsMatteDeliveryEnabled). When supported, it is recommended to opt in for both of these auxiliary images in your photo captures involving depth.
        #[unsafe(method(isDepthDataDeliveryEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isDepthDataDeliveryEnabled(&self) -> bool;

        /// Setter for [`isDepthDataDeliveryEnabled`][Self::isDepthDataDeliveryEnabled].
        #[unsafe(method(setDepthDataDeliveryEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDepthDataDeliveryEnabled(&self, depth_data_delivery_enabled: bool);

        /// Specifies whether depth data included with this photo should be written to the photo's file structure.
        ///
        ///
        /// Default is YES. When depthDataDeliveryEnabled is set to YES, this property specifies whether the included depth data should be written to the resulting photo's internal file structure. Depth data is currently only supported in HEIF and JPEG. This property is ignored if depthDataDeliveryEnabled is set to NO.
        #[unsafe(method(embedsDepthDataInPhoto))]
        #[unsafe(method_family = none)]
        pub unsafe fn embedsDepthDataInPhoto(&self) -> bool;

        /// Setter for [`embedsDepthDataInPhoto`][Self::embedsDepthDataInPhoto].
        #[unsafe(method(setEmbedsDepthDataInPhoto:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setEmbedsDepthDataInPhoto(&self, embeds_depth_data_in_photo: bool);

        /// Specifies whether the depth data delivered with the photo should be filtered to fill invalid values.
        ///
        ///
        /// Default is YES. This property is ignored unless depthDataDeliveryEnabled is set to YES. Depth data maps may contain invalid pixel values due to a variety of factors including occlusions and low light. When depthDataFiltered is set to YES, the photo output interpolates missing data, filling in all holes.
        #[unsafe(method(isDepthDataFiltered))]
        #[unsafe(method_family = none)]
        pub unsafe fn isDepthDataFiltered(&self) -> bool;

        /// Setter for [`isDepthDataFiltered`][Self::isDepthDataFiltered].
        #[unsafe(method(setDepthDataFiltered:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDepthDataFiltered(&self, depth_data_filtered: bool);
1780
        /// Specifies whether AVCameraCalibrationData should be captured and delivered along with this photo.
        ///
        ///
        /// Default is NO. Set to YES if you wish to receive camera calibration data with your photo. Camera calibration data is delivered as a property of an AVCapturePhoto, so if you are using the CMSampleBuffer delegate callbacks rather than -captureOutput:didFinishProcessingPhoto:error:, an exception is thrown. Also, you may only set this property to YES if your AVCapturePhotoOutput's cameraCalibrationDataDeliverySupported property is YES and 2 or more devices are selected for virtual device constituent photo delivery. When requesting virtual device constituent photo delivery plus camera calibration data, the photos for each constituent device each contain camera calibration data. Note that AVCameraCalibrationData can be delivered as a property of an AVCapturePhoto or an AVDepthData, thus your delegate must respond to the captureOutput:didFinishProcessingPhoto:error: selector.
        #[unsafe(method(isCameraCalibrationDataDeliveryEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isCameraCalibrationDataDeliveryEnabled(&self) -> bool;

        /// Setter for [`isCameraCalibrationDataDeliveryEnabled`][Self::isCameraCalibrationDataDeliveryEnabled].
        #[unsafe(method(setCameraCalibrationDataDeliveryEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCameraCalibrationDataDeliveryEnabled(
            &self,
            camera_calibration_data_delivery_enabled: bool,
        );

        /// Specifies whether an AVPortraitEffectsMatte should be captured along with the photo.
        ///
        ///
        /// Default is NO. Set to YES if you wish to receive a portrait effects matte with your photo. Throws an exception if -[AVCapturePhotoOutput portraitEffectsMatteDeliveryEnabled] is not set to YES or your delegate does not respond to the captureOutput:didFinishProcessingPhoto:error: selector. Portrait effects matte generation requires depth to be present, so if you wish to enable portrait effects matte delivery, you must set depthDataDeliveryEnabled to YES. Setting this property to YES does not guarantee that a portrait effects matte will be present in the resulting AVCapturePhoto. As the property name implies, the matte is primarily used to improve the rendering quality of portrait effects on the image. If the photo's content lacks a clear foreground subject, no portrait effects matte is generated, and the -[AVCapturePhoto portraitEffectsMatte] property returns nil. Note that setting this property to YES may add significant processing time to the delivery of your didFinishProcessingPhoto: callback.
        ///
        /// For best rendering results in Apple's Photos.app, portrait photos should be captured with both embedded depth data (see depthDataDeliveryEnabled) and a portrait effects matte. When supported, it is recommended to opt in for both of these auxiliary images in your photo captures involving depth.
        #[unsafe(method(isPortraitEffectsMatteDeliveryEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isPortraitEffectsMatteDeliveryEnabled(&self) -> bool;

        /// Setter for [`isPortraitEffectsMatteDeliveryEnabled`][Self::isPortraitEffectsMatteDeliveryEnabled].
        ///
        /// Portrait effects matte generation requires depth, so enabling this also requires [`setDepthDataDeliveryEnabled`][Self::setDepthDataDeliveryEnabled] with YES; see [`isPortraitEffectsMatteDeliveryEnabled`][Self::isPortraitEffectsMatteDeliveryEnabled] for the full requirements.
        #[unsafe(method(setPortraitEffectsMatteDeliveryEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPortraitEffectsMatteDeliveryEnabled(
            &self,
            portrait_effects_matte_delivery_enabled: bool,
        );

        /// Specifies whether the portrait effects matte captured with this photo should be written to the photo's file structure.
        ///
        ///
        /// Default is YES. When portraitEffectsMatteDeliveryEnabled is set to YES, this property specifies whether the included portrait effects matte should be written to the resulting photo's internal file structure. Portrait effects mattes are currently only supported in HEIF and JPEG. This property is ignored if portraitEffectsMatteDeliveryEnabled is set to NO.
        #[unsafe(method(embedsPortraitEffectsMatteInPhoto))]
        #[unsafe(method_family = none)]
        pub unsafe fn embedsPortraitEffectsMatteInPhoto(&self) -> bool;

        /// Setter for [`embedsPortraitEffectsMatteInPhoto`][Self::embedsPortraitEffectsMatteInPhoto].
        #[unsafe(method(setEmbedsPortraitEffectsMatteInPhoto:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setEmbedsPortraitEffectsMatteInPhoto(
            &self,
            embeds_portrait_effects_matte_in_photo: bool,
        );
1830
        #[cfg(feature = "AVSemanticSegmentationMatte")]
        /// Specifies which types of AVSemanticSegmentationMatte should be captured along with the photo.
        ///
        ///
        /// Default is empty array. You may set this property to an array of AVSemanticSegmentationMatteTypes you'd like to capture. Throws an exception if -[AVCapturePhotoOutput enabledSemanticSegmentationMatteTypes] does not contain any of the AVSemanticSegmentationMatteTypes specified. In other words, when setting up a capture session, you opt in for the superset of segmentation matte types you might like to receive, and then on a shot-by-shot basis, you may opt in to all or a subset of the previously specified types by setting this property. An exception is also thrown during -[AVCapturePhotoOutput capturePhotoWithSettings:delegate:] if your delegate does not respond to the captureOutput:didFinishProcessingPhoto:error: selector. Setting this property to a non-empty array does not guarantee that the specified mattes will be present in the resulting AVCapturePhoto. If the photo's content lacks any persons, for instance, no hair, skin, or teeth mattes are generated, and the -[AVCapturePhoto semanticSegmentationMatteForType:] property returns nil. Note that setting this property to a non-empty array may add significant processing time to the delivery of your didFinishProcessingPhoto: callback.
        #[unsafe(method(enabledSemanticSegmentationMatteTypes))]
        #[unsafe(method_family = none)]
        pub unsafe fn enabledSemanticSegmentationMatteTypes(
            &self,
        ) -> Retained<NSArray<AVSemanticSegmentationMatteType>>;

        #[cfg(feature = "AVSemanticSegmentationMatte")]
        /// Setter for [`enabledSemanticSegmentationMatteTypes`][Self::enabledSemanticSegmentationMatteTypes].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        #[unsafe(method(setEnabledSemanticSegmentationMatteTypes:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setEnabledSemanticSegmentationMatteTypes(
            &self,
            enabled_semantic_segmentation_matte_types: &NSArray<AVSemanticSegmentationMatteType>,
        );

        /// Specifies whether enabledSemanticSegmentationMatteTypes captured with this photo should be written to the photo's file structure.
        ///
        ///
        /// Default is YES. This property specifies whether the captured semantic segmentation mattes should be written to the resulting photo's internal file structure. Semantic segmentation mattes are currently only supported in HEIF and JPEG. This property is ignored if enabledSemanticSegmentationMatteTypes is set to an empty array.
        #[unsafe(method(embedsSemanticSegmentationMattesInPhoto))]
        #[unsafe(method_family = none)]
        pub unsafe fn embedsSemanticSegmentationMattesInPhoto(&self) -> bool;

        /// Setter for [`embedsSemanticSegmentationMattesInPhoto`][Self::embedsSemanticSegmentationMattesInPhoto].
        #[unsafe(method(setEmbedsSemanticSegmentationMattesInPhoto:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setEmbedsSemanticSegmentationMattesInPhoto(
            &self,
            embeds_semantic_segmentation_mattes_in_photo: bool,
        );
1868
        /// A dictionary of metadata key/value pairs you'd like to have written to each photo in the capture request.
        ///
        ///
        /// Valid metadata keys are found in `<ImageIO/CGImageProperties.h>`. AVCapturePhotoOutput inserts a base set of metadata into each photo it captures, such as kCGImagePropertyOrientation, kCGImagePropertyExifDictionary, and kCGImagePropertyMakerAppleDictionary. You may specify metadata keys and values that should be written to each photo in the capture request. If you've specified metadata that also appears in AVCapturePhotoOutput's base set, your value replaces the base value. An NSInvalidArgumentException is thrown if you specify keys other than those found in `<ImageIO/CGImageProperties.h>`.
        #[unsafe(method(metadata))]
        #[unsafe(method_family = none)]
        pub unsafe fn metadata(&self) -> Retained<NSDictionary<NSString, AnyObject>>;

        /// Setter for [`metadata`][Self::metadata].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        ///
        /// # Safety
        ///
        /// `metadata` generic should be of the correct type.
        #[unsafe(method(setMetadata:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMetadata(&self, metadata: &NSDictionary<NSString, AnyObject>);

        /// Specifies that a Live Photo movie be captured to complement the still photo.
        ///
        ///
        /// A Live Photo movie is a short movie (with audio, if you've added an audio input to your session) containing the moments right before and after the still photo. A QuickTime movie file will be written to disk at the URL specified if it is a valid file URL accessible to your app's sandbox. You may only set this property if AVCapturePhotoOutput's livePhotoCaptureSupported property is YES. When you specify a Live Photo, your AVCapturePhotoCaptureDelegate object must implement -captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error:.
        #[unsafe(method(livePhotoMovieFileURL))]
        #[unsafe(method_family = none)]
        pub unsafe fn livePhotoMovieFileURL(&self) -> Option<Retained<NSURL>>;

        /// Setter for [`livePhotoMovieFileURL`][Self::livePhotoMovieFileURL].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        #[unsafe(method(setLivePhotoMovieFileURL:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setLivePhotoMovieFileURL(&self, live_photo_movie_file_url: Option<&NSURL>);
1906
1907        #[cfg(feature = "AVVideoSettings")]
1908        /// Specifies the video codec type to use when compressing video for the Live Photo movie complement.
1909        ///
1910        ///
1911        /// Prior to iOS 11, all Live Photo movie video tracks are compressed using H.264. Beginning in iOS 11, you can select the Live Photo movie video compression format by specifying one of the strings present in AVCapturePhotoOutput's availableLivePhotoVideoCodecTypes array.
1912        #[unsafe(method(livePhotoVideoCodecType))]
1913        #[unsafe(method_family = none)]
1914        pub unsafe fn livePhotoVideoCodecType(&self) -> Retained<AVVideoCodecType>;
1915
1916        #[cfg(feature = "AVVideoSettings")]
1917        /// Setter for [`livePhotoVideoCodecType`][Self::livePhotoVideoCodecType].
1918        ///
1919        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
1920        #[unsafe(method(setLivePhotoVideoCodecType:))]
1921        #[unsafe(method_family = none)]
1922        pub unsafe fn setLivePhotoVideoCodecType(
1923            &self,
1924            live_photo_video_codec_type: &AVVideoCodecType,
1925        );
1926
1927        #[cfg(feature = "AVMetadataItem")]
1928        /// Movie-level metadata to be written to the Live Photo movie.
1929        ///
1930        ///
1931        /// An array of AVMetadataItems to be inserted into the top level of the Live Photo movie. The receiver makes immutable copies of the AVMetadataItems in the array. Live Photo movies always contain a AVMetadataQuickTimeMetadataKeyContentIdentifier which allow them to be paired with a similar identifier in the MakerNote of the photo complement. AVCapturePhotoSettings generates a unique content identifier for you. If you provide a metadata array containing an AVMetadataItem with keyspace = AVMetadataKeySpaceQuickTimeMetadata and key = AVMetadataQuickTimeMetadataKeyContentIdentifier, an NSInvalidArgumentException is thrown.
1932        #[unsafe(method(livePhotoMovieMetadata))]
1933        #[unsafe(method_family = none)]
1934        pub unsafe fn livePhotoMovieMetadata(&self) -> Retained<NSArray<AVMetadataItem>>;
1935
1936        #[cfg(feature = "AVMetadataItem")]
1937        /// Setter for [`livePhotoMovieMetadata`][Self::livePhotoMovieMetadata].
1938        ///
1939        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
1940        #[unsafe(method(setLivePhotoMovieMetadata:))]
1941        #[unsafe(method_family = none)]
1942        pub unsafe fn setLivePhotoMovieMetadata(
1943            &self,
1944            live_photo_movie_metadata: Option<&NSArray<AVMetadataItem>>,
1945        );
1946
1947        /// An array of available kCVPixelBufferPixelFormatTypeKeys that may be used when specifying a previewPhotoFormat.
1948        ///
1949        ///
1950        /// The array is sorted such that the preview format requiring the fewest conversions is presented first.
1951        #[unsafe(method(availablePreviewPhotoPixelFormatTypes))]
1952        #[unsafe(method_family = none)]
1953        pub unsafe fn availablePreviewPhotoPixelFormatTypes(&self) -> Retained<NSArray<NSNumber>>;
1954
1955        /// A dictionary of Core Video pixel buffer attributes specifying the preview photo format to be delivered along with the RAW or processed photo.
1956        ///
1957        ///
1958        /// A dictionary of pixel buffer attributes specifying a smaller version of the RAW or processed photo for preview purposes. The kCVPixelBufferPixelFormatTypeKey is required and must be present in the receiver's -availablePreviewPhotoPixelFormatTypes array. Optional keys are { kCVPixelBufferWidthKey | kCVPixelBufferHeightKey }. If you wish to specify dimensions, you must add both width and height. Width and height are only honored up to the display dimensions. If you specify a width and height whose aspect ratio differs from the RAW or processed photo, the larger of the two dimensions is honored and aspect ratio of the RAW or processed photo is always preserved.
1959        #[unsafe(method(previewPhotoFormat))]
1960        #[unsafe(method_family = none)]
1961        pub unsafe fn previewPhotoFormat(
1962            &self,
1963        ) -> Option<Retained<NSDictionary<NSString, AnyObject>>>;
1964
1965        /// Setter for [`previewPhotoFormat`][Self::previewPhotoFormat].
1966        ///
1967        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
1968        ///
1969        /// # Safety
1970        ///
1971        /// `preview_photo_format` generic should be of the correct type.
1972        #[unsafe(method(setPreviewPhotoFormat:))]
1973        #[unsafe(method_family = none)]
1974        pub unsafe fn setPreviewPhotoFormat(
1975            &self,
1976            preview_photo_format: Option<&NSDictionary<NSString, AnyObject>>,
1977        );
1978
1979        #[cfg(feature = "AVVideoSettings")]
1980        /// An array of available AVVideoCodecKeys that may be used when specifying an embeddedThumbnailPhotoFormat.
1981        ///
1982        ///
1983        /// The array is sorted such that the thumbnail codec type that is most backward compatible is listed first.
1984        #[unsafe(method(availableEmbeddedThumbnailPhotoCodecTypes))]
1985        #[unsafe(method_family = none)]
1986        pub unsafe fn availableEmbeddedThumbnailPhotoCodecTypes(
1987            &self,
1988        ) -> Retained<NSArray<AVVideoCodecType>>;
1989
1990        /// A dictionary of AVVideoSettings keys specifying the thumbnail format to be written to the processed or RAW photo.
1991        ///
1992        ///
1993        /// A dictionary of AVVideoSettings keys specifying a thumbnail (usually smaller) version of the processed photo to be embedded in that image before calling the AVCapturePhotoCaptureDelegate. This image is sometimes referred to as a "thumbnail image". The AVVideoCodecKey is required and must be present in the receiver's -availableEmbeddedThumbnailPhotoCodecTypes array. Optional keys are { AVVideoWidthKey | AVVideoHeightKey }. If you wish to specify dimensions, you must specify both width and height. If you specify a width and height whose aspect ratio differs from the processed photo, the larger of the two dimensions is honored and aspect ratio of the RAW or processed photo is always preserved. For RAW captures, use -rawEmbeddedThumbnailPhotoFormat to specify the thumbnail format you'd like to capture in the RAW image. For apps linked on or after iOS 12, the raw thumbnail format must be specified using the -rawEmbeddedThumbnailPhotoFormat API rather than -embeddedThumbnailPhotoFormat. Beginning in iOS 12, HEIC files may contain thumbnails up to the full resolution of the main image.
1994        #[unsafe(method(embeddedThumbnailPhotoFormat))]
1995        #[unsafe(method_family = none)]
1996        pub unsafe fn embeddedThumbnailPhotoFormat(
1997            &self,
1998        ) -> Option<Retained<NSDictionary<NSString, AnyObject>>>;
1999
2000        /// Setter for [`embeddedThumbnailPhotoFormat`][Self::embeddedThumbnailPhotoFormat].
2001        ///
2002        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
2003        ///
2004        /// # Safety
2005        ///
2006        /// `embedded_thumbnail_photo_format` generic should be of the correct type.
2007        #[unsafe(method(setEmbeddedThumbnailPhotoFormat:))]
2008        #[unsafe(method_family = none)]
2009        pub unsafe fn setEmbeddedThumbnailPhotoFormat(
2010            &self,
2011            embedded_thumbnail_photo_format: Option<&NSDictionary<NSString, AnyObject>>,
2012        );
2013
2014        #[cfg(feature = "AVVideoSettings")]
2015        /// An array of available AVVideoCodecKeys that may be used when specifying a rawEmbeddedThumbnailPhotoFormat.
2016        ///
2017        ///
2018        /// The array is sorted such that the thumbnail codec type that is most backward compatible is listed first.
2019        #[unsafe(method(availableRawEmbeddedThumbnailPhotoCodecTypes))]
2020        #[unsafe(method_family = none)]
2021        pub unsafe fn availableRawEmbeddedThumbnailPhotoCodecTypes(
2022            &self,
2023        ) -> Retained<NSArray<AVVideoCodecType>>;
2024
2025        /// A dictionary of AVVideoSettings keys specifying the thumbnail format to be written to the RAW photo in a RAW photo request.
2026        ///
2027        ///
2028        /// A dictionary of AVVideoSettings keys specifying a thumbnail (usually smaller) version of the RAW photo to be embedded in that image's DNG before calling back the AVCapturePhotoCaptureDelegate. The AVVideoCodecKey is required and must be present in the receiver's -availableRawEmbeddedThumbnailPhotoCodecTypes array. Optional keys are { AVVideoWidthKey | AVVideoHeightKey }. If you wish to specify dimensions, you must specify both width and height. If you specify a width and height whose aspect ratio differs from the RAW or processed photo, the larger of the two dimensions is honored and aspect ratio of the RAW or processed photo is always preserved. For apps linked on or after iOS 12, the raw thumbnail format must be specified using the -rawEmbeddedThumbnailPhotoFormat API rather than -embeddedThumbnailPhotoFormat. Beginning in iOS 12, DNG files may contain thumbnails up to the full resolution of the RAW image.
2029        #[unsafe(method(rawEmbeddedThumbnailPhotoFormat))]
2030        #[unsafe(method_family = none)]
2031        pub unsafe fn rawEmbeddedThumbnailPhotoFormat(
2032            &self,
2033        ) -> Option<Retained<NSDictionary<NSString, AnyObject>>>;
2034
2035        /// Setter for [`rawEmbeddedThumbnailPhotoFormat`][Self::rawEmbeddedThumbnailPhotoFormat].
2036        ///
2037        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
2038        ///
2039        /// # Safety
2040        ///
2041        /// `raw_embedded_thumbnail_photo_format` generic should be of the correct type.
2042        #[unsafe(method(setRawEmbeddedThumbnailPhotoFormat:))]
2043        #[unsafe(method_family = none)]
2044        pub unsafe fn setRawEmbeddedThumbnailPhotoFormat(
2045            &self,
2046            raw_embedded_thumbnail_photo_format: Option<&NSDictionary<NSString, AnyObject>>,
2047        );
2048
2049        /// Specifies whether the photo output should use content aware distortion correction on this photo request (at its discretion).
2050        ///
2051        ///
2052        /// Default is NO. Set to YES if you wish content aware distortion correction to be performed on your AVCapturePhotos, when the photo output deems it necessary. Photos may or may not benefit from distortion correction. For instance, photos lacking faces may be left as is. Setting this property to YES does introduce a small additional amount of latency to the photo processing. You may check your AVCaptureResolvedPhotoSettings to see whether content aware distortion correction will be enabled for a given photo request. Throws an exception if -[AVCapturePhotoOutput contentAwareDistortionCorrectionEnabled] is not set to YES.
2053        #[unsafe(method(isAutoContentAwareDistortionCorrectionEnabled))]
2054        #[unsafe(method_family = none)]
2055        pub unsafe fn isAutoContentAwareDistortionCorrectionEnabled(&self) -> bool;
2056
2057        /// Setter for [`isAutoContentAwareDistortionCorrectionEnabled`][Self::isAutoContentAwareDistortionCorrectionEnabled].
2058        #[unsafe(method(setAutoContentAwareDistortionCorrectionEnabled:))]
2059        #[unsafe(method_family = none)]
2060        pub unsafe fn setAutoContentAwareDistortionCorrectionEnabled(
2061            &self,
2062            auto_content_aware_distortion_correction_enabled: bool,
2063        );
2064
2065        /// Specifies whether the photo will be captured with constant color.
2066        ///
2067        ///
2068        /// Default is NO. Set to YES if you wish to capture a constant color photo. Throws an exception if -[AVCapturePhotoOutput constantColorEnabled] is not set to YES.
2069        #[unsafe(method(isConstantColorEnabled))]
2070        #[unsafe(method_family = none)]
2071        pub unsafe fn isConstantColorEnabled(&self) -> bool;
2072
2073        /// Setter for [`isConstantColorEnabled`][Self::isConstantColorEnabled].
2074        #[unsafe(method(setConstantColorEnabled:))]
2075        #[unsafe(method_family = none)]
2076        pub unsafe fn setConstantColorEnabled(&self, constant_color_enabled: bool);
2077
2078        /// Specifies whether a fallback photo is delivered when taking a constant color capture.
2079        ///
2080        ///
2081        /// Default is NO. Set to YES if you wish to receive a fallback photo that can be used in case the main constant color photo's confidence level is too low for your use case.
2082        #[unsafe(method(isConstantColorFallbackPhotoDeliveryEnabled))]
2083        #[unsafe(method_family = none)]
2084        pub unsafe fn isConstantColorFallbackPhotoDeliveryEnabled(&self) -> bool;
2085
2086        /// Setter for [`isConstantColorFallbackPhotoDeliveryEnabled`][Self::isConstantColorFallbackPhotoDeliveryEnabled].
2087        #[unsafe(method(setConstantColorFallbackPhotoDeliveryEnabled:))]
2088        #[unsafe(method_family = none)]
2089        pub unsafe fn setConstantColorFallbackPhotoDeliveryEnabled(
2090            &self,
2091            constant_color_fallback_photo_delivery_enabled: bool,
2092        );
2093
2094        /// Specifies whether the built-in shutter sound should be suppressed when capturing a photo with these settings.
2095        ///
2096        ///
2097        /// Default is NO. Set to YES if you wish to suppress AVCapturePhotoOutput's built-in shutter sound for this request. AVCapturePhotoOutput throws an NSInvalidArgumentException in `-capturePhotoWithSettings:` if its `shutterSoundSuppressionSupported` property returns NO.
2098        #[unsafe(method(isShutterSoundSuppressionEnabled))]
2099        #[unsafe(method_family = none)]
2100        pub unsafe fn isShutterSoundSuppressionEnabled(&self) -> bool;
2101
2102        /// Setter for [`isShutterSoundSuppressionEnabled`][Self::isShutterSoundSuppressionEnabled].
2103        #[unsafe(method(setShutterSoundSuppressionEnabled:))]
2104        #[unsafe(method_family = none)]
2105        pub unsafe fn setShutterSoundSuppressionEnabled(
2106            &self,
2107            shutter_sound_suppression_enabled: bool,
2108        );
2109    );
2110}
2111
/// Methods declared on superclass `NSObject`.
impl AVCapturePhotoSettings {
    extern_methods!(
        /// Initializes an allocated `AVCapturePhotoSettings` instance
        /// (maps to Objective-C `-init`).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Allocates and initializes a new `AVCapturePhotoSettings`
        /// in one step (maps to Objective-C `+new`).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
2124
extern_class!(
    /// A concrete subclass of AVCapturePhotoSettings that describes a bracketed capture.
    ///
    ///
    /// In addition to the properties expressed in the base class, an AVCapturePhotoBracketSettings contains an array of AVCaptureBracketedStillImageSettings objects, where each describes one individual photo in the bracket. bracketedSettings.count must be `<=` AVCapturePhotoOutput's -maxBracketedCapturePhotoCount. Capturing a photo bracket may require the allocation of additional resources.
    ///
    /// When you request a bracketed capture, your AVCapturePhotoCaptureDelegate's -captureOutput:didFinishProcessing{Photo | RawPhoto}... callbacks are called back bracketSettings.count times and provided with the corresponding AVCaptureBracketedStillImageSettings object from your request.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturephotobracketsettings?language=objc)
    #[unsafe(super(AVCapturePhotoSettings, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCapturePhotoBracketSettings;
);
2140
// Declares (to the Rust type system) that the Objective-C class conforms to
// NSCopying, enabling `copy()` on AVCapturePhotoBracketSettings references.
extern_conformance!(
    unsafe impl NSCopying for AVCapturePhotoBracketSettings {}
);
2144
// `Result = Self`: copying an AVCapturePhotoBracketSettings yields the same
// concrete class, so `copy()` is typed to return this subclass rather than
// the AVCapturePhotoSettings superclass.
unsafe impl CopyingHelper for AVCapturePhotoBracketSettings {
    type Result = Self;
}
2148
// Declares conformance to the base NSObject protocol (isEqual:, hash, etc.).
extern_conformance!(
    unsafe impl NSObjectProtocol for AVCapturePhotoBracketSettings {}
);
2152
impl AVCapturePhotoBracketSettings {
    extern_methods!(
        #[cfg(feature = "AVCaptureStillImageOutput")]
        /// Creates an instance of AVCapturePhotoBracketSettings.
        ///
        ///
        /// Parameter `rawPixelFormatType`: One of the OSTypes contained in AVCapturePhotoOutput's -availableRawPhotoPixelFormatTypes array. May be set to 0 if you do not desire RAW capture.
        ///
        /// Parameter `processedFormat`: A dictionary of Core Video pixel buffer attributes or AVVideoSettings, analogous to AVCaptureStillImageOutput's outputSettings property. If you wish an uncompressed format, your dictionary must contain kCVPixelBufferPixelFormatTypeKey, and the format specified must be present in AVCapturePhotoOutput's -availablePhotoPixelFormatTypes array. kCVPixelBufferPixelFormatTypeKey is the only supported key when expressing uncompressed output. If you wish a compressed format, your dictionary must contain AVVideoCodecKey and the codec specified must be present in AVCapturePhotoOutput's -availablePhotoCodecTypes array. If you are specifying a compressed format, the AVVideoCompressionPropertiesKey is also supported, with a payload dictionary containing a single AVVideoQualityKey. If you only wish to capture RAW, you may pass a non-zero rawPixelFormatType and a nil processedFormat dictionary. If you pass a rawPixelFormatType of 0 AND a nil processedFormat dictionary, the default output of AVVideoCodecTypeJPEG will be delivered.
        ///
        /// Parameter `bracketedSettings`: An array of AVCaptureBracketedStillImageSettings objects (defined in AVCaptureStillImageOutput.h). All must be of the same type, either AVCaptureManualExposureBracketedStillImageSettings or AVCaptureAutoExposureBracketedStillImageSettings. bracketedSettings.count must be `<=` AVCapturePhotoOutput's -maxBracketedCapturePhotoCount.
        ///
        /// Returns: An instance of AVCapturePhotoBracketSettings.
        ///
        ///
        /// An NSInvalidArgumentException is thrown if bracketedSettings is nil, contains zero elements, or mixes and matches different subclasses of AVCaptureBracketedStillImageSettings.
        ///
        /// AVCapturePhotoBracketSettings do not support flashMode, autoStillImageStabilizationEnabled, livePhotoMovieFileURL or livePhotoMovieMetadata.
        ///
        /// # Safety
        ///
        /// `processed_format` generic should be of the correct type.
        #[unsafe(method(photoBracketSettingsWithRawPixelFormatType:processedFormat:bracketedSettings:))]
        #[unsafe(method_family = none)]
        pub unsafe fn photoBracketSettingsWithRawPixelFormatType_processedFormat_bracketedSettings(
            raw_pixel_format_type: OSType,
            processed_format: Option<&NSDictionary<NSString, AnyObject>>,
            bracketed_settings: &NSArray<AVCaptureBracketedStillImageSettings>,
        ) -> Retained<Self>;

        #[cfg(all(feature = "AVCaptureStillImageOutput", feature = "AVMediaFormat"))]
        /// Creates an instance of AVCapturePhotoBracketSettings.
        ///
        ///
        /// Parameter `rawPixelFormatType`: One of the OSTypes contained in AVCapturePhotoOutput's -availableRawPhotoPixelFormatTypes array. May be set to 0 if you do not desire RAW capture.
        ///
        /// Parameter `rawFileType`: The file container for which the RAW image should be formatted to be written. Pass nil if you have no preferred file container. A default container will be chosen for you.
        ///
        /// Parameter `processedFormat`: A dictionary of Core Video pixel buffer attributes or AVVideoSettings, analogous to AVCaptureStillImageOutput's outputSettings property. If you wish an uncompressed format, your dictionary must contain kCVPixelBufferPixelFormatTypeKey, and the format specified must be present in AVCapturePhotoOutput's -availablePhotoPixelFormatTypes array. kCVPixelBufferPixelFormatTypeKey is the only supported key when expressing uncompressed output. If you wish a compressed format, your dictionary must contain AVVideoCodecKey and the codec specified must be present in AVCapturePhotoOutput's -availablePhotoCodecTypes array. If you are specifying a compressed format, the AVVideoCompressionPropertiesKey is also supported, with a payload dictionary containing a single AVVideoQualityKey. If you only wish to capture RAW, you may pass a non-zero rawPixelFormatType and a nil processedFormat dictionary. If you pass a rawPixelFormatType of 0 AND a nil processedFormat dictionary, the default output of AVVideoCodecTypeJPEG will be delivered.
        ///
        /// Parameter `processedFileType`: The file container for which the processed image should be formatted to be written. Pass nil if you have no preferred file container. A default container will be chosen for you.
        ///
        /// Parameter `bracketedSettings`: An array of AVCaptureBracketedStillImageSettings objects (defined in AVCaptureStillImageOutput.h). All must be of the same type, either AVCaptureManualExposureBracketedStillImageSettings or AVCaptureAutoExposureBracketedStillImageSettings. bracketedSettings.count must be `<=` AVCapturePhotoOutput's -maxBracketedCapturePhotoCount.
        ///
        /// Returns: An instance of AVCapturePhotoBracketSettings.
        ///
        ///
        /// An NSInvalidArgumentException is thrown if bracketedSettings is nil, contains zero elements, or mixes and matches different subclasses of AVCaptureBracketedStillImageSettings.
        ///
        /// AVCapturePhotoBracketSettings do not support flashMode, autoStillImageStabilizationEnabled, livePhotoMovieFileURL or livePhotoMovieMetadata.
        ///
        /// # Safety
        ///
        /// `processed_format` generic should be of the correct type.
        #[unsafe(method(photoBracketSettingsWithRawPixelFormatType:rawFileType:processedFormat:processedFileType:bracketedSettings:))]
        #[unsafe(method_family = none)]
        pub unsafe fn photoBracketSettingsWithRawPixelFormatType_rawFileType_processedFormat_processedFileType_bracketedSettings(
            raw_pixel_format_type: OSType,
            raw_file_type: Option<&AVFileType>,
            processed_format: Option<&NSDictionary<NSString, AnyObject>>,
            processed_file_type: Option<&AVFileType>,
            bracketed_settings: &NSArray<AVCaptureBracketedStillImageSettings>,
        ) -> Retained<Self>;

        #[cfg(feature = "AVCaptureStillImageOutput")]
        /// An array of AVCaptureBracketedStillImageSettings objects you passed in -initWithFormat:rawPixelFormatType:bracketedSettings:
        ///
        ///
        /// This read-only property never returns nil.
        #[unsafe(method(bracketedSettings))]
        #[unsafe(method_family = none)]
        pub unsafe fn bracketedSettings(
            &self,
        ) -> Retained<NSArray<AVCaptureBracketedStillImageSettings>>;

        /// Specifies whether lens (optical) stabilization should be employed during the bracketed capture.
        ///
        ///
        /// Default value is NO. This property may only be set to YES if AVCapturePhotoOutput's isLensStabilizationDuringBracketedCaptureSupported is YES. When set to YES, AVCapturePhotoOutput holds the lens steady for the duration of the bracket to counter hand shake and produce a sharper bracket of images.
        #[unsafe(method(isLensStabilizationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isLensStabilizationEnabled(&self) -> bool;

        /// Setter for [`isLensStabilizationEnabled`][Self::isLensStabilizationEnabled].
        #[unsafe(method(setLensStabilizationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setLensStabilizationEnabled(&self, lens_stabilization_enabled: bool);
    );
}
2246
2247/// Methods declared on superclass `AVCapturePhotoSettings`.
2248impl AVCapturePhotoBracketSettings {
2249    extern_methods!(
2250        /// Creates a default instance of AVCapturePhotoSettings.
2251        ///
2252        ///
2253        /// Returns: An instance of AVCapturePhotoSettings.
2254        ///
2255        ///
2256        /// A default AVCapturePhotoSettings object has a format of AVVideoCodecTypeJPEG, a fileType of AVFileTypeJPEG, and photoQualityPrioritization set to AVCapturePhotoQualityPrioritizationBalanced.
2257        #[unsafe(method(photoSettings))]
2258        #[unsafe(method_family = none)]
2259        pub unsafe fn photoSettings() -> Retained<Self>;
2260
2261        /// Creates an instance of AVCapturePhotoSettings with a user-specified output format.
2262        ///
2263        ///
2264        /// Parameter `format`: A dictionary of Core Video pixel buffer attributes or AVVideoSettings, analogous to AVCaptureStillImageOutput's outputSettings property.
2265        ///
2266        /// Returns: An instance of AVCapturePhotoSettings.
2267        ///
2268        ///
2269        /// If you wish an uncompressed format, your dictionary must contain kCVPixelBufferPixelFormatTypeKey, and the format specified must be present in AVCapturePhotoOutput's -availablePhotoPixelFormatTypes array. kCVPixelBufferPixelFormatTypeKey is the only supported key when expressing uncompressed output. If you wish a compressed format, your dictionary must contain AVVideoCodecKey and the codec specified must be present in AVCapturePhotoOutput's -availablePhotoCodecTypes array. If you are specifying a compressed format, the AVVideoCompressionPropertiesKey is also supported, with a payload dictionary containing a single AVVideoQualityKey. Passing a nil format dictionary is analogous to calling +photoSettings.
2270        ///
2271        /// # Safety
2272        ///
2273        /// `format` generic should be of the correct type.
2274        #[unsafe(method(photoSettingsWithFormat:))]
2275        #[unsafe(method_family = none)]
2276        pub unsafe fn photoSettingsWithFormat(
2277            format: Option<&NSDictionary<NSString, AnyObject>>,
2278        ) -> Retained<Self>;
2279
2280        /// Creates an instance of AVCapturePhotoSettings specifying RAW only output.
2281        ///
2282        ///
2283        /// Parameter `rawPixelFormatType`: A Bayer RAW or Apple ProRAW pixel format OSType (defined in CVPixelBuffer.h).
2284        ///
2285        /// Returns: An instance of AVCapturePhotoSettings.
2286        ///
2287        ///
2288        /// rawPixelFormatType must be one of the OSTypes contained in AVCapturePhotoOutput's -availableRawPhotoPixelFormatTypes array. See AVCapturePhotoOutput's -capturePhotoWithSettings:delegate: inline documentation for a discussion of restrictions on AVCapturePhotoSettings when requesting RAW capture.
2289        #[unsafe(method(photoSettingsWithRawPixelFormatType:))]
2290        #[unsafe(method_family = none)]
2291        pub unsafe fn photoSettingsWithRawPixelFormatType(
2292            raw_pixel_format_type: OSType,
2293        ) -> Retained<Self>;
2294
2295        /// Creates an instance of AVCapturePhotoSettings specifying RAW + a processed format (such as JPEG).
2296        ///
2297        ///
2298        /// Parameter `rawPixelFormatType`: A Bayer RAW or Apple ProRAW pixel format OSType (defined in CVPixelBuffer.h).
2299        ///
2300        /// Parameter `processedFormat`: A dictionary of Core Video pixel buffer attributes or AVVideoSettings, analogous to AVCaptureStillImageOutput's outputSettings property.
2301        ///
2302        /// Returns: An instance of AVCapturePhotoSettings.
2303        ///
2304        ///
2305        /// rawPixelFormatType must be one of the OSTypes contained in AVCapturePhotoOutput's -availableRawPhotoPixelFormatTypes array. If you wish an uncompressed processedFormat, your dictionary must contain kCVPixelBufferPixelFormatTypeKey, and the processedFormat specified must be present in AVCapturePhotoOutput's -availablePhotoPixelFormatTypes array. kCVPixelBufferPixelFormatTypeKey is the only supported key when expressing uncompressed processedFormat. If you wish a compressed format, your dictionary must contain AVVideoCodecKey and the codec specified must be present in AVCapturePhotoOutput's -availablePhotoCodecTypes array. If you are specifying a compressed format, the AVVideoCompressionPropertiesKey is also supported, with a payload dictionary containing a single AVVideoQualityKey. Passing a nil processedFormat dictionary is analogous to calling +photoSettingsWithRawPixelFormatType:. See AVCapturePhotoOutput's -capturePhotoWithSettings:delegate: inline documentation for a discussion of restrictions on AVCapturePhotoSettings when requesting RAW capture.
2306        ///
2307        /// # Safety
2308        ///
2309        /// `processed_format` generic should be of the correct type.
2310        #[unsafe(method(photoSettingsWithRawPixelFormatType:processedFormat:))]
2311        #[unsafe(method_family = none)]
2312        pub unsafe fn photoSettingsWithRawPixelFormatType_processedFormat(
2313            raw_pixel_format_type: OSType,
2314            processed_format: Option<&NSDictionary<NSString, AnyObject>>,
2315        ) -> Retained<Self>;
2316
2317        #[cfg(feature = "AVMediaFormat")]
2318        /// Creates an instance of AVCapturePhotoSettings specifying RAW + a processed format (such as JPEG) and a file container to which it will be written.
2319        ///
2320        ///
2321        /// Parameter `rawPixelFormatType`: A Bayer RAW or Apple ProRAW pixel format OSType (defined in CVPixelBuffer.h). Pass 0 if you do not desire a RAW photo callback.
2322        ///
2323        /// Parameter `rawFileType`: The file container for which the RAW image should be formatted to be written. Pass nil if you have no preferred file container. A default container will be chosen for you.
2324        ///
2325        /// Parameter `processedFormat`: A dictionary of Core Video pixel buffer attributes or AVVideoSettings, analogous to AVCaptureStillImageOutput's outputSettings property. Pass nil if you do not desire a processed photo callback.
2326        ///
2327        /// Parameter `processedFileType`: The file container for which the processed image should be formatted to be written. Pass nil if you have no preferred file container. A default container will be chosen for you.
2328        ///
2329        /// Returns: An instance of AVCapturePhotoSettings.
2330        ///
2331        ///
2332        /// rawPixelFormatType must be one of the OSTypes contained in AVCapturePhotoOutput's -availableRawPhotoPixelFormatTypes array. Set rawPixelFormatType to 0 if you do not desire a RAW photo callback. If you are specifying a rawFileType, it must be present in AVCapturePhotoOutput's -availableRawPhotoFileTypes array. If you wish an uncompressed processedFormat, your dictionary must contain kCVPixelBufferPixelFormatTypeKey, and the processedFormat specified must be present in AVCapturePhotoOutput's -availablePhotoPixelFormatTypes array. kCVPixelBufferPixelFormatTypeKey is the only supported key when expressing uncompressed processedFormat. If you wish a compressed format, your dictionary must contain AVVideoCodecKey and the codec specified must be present in AVCapturePhotoOutput's -availablePhotoCodecTypes array. If you are specifying a compressed format, the AVVideoCompressionPropertiesKey is also supported, with a payload dictionary containing a single AVVideoQualityKey. If you are specifying a processedFileType (such as AVFileTypeJPEG, AVFileTypeHEIC or AVFileTypeDICOM), it must be present in AVCapturePhotoOutput's -availablePhotoFileTypes array. Pass a nil processedFormat dictionary if you only desire a RAW photo capture. See AVCapturePhotoOutput's -capturePhotoWithSettings:delegate: inline documentation for a discussion of restrictions on AVCapturePhotoSettings when requesting RAW capture.
2333        ///
2334        /// # Safety
2335        ///
2336        /// `processed_format` generic should be of the correct type.
2337        #[unsafe(method(photoSettingsWithRawPixelFormatType:rawFileType:processedFormat:processedFileType:))]
2338        #[unsafe(method_family = none)]
2339        pub unsafe fn photoSettingsWithRawPixelFormatType_rawFileType_processedFormat_processedFileType(
2340            raw_pixel_format_type: OSType,
2341            raw_file_type: Option<&AVFileType>,
2342            processed_format: Option<&NSDictionary<NSString, AnyObject>>,
2343            processed_file_type: Option<&AVFileType>,
2344        ) -> Retained<Self>;
2345
2346        /// Creates an instance of AVCapturePhotoSettings with a new uniqueID from an existing instance of AVCapturePhotoSettings.
2347        ///
2348        ///
2349        /// Parameter `photoSettings`: An existing AVCapturePhotoSettings instance.
2350        ///
2351        /// Returns: An new instance of AVCapturePhotoSettings with new uniqueID.
2352        ///
2353        ///
2354        /// Use this factory method to create a clone of an existing photo settings instance, but with a new uniqueID that can safely be passed to AVCapturePhotoOutput -capturePhotoWithSettings:delegate:.
2355        #[unsafe(method(photoSettingsFromPhotoSettings:))]
2356        #[unsafe(method_family = none)]
2357        pub unsafe fn photoSettingsFromPhotoSettings(
2358            photo_settings: &AVCapturePhotoSettings,
2359        ) -> Retained<Self>;
2360    );
2361}
2362
/// Methods declared on superclass `NSObject`.
impl AVCapturePhotoBracketSettings {
    extern_methods!(
        /// Plain `-[NSObject init]`. Prefer the dedicated
        /// `photoBracketSettingsWith…` factory methods for configured instances.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Plain `+[NSObject new]` (alloc + init).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
2375
extern_class!(
    /// An immutable object produced by callbacks in each and every AVCapturePhotoCaptureDelegate protocol method.
    ///
    /// When you initiate a photo capture request using -capturePhotoWithSettings:delegate:, some of your settings are not yet certain. For instance, auto flash and auto still image stabilization allow the AVCapturePhotoOutput to decide just in time whether to employ flash or still image stabilization, depending on the current scene. Once the request is issued, AVCapturePhotoOutput begins the capture, resolves the uncertain settings, and in its first callback informs you of its choices through an AVCaptureResolvedPhotoSettings object. This same object is presented to all the callbacks fired for a particular photo capture request. Its uniqueID property matches that of the AVCapturePhotoSettings instance you used to initiate the photo request.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureresolvedphotosettings?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCaptureResolvedPhotoSettings;
);
2387
// AVCaptureResolvedPhotoSettings inherits from NSObject (see the extern_class!
// declaration), so it conforms to NSObjectProtocol.
extern_conformance!(
    unsafe impl NSObjectProtocol for AVCaptureResolvedPhotoSettings {}
);
2391
/// Accessors for the settings the photo output resolved for a capture request.
impl AVCaptureResolvedPhotoSettings {
    extern_methods!(
        /// Plain `-[NSObject init]`. Resolved settings objects are produced by the
        /// framework during a capture request; you do not normally construct them yourself.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Plain `+[NSObject new]` (alloc + init).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        /// uniqueID matches that of the AVCapturePhotoSettings instance you passed to -capturePhotoWithSettings:delegate:.
        #[unsafe(method(uniqueID))]
        #[unsafe(method_family = none)]
        pub unsafe fn uniqueID(&self) -> i64;

        #[cfg(feature = "objc2-core-media")]
        /// The resolved dimensions of the photo buffer that will be delivered to the -captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error: callback.
        ///
        /// If you request a RAW capture with no processed companion image, photoDimensions resolve to { 0, 0 }.
        #[unsafe(method(photoDimensions))]
        #[unsafe(method_family = none)]
        pub unsafe fn photoDimensions(&self) -> CMVideoDimensions;

        #[cfg(feature = "objc2-core-media")]
        /// The resolved dimensions of the RAW photo buffer that will be delivered to the -captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error: callback.
        ///
        /// If you request a non-RAW capture, rawPhotoDimensions resolve to { 0, 0 }.
        #[unsafe(method(rawPhotoDimensions))]
        #[unsafe(method_family = none)]
        pub unsafe fn rawPhotoDimensions(&self) -> CMVideoDimensions;

        #[cfg(feature = "objc2-core-media")]
        /// The resolved dimensions of the preview photo buffer that will be delivered to the -captureOutput:didFinishProcessing{Photo | RawPhoto}... AVCapturePhotoCaptureDelegate callbacks.
        ///
        /// If you don't request a preview image, previewDimensions resolve to { 0, 0 }.
        #[unsafe(method(previewDimensions))]
        #[unsafe(method_family = none)]
        pub unsafe fn previewDimensions(&self) -> CMVideoDimensions;

        #[cfg(feature = "objc2-core-media")]
        /// The resolved dimensions of the embedded thumbnail that will be written to the processed photo delivered to the -captureOutput:didFinishProcessingPhoto:error: AVCapturePhotoCaptureDelegate callback.
        ///
        /// If you don't request an embedded thumbnail image, embeddedThumbnailDimensions resolve to { 0, 0 }.
        #[unsafe(method(embeddedThumbnailDimensions))]
        #[unsafe(method_family = none)]
        pub unsafe fn embeddedThumbnailDimensions(&self) -> CMVideoDimensions;

        #[cfg(feature = "objc2-core-media")]
        /// The resolved dimensions of the embedded thumbnail that will be written to the RAW photo delivered to the -captureOutput:didFinishProcessingPhoto:error: AVCapturePhotoCaptureDelegate callback.
        ///
        /// If you don't request a raw embedded thumbnail image, rawEmbeddedThumbnailDimensions resolve to { 0, 0 }.
        #[unsafe(method(rawEmbeddedThumbnailDimensions))]
        #[unsafe(method_family = none)]
        pub unsafe fn rawEmbeddedThumbnailDimensions(&self) -> CMVideoDimensions;

        #[cfg(feature = "objc2-core-media")]
        /// The resolved dimensions of the portrait effects matte that will be delivered to the AVCapturePhoto in the -captureOutput:didFinishProcessingPhoto:error: AVCapturePhotoCaptureDelegate callback.
        ///
        /// If you request a portrait effects matte by calling -[AVCapturePhotoSettings setPortraitEffectsMatteDeliveryEnabled:YES], portraitEffectsMatteDimensions resolve to the expected dimensions of the portrait effects matte, assuming one is generated (see -[AVCapturePhotoSettings portraitEffectsMatteDeliveryEnabled] for a discussion of why a portrait effects matte might not be delivered). If you don't request a portrait effects matte, portraitEffectsMatteDimensions always resolve to { 0, 0 }.
        #[unsafe(method(portraitEffectsMatteDimensions))]
        #[unsafe(method_family = none)]
        pub unsafe fn portraitEffectsMatteDimensions(&self) -> CMVideoDimensions;

        #[cfg(all(feature = "AVSemanticSegmentationMatte", feature = "objc2-core-media"))]
        /// Queries the resolved dimensions of semantic segmentation mattes that will be delivered to the AVCapturePhoto in the -captureOutput:didFinishProcessingPhoto:error: AVCapturePhotoCaptureDelegate callback.
        ///
        /// If you request semantic segmentation mattes by calling -[AVCapturePhotoSettings setEnabledSemanticSegmentationMatteTypes:] with a non-empty array, the dimensions resolve to the expected dimensions for each of the mattes, assuming they are generated (see -[AVCapturePhotoSettings enabledSemanticSegmentationMatteTypes] for a discussion of why a particular matte might not be delivered). If you don't request any semantic segmentation mattes, the result will always be { 0, 0 }.
        #[unsafe(method(dimensionsForSemanticSegmentationMatteOfType:))]
        #[unsafe(method_family = none)]
        pub unsafe fn dimensionsForSemanticSegmentationMatteOfType(
            &self,
            semantic_segmentation_matte_type: &AVSemanticSegmentationMatteType,
        ) -> CMVideoDimensions;

        #[cfg(feature = "objc2-core-media")]
        /// The resolved dimensions of the video track in the movie that will be delivered to the -captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error: callback.
        ///
        /// If you don't request Live Photo capture, livePhotoMovieDimensions resolve to { 0, 0 }.
        #[unsafe(method(livePhotoMovieDimensions))]
        #[unsafe(method_family = none)]
        pub unsafe fn livePhotoMovieDimensions(&self) -> CMVideoDimensions;

        /// Indicates whether the flash will fire when capturing the photo.
        ///
        /// When you specify AVCaptureFlashModeAuto as your AVCapturePhotoSettings.flashMode, you don't know if flash capture will be chosen until you inspect the AVCaptureResolvedPhotoSettings flashEnabled property. If the device becomes too hot, the flash becomes temporarily unavailable. You can key-value observe AVCaptureDevice's flashAvailable property to know when this occurs. If the flash is unavailable due to thermal issues, and you specify a flashMode of AVCaptureFlashModeOn, flashEnabled still resolves to NO until the device has sufficiently cooled off.
        #[unsafe(method(isFlashEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isFlashEnabled(&self) -> bool;

        /// Indicates whether red-eye reduction will be applied as necessary when capturing the photo if flashEnabled is YES.
        #[unsafe(method(isRedEyeReductionEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isRedEyeReductionEnabled(&self) -> bool;

        #[cfg(feature = "objc2-core-media")]
        /// The resolved dimensions of the AVCaptureDeferredPhotoProxy when opting in to deferred photo delivery.  See AVCaptureDeferredPhotoProxy.
        ///
        /// If you don't opt in to deferred photo delivery, deferredPhotoProxyDimensions resolve to { 0, 0 }.  When an AVCaptureDeferredPhotoProxy is returned, the photoDimensions property of this object represents the dimensions of the final photo.
        #[unsafe(method(deferredPhotoProxyDimensions))]
        #[unsafe(method_family = none)]
        pub unsafe fn deferredPhotoProxyDimensions(&self) -> CMVideoDimensions;

        /// Indicates whether still image stabilization will be employed when capturing the photo.
        ///
        /// As of iOS 13 hardware, the AVCapturePhotoOutput is capable of applying a variety of multi-image fusion techniques to improve photo quality (reduce noise, preserve detail in low light, freeze motion, etc), all of which have been previously lumped under the stillImageStabilization moniker. This property should no longer be used as it no longer provides meaningful information about the techniques used to improve quality in a photo capture. Instead, you should use -photoQualityPrioritization to indicate your preferred quality vs speed when configuring your AVCapturePhotoSettings. You may query -photoProcessingTimeRange to get an indication of how long the photo will take to process before delivery to your delegate.
        #[deprecated]
        #[unsafe(method(isStillImageStabilizationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isStillImageStabilizationEnabled(&self) -> bool;

        /// Indicates whether fusion of virtual device constituent camera images will be used when capturing the photo, such as the wide-angle and telephoto images on a DualCamera.
        #[unsafe(method(isVirtualDeviceFusionEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isVirtualDeviceFusionEnabled(&self) -> bool;

        /// Indicates whether DualCamera wide-angle and telephoto image fusion will be employed when capturing the photo. As of iOS 13, this property is deprecated in favor of virtualDeviceFusionEnabled.
        #[deprecated]
        #[unsafe(method(isDualCameraFusionEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isDualCameraFusionEnabled(&self) -> bool;

        /// Indicates the number of times your -captureOutput:didFinishProcessingPhoto:error: callback will be called. For instance, if you've requested an auto exposure bracket of 3 with JPEG and RAW, the expectedPhotoCount is 6.
        #[unsafe(method(expectedPhotoCount))]
        #[unsafe(method_family = none)]
        pub unsafe fn expectedPhotoCount(&self) -> NSUInteger;

        #[cfg(feature = "objc2-core-media")]
        /// Indicates the processing time range you can expect for this photo to be delivered to your delegate. the .start field of the CMTimeRange is zero-based. In other words, if photoProcessingTimeRange.start is equal to .5 seconds, then the minimum processing time for this photo is .5 seconds. The .start field plus the .duration field of the CMTimeRange indicate the max expected processing time for this photo. Consider implementing a UI affordance if the max processing time is uncomfortably long.
        #[unsafe(method(photoProcessingTimeRange))]
        #[unsafe(method_family = none)]
        pub unsafe fn photoProcessingTimeRange(&self) -> CMTimeRange;

        /// Indicates whether content aware distortion correction will be employed when capturing the photo.
        #[unsafe(method(isContentAwareDistortionCorrectionEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isContentAwareDistortionCorrectionEnabled(&self) -> bool;

        /// Indicates whether fast capture prioritization will be employed when capturing the photo.
        #[unsafe(method(isFastCapturePrioritizationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isFastCapturePrioritizationEnabled(&self) -> bool;
    );
}
2545
extern_class!(
    /// An object representing a photo in memory, produced by the -captureOutput:didFinishProcessingPhoto:error: in the AVCapturePhotoCaptureDelegate protocol method.
    ///
    /// Beginning in iOS 11, AVCapturePhotoOutput's AVCapturePhotoCaptureDelegate supports a simplified callback for delivering image data, namely -captureOutput:didFinishProcessingPhoto:error:. This callback presents each image result for your capture request as an AVCapturePhoto object, an immutable wrapper from which various properties of the photo capture may be queried, such as the photo's preview pixel buffer, metadata, depth data, camera calibration data, and image bracket specific properties. AVCapturePhoto can wrap file-containerized photo results, such as HEVC encoded image data, containerized in the HEIC file format. CMSampleBufferRef, on the other hand, may only be used to express non file format containerized photo data. For this reason, the AVCapturePhotoCaptureDelegate protocol methods that return CMSampleBuffers have been deprecated in favor of -captureOutput:didFinishProcessingPhoto:error:. A AVCapturePhoto wraps a single image result. For instance, if you've requested a bracketed capture of 3 images, your callback is called 3 times, each time delivering an AVCapturePhoto.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturephoto?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCapturePhoto;
);
2557
// AVCapturePhoto inherits from NSObject (see the extern_class! declaration),
// so it conforms to NSObjectProtocol.
extern_conformance!(
    unsafe impl NSObjectProtocol for AVCapturePhoto {}
);
2561
/// Accessors for the image data, auxiliary images, and metadata of a captured photo.
impl AVCapturePhoto {
    extern_methods!(
        /// Plain `-[NSObject init]`. AVCapturePhoto instances are produced by the
        /// framework and delivered to your AVCapturePhotoCaptureDelegate; you do not
        /// normally construct them yourself.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Plain `+[NSObject new]` (alloc + init).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        #[cfg(feature = "objc2-core-media")]
        /// The time at which this image was captured, synchronized to the synchronizationClock of the AVCaptureSession
        ///
        /// The timestamp property indicates the time the image was captured, and is analogous to CMSampleBufferGetPresentationTimeStamp(). If an error was provided in the -captureOutput:didFinishProcessingPhoto:error: callback, timestamp returns kCMTimeInvalid.
        #[unsafe(method(timestamp))]
        #[unsafe(method_family = none)]
        pub unsafe fn timestamp(&self) -> CMTime;

        /// This property returns YES if this photo is a RAW image.
        ///
        /// Your AVCapturePhotoCaptureDelegate's -captureOutput:didFinishProcessingPhoto:error: method may be called one or more times with image results, including RAW or non-RAW images. This property distinguishes RAW from non-RAW image results, for instance, if you've requested a RAW + JPEG capture.
        #[unsafe(method(isRawPhoto))]
        #[unsafe(method_family = none)]
        pub unsafe fn isRawPhoto(&self) -> bool;

        #[cfg(feature = "objc2-core-video")]
        /// For uncompressed or RAW captures, this property offers access to the pixel data.
        ///
        /// Uncompressed captures, such as '420f' or 'BGRA', Bayer RAW captures, such as 'bgg4', or Apple ProRAW captures, such as 'l64r', present pixel data as a CVPixelBuffer. See AVCapturePhotoOutput's -appleProRAWEnabled for a discussion on the differences between Bayer RAW and Apple ProRAW. This property is analogous to CMSampleBufferGetImageBuffer(). The pixel buffer contains only the minimal attachments required for correct display. Compressed captures, such as 'jpeg', return nil.
        #[unsafe(method(pixelBuffer))]
        #[unsafe(method_family = none)]
        pub unsafe fn pixelBuffer(&self) -> Option<Retained<CVPixelBuffer>>;

        #[cfg(feature = "objc2-core-video")]
        /// This property offers access to the preview image pixel data if you've requested it.
        ///
        /// If you requested a preview image by calling -[AVCapturePhotoSettings setPreviewPhotoFormat:] with a non-nil value, this property offers access to the resulting preview image pixel data, and is analogous to CMSampleBufferGetImageBuffer(). The pixel buffer contains only the minimal attachments required for correct display. Nil is returned if you did not request a preview image.
        #[unsafe(method(previewPixelBuffer))]
        #[unsafe(method_family = none)]
        pub unsafe fn previewPixelBuffer(&self) -> Option<Retained<CVPixelBuffer>>;

        /// The format of the embedded thumbnail contained in this AVCapturePhoto.
        ///
        /// If you requested an embedded thumbnail image by calling -[AVCapturePhotoSettings setEmbeddedThumbnailPhotoFormat:] with a non-nil value, this property offers access to the resolved embedded thumbnail AVVideoSettings dictionary. Nil is returned if you did not request an embedded thumbnail image.
        #[unsafe(method(embeddedThumbnailPhotoFormat))]
        #[unsafe(method_family = none)]
        pub unsafe fn embeddedThumbnailPhotoFormat(
            &self,
        ) -> Option<Retained<NSDictionary<NSString, AnyObject>>>;

        #[cfg(feature = "AVDepthData")]
        /// An AVDepthData object wrapping a disparity/depth map associated with this photo.
        ///
        /// If you requested depth data delivery by calling -[AVCapturePhotoSettings setDepthDataDeliveryEnabled:YES], this property offers access to the resulting AVDepthData object. Nil is returned if you did not request depth data delivery. Note that the depth data is only embedded in the photo's internal file format container if you set -[AVCapturePhotoSettings setEmbedsDepthDataInPhoto:YES].
        #[unsafe(method(depthData))]
        #[unsafe(method_family = none)]
        pub unsafe fn depthData(&self) -> Option<Retained<AVDepthData>>;

        #[cfg(feature = "AVPortraitEffectsMatte")]
        /// An AVPortraitEffectsMatte object wrapping a matte associated with this photo.
        ///
        /// If you requested portrait effects matte delivery by calling -[AVCapturePhotoSettings setPortraitEffectsMatteDeliveryEnabled:YES], this property offers access to the resulting AVPortraitEffectsMatte object. Nil is returned if you did not request portrait effects matte delivery. Note that the portrait effects matte is only embedded in the photo's internal file format container if you set -[AVCapturePhotoSettings setEmbedsPortraitEffectsMatteInPhoto:YES].
        #[unsafe(method(portraitEffectsMatte))]
        #[unsafe(method_family = none)]
        pub unsafe fn portraitEffectsMatte(&self) -> Option<Retained<AVPortraitEffectsMatte>>;

        #[cfg(feature = "AVSemanticSegmentationMatte")]
        /// An accessor for semantic segmentation mattes associated with this photo.
        ///
        /// Parameter `semanticSegmentationMatteType`: The matte type of interest (hair, skin, etc).
        ///
        /// Returns: An instance of AVSemanticSegmentationMatte, or nil if none could be found for the specified type.
        ///
        /// If you requested one or more semantic segmentation mattes by calling -[AVCapturePhotoSettings setEnabledSemanticSegmentationMatteTypes:] with a non-empty array of types, this property offers access to the resulting AVSemanticSegmentationMatte objects. Nil is returned if you did not request semantic segmentation matte delivery, or if no mattes of the specified type are available. Note that semantic segmentation mattes are only embedded in the photo's internal file format container if you call -[AVCapturePhotoSettings setEmbedsSemanticSegmentationMattesInPhoto:YES].
        #[unsafe(method(semanticSegmentationMatteForType:))]
        #[unsafe(method_family = none)]
        pub unsafe fn semanticSegmentationMatteForType(
            &self,
            semantic_segmentation_matte_type: &AVSemanticSegmentationMatteType,
        ) -> Option<Retained<AVSemanticSegmentationMatte>>;

        /// An ImageIO property style dictionary of metadata associated with this photo.
        ///
        /// Valid metadata keys are found in `<ImageIO/CGImageProperties.h>`, such as kCGImagePropertyOrientation, kCGImagePropertyExifDictionary, kCGImagePropertyMakerAppleDictionary, etc.
        #[unsafe(method(metadata))]
        #[unsafe(method_family = none)]
        pub unsafe fn metadata(&self) -> Retained<NSDictionary<NSString, AnyObject>>;

        #[cfg(feature = "AVCameraCalibrationData")]
        /// An AVCameraCalibrationData object representing the calibration information for the camera providing the photo.
        ///
        /// Camera calibration data is only present if you set AVCapturePhotoSettings.setCameraCalibrationDataDeliveryEnabled to YES. When requesting virtual device constituent photo delivery plus cameraCalibrationDataDeliveryEnabled, camera calibration information is delivered with all resultant photos and is specific to the constituent device producing that photo.
        #[unsafe(method(cameraCalibrationData))]
        #[unsafe(method_family = none)]
        pub unsafe fn cameraCalibrationData(&self) -> Option<Retained<AVCameraCalibrationData>>;

        /// The AVCaptureResolvedPhotoSettings associated with all photo results for a given -[AVCapturePhotoOutput capturePhotoWithSettings:delegate:] request.
        ///
        /// Even in the event of an error, the resolved settings are always non nil.
        #[unsafe(method(resolvedSettings))]
        #[unsafe(method_family = none)]
        pub unsafe fn resolvedSettings(&self) -> Retained<AVCaptureResolvedPhotoSettings>;

        /// This photo's index (1-based) in the total expected photo count.
        ///
        /// The resolvedSettings.expectedPhotoCount property indicates the total number of images that will be returned for a given capture request. This property indicates this photo's index (1-based). When you receive a -captureOutput:didFinishProcessingPhoto:error: callback with a photo whose photoCount matches resolvedSettings.expectedPhotoCount, you know you've received the last one for the given capture request.
        #[unsafe(method(photoCount))]
        #[unsafe(method_family = none)]
        pub unsafe fn photoCount(&self) -> NSInteger;

        #[cfg(feature = "AVCaptureDevice")]
        /// The device type of the source camera providing the photo.
        ///
        /// When taking a virtual device constituent photo capture, you may query this property to find out the source type of the photo. For instance, on a DualCamera, resulting photos will be of sourceDeviceType AVCaptureDeviceTypeBuiltInWideCamera, or AVCaptureDeviceTypeBuiltInTelephotoCamera. For all other types of capture, the source device type is equal to the -[AVCaptureDevice deviceType] of the AVCaptureDevice to which the AVCapturePhotoOutput is connected. Returns nil if the source of the photo is not an AVCaptureDevice.
        #[unsafe(method(sourceDeviceType))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceDeviceType(&self) -> Option<Retained<AVCaptureDeviceType>>;

        #[cfg(feature = "objc2-core-video")]
        /// Returns a pixel buffer with the same aspect ratio as the constant color photo, where each pixel value (unsigned 8-bit integer) indicates how fully the constant color effect has been achieved in the corresponding region of the constant color photo -- 255 means full confidence, 0 means zero confidence.
        ///
        /// NULL is returned for any non constant color photos.
        #[unsafe(method(constantColorConfidenceMap))]
        #[unsafe(method_family = none)]
        pub unsafe fn constantColorConfidenceMap(&self) -> Option<Retained<CVPixelBuffer>>;

        /// Returns a score summarizing the overall confidence level of a constant color photo -- 1.0 means full confidence, 0.0 means zero confidence.
        ///
        /// Default is 0.0.
        ///
        /// In most use cases (document scanning for example), the central region of the photo is considered more important than the peripherals, therefore the confidence level of the central pixels are weighted more heavily than pixels on the edges of the photo.
        ///
        /// Use constantColorConfidenceMap for more use case specific analyses of the confidence level.
        #[unsafe(method(constantColorCenterWeightedMeanConfidenceLevel))]
        #[unsafe(method_family = none)]
        pub unsafe fn constantColorCenterWeightedMeanConfidenceLevel(&self) -> c_float;

        /// Indicates whether this photo is a fallback photo for a constant color capture.
        #[unsafe(method(isConstantColorFallbackPhoto))]
        #[unsafe(method_family = none)]
        pub unsafe fn isConstantColorFallbackPhoto(&self) -> bool;
    );
}
2723
/// AVCapturePhotoConversions.
///
/// Flattening (NSData) and CGImage conversion utilities on AVCapturePhoto.
impl AVCapturePhoto {
    extern_methods!(
        /// Flattens the AVCapturePhoto to an NSData using the file container format (processedFileType or rawFileType) specified in the AVCapturePhotoSettings (e.g. JFIF, HEIF, DNG, DICOM).
        ///
        ///
        /// Returns: An NSData containing bits in the file container's format, or nil if the flattening process fails.
        #[unsafe(method(fileDataRepresentation))]
        #[unsafe(method_family = none)]
        pub unsafe fn fileDataRepresentation(&self) -> Option<Retained<NSData>>;

        /// Flattens the AVCapturePhoto to an NSData using the file container format (processedFileType or rawFileType) specified in the AVCapturePhotoSettings (e.g. JFIF, HEIF, DNG, DICOM), and allows you to strip or replace various pieces of metadata in the process.
        ///
        ///
        /// Parameter `customizer`: An object conforming to the AVCapturePhotoFileDataRepresentationCustomizer protocol that will be called synchronously to provide customization of metadata written to the container format. An NSInvalidArgumentException is thrown if you pass nil.
        ///
        /// Returns: An NSData containing bits in the file container's format, or nil if the flattening process fails.
        #[unsafe(method(fileDataRepresentationWithCustomizer:))]
        #[unsafe(method_family = none)]
        pub unsafe fn fileDataRepresentationWithCustomizer(
            &self,
            customizer: &ProtocolObject<dyn AVCapturePhotoFileDataRepresentationCustomizer>,
        ) -> Option<Retained<NSData>>;

        #[cfg(all(feature = "AVDepthData", feature = "objc2-core-video"))]
        /// Flattens the AVCapturePhoto to an NSData using the file container format (processedFileType or rawFileType) specified in the AVCapturePhotoSettings (e.g. JFIF, HEIF, DNG, DICOM), and allows you to replace metadata, thumbnail, and depth data in the process.
        ///
        ///
        /// Parameter `replacementMetadata`: A dictionary of keys and values from
        /// <ImageIO
        /// /CGImageProperties.h>. To preserve existing metadata to the file, pass self.metadata. To strip existing metadata, pass nil. To replace metadata, pass a replacement dictionary.
        ///
        /// Parameter `replacementEmbeddedThumbnailPhotoFormat`: A dictionary of keys and values from
        /// <AVFoundation
        /// /AVVideoSettings.h>. If you pass a non-nil dictionary, AVVideoCodecKey is required, with AVVideoWidthKey and AVVideoHeightKey being optional. To preserve the existing embedded thumbnail photo to the file, pass self.embeddedThumbnailPhotoFormat and pass nil as your replacementEmbeddedThumbnailPixelBuffer parameter. To strip the existing embedded thumbnail, pass nil for both replacementEmbeddedThumbnailPhotoFormat and replacementEmbeddedThumbnailPixelBuffer. To replace the existing embedded thumbnail photo, pass both a non-nil replacementThumbnailPixelBuffer and replacementEmbeddedThumbnailPhotoFormat dictionary.
        ///
        /// Parameter `replacementEmbeddedThumbnailPixelBuffer`: A pixel buffer containing a source image to be encoded to the file as the replacement thumbnail image. To preserve the existing embedded thumbnail photo to the file, pass self.embeddedThumbnailPhotoFormat as your replacementEmbeddedThumbnailPhotoFormat parameter and nil as your replacementEmbeddedThumbnailPixelBuffer parameter. To strip the existing embedded thumbnail, pass nil for both replacementEmbeddedThumbnailPhotoFormat and replacementEmbeddedThumbnailPixelBuffer. To replace the existing embedded thumbnail photo, pass both a non-nil replacementThumbnailPixelBuffer and replacementEmbeddedThumbnailPhotoFormat dictionary.
        ///
        /// Parameter `replacementDepthData`: Replacement depth data to be written to the flattened file container. To preserve existing depth data to the file, pass self.depthData. To strip it, pass nil. To replace it, pass a new AVDepthData instance.
        ///
        /// Returns: An NSData containing bits in the file container's format, or nil if the flattening process fails.
        ///
        /// # Safety
        ///
        /// - `replacement_metadata` generic should be of the correct type.
        /// - `replacement_embedded_thumbnail_photo_format` generic should be of the correct type.
        ///
        /// NOTE(review): deprecated upstream — the customizer-based `fileDataRepresentationWithCustomizer` above covers the same replacement surface.
        #[deprecated]
        #[unsafe(method(fileDataRepresentationWithReplacementMetadata:replacementEmbeddedThumbnailPhotoFormat:replacementEmbeddedThumbnailPixelBuffer:replacementDepthData:))]
        #[unsafe(method_family = none)]
        pub unsafe fn fileDataRepresentationWithReplacementMetadata_replacementEmbeddedThumbnailPhotoFormat_replacementEmbeddedThumbnailPixelBuffer_replacementDepthData(
            &self,
            replacement_metadata: Option<&NSDictionary<NSString, AnyObject>>,
            replacement_embedded_thumbnail_photo_format: Option<&NSDictionary<NSString, AnyObject>>,
            replacement_embedded_thumbnail_pixel_buffer: Option<&CVPixelBuffer>,
            replacement_depth_data: Option<&AVDepthData>,
        ) -> Option<Retained<NSData>>;

        #[cfg(feature = "objc2-core-graphics")]
        /// Utility method that converts the AVCapturePhoto's primary photo to a CGImage.
        ///
        ///
        /// Returns: A CGImageRef, or nil if the conversion process fails.
        ///
        ///
        /// Each time you access this method, AVCapturePhoto generates a new CGImageRef. When backed by a compressed container (such as HEIC), the CGImageRepresentation is decoded lazily as needed. When backed by an uncompressed format such as BGRA, it is copied into a separate backing buffer whose lifetime is not tied to that of the AVCapturePhoto. For a 12 megapixel image, a BGRA CGImage represents ~48 megabytes per call. If you only intend to use the CGImage for on-screen rendering, use the previewCGImageRepresentation instead. Note that the physical rotation of the CGImageRef matches that of the main image. Exif orientation has not been applied. If you wish to apply rotation when working with UIImage, you can do so by querying the photo's metadata[kCGImagePropertyOrientation] value, and passing it as the orientation parameter to +[UIImage imageWithCGImage:scale:orientation:]. RAW images always return a CGImageRepresentation of nil. If you wish to make a CGImageRef from a RAW image, use CIRAWFilter in the CoreImage framework.
        #[unsafe(method(CGImageRepresentation))]
        #[unsafe(method_family = none)]
        pub unsafe fn CGImageRepresentation(&self) -> Option<Retained<CGImage>>;

        #[cfg(feature = "objc2-core-graphics")]
        /// Utility method that converts the AVCapturePhoto's preview photo to a CGImage.
        ///
        ///
        /// Returns: A CGImageRef, or nil if the conversion process fails, or if you did not request a preview photo.
        ///
        ///
        /// Each time you access this method, AVCapturePhoto generates a new CGImageRef. This CGImageRepresentation is a RGB rendering of the previewPixelBuffer property. If you did not request a preview photo by setting the -[AVCapturePhotoSettings previewPhotoFormat] property, this method returns nil. Note that the physical rotation of the CGImageRef matches that of the main image. Exif orientation has not been applied. If you wish to apply rotation when working with UIImage, you can do so by querying the photo's metadata[kCGImagePropertyOrientation] value, and passing it as the orientation parameter to +[UIImage imageWithCGImage:scale:orientation:].
        #[unsafe(method(previewCGImageRepresentation))]
        #[unsafe(method_family = none)]
        pub unsafe fn previewCGImageRepresentation(&self) -> Option<Retained<CGImage>>;
    );
}
2806
/// Constants indicating the status of the lens stabilization module (aka OIS).
///
///
/// Indicates that lens stabilization is unsupported.
///
/// Indicates that lens stabilization was not in use for this capture.
///
/// Indicates that the lens stabilization module was active for the duration of the capture.
///
/// Indicates that device motion or capture duration exceeded the stabilization module's correction limits.
///
/// Indicates that the lens stabilization module was unavailable for use at the time of capture. The module may be available in subsequent captures.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturelensstabilizationstatus?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureLensStabilizationStatus(pub NSInteger);
impl AVCaptureLensStabilizationStatus {
    /// Lens stabilization (OIS) is unsupported.
    #[doc(alias = "AVCaptureLensStabilizationStatusUnsupported")]
    pub const Unsupported: Self = Self(0);
    /// Lens stabilization was not in use for this capture.
    #[doc(alias = "AVCaptureLensStabilizationStatusOff")]
    pub const Off: Self = Self(1);
    /// The lens stabilization module was active for the duration of the capture.
    #[doc(alias = "AVCaptureLensStabilizationStatusActive")]
    pub const Active: Self = Self(2);
    /// Device motion or capture duration exceeded the stabilization module's correction limits.
    #[doc(alias = "AVCaptureLensStabilizationStatusOutOfRange")]
    pub const OutOfRange: Self = Self(3);
    /// The stabilization module was unavailable at the time of capture; it may be available in subsequent captures.
    #[doc(alias = "AVCaptureLensStabilizationStatusUnavailable")]
    pub const Unavailable: Self = Self(4);
}
2837
// Objective-C type encodings: the NS_ENUM is transported across the ObjC
// boundary as a plain NSInteger (and pointer-to-NSInteger for by-ref use).
unsafe impl Encode for AVCaptureLensStabilizationStatus {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

unsafe impl RefEncode for AVCaptureLensStabilizationStatus {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
2845
/// AVCapturePhotoBracketedCapture.
///
/// Accessors describing this photo's relationship to a bracketed capture, if any.
impl AVCapturePhoto {
    extern_methods!(
        #[cfg(feature = "AVCaptureStillImageOutput")]
        /// The AVCaptureBracketedStillImageSettings associated with this photo.
        ///
        ///
        /// When specifying a bracketed capture using AVCapturePhotoBracketSettings, you specify an array of AVCaptureBracketedStillImageSettings -- one per image in the bracket. This property indicates the AVCaptureBracketedStillImageSettings associated with this particular photo, or nil if this photo is not part of a bracketed capture.
        #[unsafe(method(bracketSettings))]
        #[unsafe(method_family = none)]
        pub unsafe fn bracketSettings(
            &self,
        ) -> Option<Retained<AVCaptureBracketedStillImageSettings>>;

        /// 1-based sequence count of the photo.
        ///
        ///
        /// If this photo is part of a bracketed capture (invoked using AVCapturePhotoBracketSettings), this property indicates the current result's count in the sequence, starting with 1 for the first result, or 0 if this photo is not part of a bracketed capture.
        #[unsafe(method(sequenceCount))]
        #[unsafe(method_family = none)]
        pub unsafe fn sequenceCount(&self) -> NSInteger;

        /// The status of the lens stabilization module during capture of this photo.
        ///
        ///
        /// In configurations where lens stabilization (OIS) is unsupported, AVCaptureLensStabilizationStatusUnsupported is returned. If lens stabilization is supported, but this photo is not part of a bracketed capture in which -[AVCapturePhotoBracketSettings setLensStabilizationEnabled:YES] was called, AVCaptureLensStabilizationStatusOff is returned. Otherwise a lens stabilization status is returned indicating how lens stabilization was applied during the capture.
        #[unsafe(method(lensStabilizationStatus))]
        #[unsafe(method_family = none)]
        pub unsafe fn lensStabilizationStatus(&self) -> AVCaptureLensStabilizationStatus;
    );
}
2877
extern_class!(
    /// A lightly-processed photo whose data may be used to process and fetch a higher-resolution asset at a later time.
    ///
    ///
    /// An AVCaptureDeferredPhotoProxy behaves like a normal AVCapturePhoto, and approximates the look of the final rendered image.  This object represents intermediate data that can be rendered into a final image and ingested into the user's photo library via PHAsset APIs.  The intermediate data are not accessible by the calling process.
    ///
    /// Use a PHAssetCreationRequest with a resourceType of PHAssetResourceTypePhotoProxy using the fileDataRepresentation of this object.  Image processing to finalize the asset will occur either on-demand when accessing the image data via PHImageManager or PHAssetResource, or will execute in the background when the system has determined that it's a good time to process based on thermals, battery level, and other conditions.  If the data provided to the PHAssetCreationRequest does not come from an AVCaptureDeferredPhotoProxy, then PHAssetCreationRequest will fail and a PHPhotosErrorInvalidResource error will be returned.
    ///
    /// Below is a discussion of how the superclass properties behave on an AVCaptureDeferredPhotoProxy.
    ///
    /// NOTE(review): the per-property headings appear to have been dropped during doc generation; each paragraph below describes one superclass property — consult the linked Apple documentation for the property names.
    ///
    /// The time of the capture; proxy and final photos will have the same timestamp.
    ///
    ///
    /// The metadata of the proxy image may differ slightly from the final photo's metadata where some fields may be updated.
    ///
    ///
    /// Always NO, as deferred processing isn't available for raw photos.
    ///
    ///
    /// Describes the embedded thumbnail format of both the proxy and the final photo which have the same dimensions and codec.
    ///
    ///
    /// Describes the resolved settings of the whole capture, including the proxy and final photo. See AVCaptureResolvedPhotoSettings.deferredPhotoProxyDimensions.
    ///
    ///
    /// Same for both proxy and final.
    ///
    ///
    /// Same for both proxy and final.
    ///
    ///
    /// Same for both proxy and final.
    ///
    ///
    /// Same for both proxy and final.
    ///
    ///
    /// Same for both proxy and final.
    ///
    /// Superclass properties/methods that behave differently than a typical AVCapturePhoto:
    ///
    ///
    /// - (nullable CGImageRef)CGImageRepresentation;
    /// - (nullable CGImageRef)previewCGImageRepresentation;
    /// All of the above properties return the same proxy image, either as a pixel buffer or CGImageRef.
    ///
    /// - (nullable NSData *)fileDataRepresentation;
    /// - (nullable NSData *)fileDataRepresentationWithCustomizer:(id
    /// <AVCapturePhotoFileDataRepresentationCustomizer
    /// >)customizer;
    /// You may call either of the above two methods to create a NSData representation of the image, but note that it is only the proxy image quality being packaged.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedeferredphotoproxy?language=objc)
    #[unsafe(super(AVCapturePhoto, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCaptureDeferredPhotoProxy;
);
2935
// Declares AVCaptureDeferredPhotoProxy's conformance to NSObjectProtocol.
extern_conformance!(
    unsafe impl NSObjectProtocol for AVCaptureDeferredPhotoProxy {}
);
2939
impl AVCaptureDeferredPhotoProxy {
    extern_methods!(
        /// The Objective-C `-init` initializer.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// The Objective-C `+new` convenience constructor (alloc + init).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
2951
extern_protocol!(
    /// A set of delegate callbacks to be implemented by a client who calls AVCapturePhoto's -fileDataRepresentationWithCustomizer:.
    ///
    ///
    /// AVCapturePhoto is a wrapper representing a file-containerized photo in memory. If you simply wish to flatten the photo to an NSData to be written to a file, you may call -[AVCapturePhoto fileDataRepresentation]. For more complex flattening operations in which you wish to replace or strip metadata, you should call -[AVCapturePhoto fileDataRepresentationWithCustomizer:] instead, providing a delegate for customized stripping / replacing behavior. This delegate's methods are called synchronously before the flattening process commences.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturephotofiledatarepresentationcustomizer?language=objc)
    pub unsafe trait AVCapturePhotoFileDataRepresentationCustomizer:
        NSObjectProtocol
    {
        /// A callback in which you may provide replacement metadata, or direct the AVCapturePhoto to strip existing metadata from the flattened file data representation.
        ///
        ///
        /// Parameter `photo`: The calling instance of AVCapturePhoto.
        ///
        /// Returns: A dictionary of keys and values from
        /// <ImageIO
        /// /CGImageProperties.h>. To preserve existing metadata, return photo.metadata. To strip existing metadata, return nil. To replace metadata, pass a replacement dictionary.
        ///
        ///
        /// This callback is optional. If your delegate does not implement this callback, the existing metadata in the in-memory AVCapturePhoto container will be written to the file data representation.
        #[optional]
        #[unsafe(method(replacementMetadataForPhoto:))]
        #[unsafe(method_family = none)]
        unsafe fn replacementMetadataForPhoto(
            &self,
            photo: &AVCapturePhoto,
        ) -> Option<Retained<NSDictionary<NSString, AnyObject>>>;

        #[cfg(feature = "objc2-core-video")]
        /// A callback in which you may provide a replacement embedded thumbnail image with compression settings, or strip the existing embedded thumbnail image from the flattened file data representation.
        ///
        ///
        /// Parameter `replacementEmbeddedThumbnailPhotoFormatOut`: On output, a pointer to a dictionary of keys and values from
        /// <AVFoundation
        /// /AVVideoSettings.h> If you pass a non-nil dictionary, AVVideoCodecKey is required, with AVVideoWidthKey and AVVideoHeightKey being optional. To preserve the existing embedded thumbnail photo to the flattened data, set *replacementEmbeddedThumbnailPhotoFormatOut to photo.embeddedThumbnailPhotoFormat and return nil. To strip the existing embedded thumbnail, set *replacementEmbeddedThumbnailPhotoFormatOut to nil and return nil. To replace the existing embedded thumbnail photo, pass a replacement photo format dictionary and return a non-nil replacement pixel buffer.
        ///
        /// Parameter `photo`: The calling instance of AVCapturePhoto.
        ///
        /// Returns: A pixel buffer containing a source image to be encoded to the file as the replacement thumbnail image. To preserve the existing embedded thumbnail photo to the flattened data, set *replacementEmbeddedThumbnailPhotoFormatOut to photo.embeddedThumbnailPhotoFormat and return nil. To strip the existing embedded thumbnail, set *replacementEmbeddedThumbnailPhotoFormatOut to nil and return nil. To replace the existing embedded thumbnail photo, pass a replacement photo format dictionary and return a non-nil replacement pixel buffer.
        ///
        ///
        /// This callback is optional. If your delegate does not implement this callback, the existing embedded thumbnail photo in the in-memory AVCapturePhoto container will be written to the file data representation.
        ///
        /// # Safety
        ///
        /// `replacement_embedded_thumbnail_photo_format_out` generic should be of the correct type.
        #[optional]
        #[unsafe(method(replacementEmbeddedThumbnailPixelBufferWithPhotoFormat:forPhoto:))]
        #[unsafe(method_family = none)]
        unsafe fn replacementEmbeddedThumbnailPixelBufferWithPhotoFormat_forPhoto(
            &self,
            replacement_embedded_thumbnail_photo_format_out: &mut Option<
                Retained<NSDictionary<NSString, AnyObject>>,
            >,
            photo: &AVCapturePhoto,
        ) -> Option<Retained<CVPixelBuffer>>;

        #[cfg(feature = "AVDepthData")]
        /// A callback in which you may provide replacement depth data, or strip the existing depth data from the flattened file data representation.
        ///
        ///
        /// Parameter `photo`: The calling instance of AVCapturePhoto.
        ///
        /// Returns: An instance of AVDepthData. To preserve the existing depth data, return photo.depthData. To strip the existing one, return nil. To replace, provide a replacement AVDepthData instance.
        ///
        ///
        /// This callback is optional. If your delegate does not implement this callback, the existing depth data in the in-memory AVCapturePhoto container will be written to the file data representation.
        #[optional]
        #[unsafe(method(replacementDepthDataForPhoto:))]
        #[unsafe(method_family = none)]
        unsafe fn replacementDepthDataForPhoto(
            &self,
            photo: &AVCapturePhoto,
        ) -> Option<Retained<AVDepthData>>;

        #[cfg(feature = "AVPortraitEffectsMatte")]
        /// A callback in which you may provide a replacement portrait effects matte, or strip the existing portrait effects matte from the flattened file data representation.
        ///
        ///
        /// Parameter `photo`: The calling instance of AVCapturePhoto.
        ///
        /// Returns: An instance of AVPortraitEffectsMatte. To preserve the existing portrait effects matte, return photo.portraitEffectsMatte. To strip the existing one, return nil. To replace, provide a replacement AVPortraitEffectsMatte instance.
        ///
        ///
        /// This callback is optional. If your delegate does not implement this callback, the existing portrait effects matte in the in-memory AVCapturePhoto container will be written to the file data representation.
        #[optional]
        #[unsafe(method(replacementPortraitEffectsMatteForPhoto:))]
        #[unsafe(method_family = none)]
        unsafe fn replacementPortraitEffectsMatteForPhoto(
            &self,
            photo: &AVCapturePhoto,
        ) -> Option<Retained<AVPortraitEffectsMatte>>;

        #[cfg(feature = "AVSemanticSegmentationMatte")]
        /// A callback in which you may provide a replacement semantic segmentation matte of the indicated type, or strip the existing one from the flattened file data representation.
        ///
        ///
        /// Parameter `semanticSegmentationMatteType`: The type of semantic segmentation matte to be replaced or stripped.
        ///
        /// Parameter `photo`: The calling instance of AVCapturePhoto.
        ///
        /// Returns: An instance of AVSemanticSegmentationMatte. To preserve the existing matte, return [photo semanticSegmentationMatteForType:semanticSegmentationMatteType]. To strip the existing one, return nil. To replace, provide a replacement AVSemanticSegmentationMatte instance.
        ///
        ///
        /// This callback is optional. If your delegate does not implement this callback, the existing semantic segmentation matte of the specified type in the in-memory AVCapturePhoto container will be written to the file data representation.
        #[optional]
        #[unsafe(method(replacementSemanticSegmentationMatteOfType:forPhoto:))]
        #[unsafe(method_family = none)]
        unsafe fn replacementSemanticSegmentationMatteOfType_forPhoto(
            &self,
            semantic_segmentation_matte_type: &AVSemanticSegmentationMatteType,
            photo: &AVCapturePhoto,
        ) -> Option<Retained<AVSemanticSegmentationMatte>>;

        /// A callback in which you may provide replacement compression settings for the DNG flattened file data representation of Apple ProRAW. This callback will only be invoked for Apple ProRAW captures written to DNG.
        ///
        ///
        /// Parameter `photo`: The calling instance of AVCapturePhoto.
        ///
        /// Parameter `defaultSettings`: The default settings that will be used if not overridden.
        ///
        /// Parameter `maximumBitDepth`: The maximum bit depth that can be specified with AVVideoAppleProRAWBitDepthKey in the returned settings dictionary.
        ///
        /// Returns: An NSDictionary containing compression settings to be used when writing the DNG file representation. Currently accepted keys are:
        /// AVVideoQualityKey (NSNumber in range 0 to 1.0, inclusive)
        /// AVVideoAppleProRAWBitDepthKey (NSNumber in range 8 to maximumBitDepth, inclusive)
        /// Setting AVVideoQualityKey to 1.0 will use lossless compression. Any value between 0 and 1.0 will use lossy compression with that quality.
        /// Setting AVVideoAppleProRAWBitDepthKey to a value less than what is given in defaultSettings may result in quantization losses.
        /// Any keys not specified in the returned dictionary will use the values from defaultSettings. Return defaultSettings if no changes to the compression settings are desired.
        ///
        ///
        /// This callback is optional. If your delegate does not implement this callback, the default compression settings for the file type will be used.
        ///
        /// # Safety
        ///
        /// `default_settings` generic should be of the correct type.
        #[optional]
        #[unsafe(method(replacementAppleProRAWCompressionSettingsForPhoto:defaultSettings:maximumBitDepth:))]
        #[unsafe(method_family = none)]
        unsafe fn replacementAppleProRAWCompressionSettingsForPhoto_defaultSettings_maximumBitDepth(
            &self,
            photo: &AVCapturePhoto,
            default_settings: &NSDictionary<NSString, AnyObject>,
            maximum_bit_depth: NSInteger,
        ) -> Retained<NSDictionary<NSString, AnyObject>>;
    }
);