objc2_av_foundation/generated/AVCaptureInput.rs

//! This file has been automatically generated by `objc2`'s `header-translator`.
//! DO NOT EDIT
use core::ffi::*;
use core::ptr::NonNull;
use objc2::__framework_prelude::*;
#[cfg(feature = "objc2-core-foundation")]
use objc2_core_foundation::*;
#[cfg(feature = "objc2-core-graphics")]
use objc2_core_graphics::*;
#[cfg(feature = "objc2-core-media")]
use objc2_core_media::*;
use objc2_foundation::*;

use crate::*;

extern_class!(
    /// AVCaptureInput is an abstract class that provides an interface for connecting capture input sources to an AVCaptureSession.
    ///
    ///
    /// Concrete instances of AVCaptureInput representing input sources such as cameras can be added to instances of AVCaptureSession using the -[AVCaptureSession addInput:] method. An AVCaptureInput vends one or more streams of media data. For example, input devices can provide both audio and video data. Each media stream provided by an input is represented by an AVCaptureInputPort object. Within a capture session, connections are made between AVCaptureInput instances and AVCaptureOutput instances via AVCaptureConnection objects that define the mapping between a set of AVCaptureInputPort objects and a single AVCaptureOutput.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureinput?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCaptureInput;
);

extern_conformance!(
    unsafe impl NSObjectProtocol for AVCaptureInput {}
);

impl AVCaptureInput {
    extern_methods!(
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        /// The ports owned by the receiver.
        ///
        ///
        /// The value of this property is an array of AVCaptureInputPort objects, each exposing an interface to a single stream of media data provided by an input.
        #[unsafe(method(ports))]
        #[unsafe(method_family = none)]
        pub unsafe fn ports(&self) -> Retained<NSArray<AVCaptureInputPort>>;
    );
}
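
// Example (not part of the generated bindings): a minimal sketch of walking an
// input's ports with the methods declared above plus NSArray's `count` /
// `objectAtIndex` accessors from `objc2-foundation` (assumed to be available).
// Returns how many of the input's ports are currently enabled.
#[allow(dead_code)]
fn count_enabled_ports(input: &AVCaptureInput) -> usize {
    unsafe {
        // `ports` vends one AVCaptureInputPort per media stream of this input.
        let ports = input.ports();
        let mut enabled = 0;
        for index in 0..ports.count() {
            // Each port can be individually enabled or disabled for capture.
            if ports.objectAtIndex(index).isEnabled() {
                enabled += 1;
            }
        }
        enabled
    }
}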

extern "C" {
    /// This notification is posted when the value of an AVCaptureInputPort instance's formatDescription property changes.
    ///
    ///
    /// The notification object is the AVCaptureInputPort instance whose format description changed.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureinputportformatdescriptiondidchangenotification?language=objc)
    pub static AVCaptureInputPortFormatDescriptionDidChangeNotification:
        &'static NSNotificationName;
}

extern_class!(
    /// An AVCaptureInputPort describes a single stream of media data provided by an AVCaptureInput and provides an interface for connecting that stream to AVCaptureOutput instances via AVCaptureConnection.
    ///
    ///
    /// Instances of AVCaptureInputPort cannot be created directly. An AVCaptureInput exposes its input ports via its ports property. Input ports provide information about the format of their media data via the mediaType and formatDescription properties, and allow clients to control the flow of data via the enabled property. Input ports are used by an AVCaptureConnection to define the mapping between inputs and outputs in an AVCaptureSession.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureinputport?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCaptureInputPort;
);

extern_conformance!(
    unsafe impl NSObjectProtocol for AVCaptureInputPort {}
);

impl AVCaptureInputPort {
    extern_methods!(
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        /// The input that owns the receiver.
        ///
        ///
        /// The value of this property is an AVCaptureInput instance that owns the receiver.
        #[unsafe(method(input))]
        #[unsafe(method_family = none)]
        pub unsafe fn input(&self) -> Retained<AVCaptureInput>;

        #[cfg(feature = "AVMediaFormat")]
        /// The media type of the data provided by the receiver.
        ///
        ///
        /// The value of this property is a constant describing the type of media, such as AVMediaTypeVideo or AVMediaTypeAudio, provided by the receiver. Media type constants are defined in AVMediaFormat.h.
        #[unsafe(method(mediaType))]
        #[unsafe(method_family = none)]
        pub unsafe fn mediaType(&self) -> Retained<AVMediaType>;

        #[cfg(feature = "objc2-core-media")]
        /// The format of the data provided by the receiver.
        ///
        ///
        /// The value of this property is a CMFormatDescription that describes the format of the media data currently provided by the receiver. Clients can be notified of changes to the format by observing the AVCaptureInputPortFormatDescriptionDidChangeNotification.
        #[unsafe(method(formatDescription))]
        #[unsafe(method_family = none)]
        pub unsafe fn formatDescription(&self) -> Option<Retained<CMFormatDescription>>;

        /// Whether the receiver should provide data.
        ///
        ///
        /// The value of this property is a BOOL that determines whether the receiver should provide data to outputs when a session is running. Clients can set this property to fine tune which media streams from a given input will be used during capture. The default value is YES.
        #[unsafe(method(isEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isEnabled(&self) -> bool;

        /// Setter for [`isEnabled`][Self::isEnabled].
        #[unsafe(method(setEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setEnabled(&self, enabled: bool);

        #[cfg(feature = "objc2-core-media")]
        /// Provides access to the "native" clock used by the input port.
        ///
        ///
        /// The clock is read-only.
        #[unsafe(method(clock))]
        #[unsafe(method_family = none)]
        pub unsafe fn clock(&self) -> Option<Retained<CMClock>>;

        #[cfg(feature = "AVCaptureDevice")]
        /// The AVCaptureDeviceType of the source device providing input through this port.
        ///
        ///
        /// All AVCaptureInputPorts contained in an AVCaptureDeviceInput's ports array have the same sourceDeviceType, which is equal to deviceInput.device.deviceType. All of these ports are legal for use in an AVCaptureSession. When working with virtual devices such as the DualCamera in an AVCaptureMultiCamSession, it is possible to stream media from the virtual device's constituent device streams by discovering and connecting hidden ports. In the case of the DualCamera, its constituent devices are the WideAngle camera and the Telephoto camera. By calling -[AVCaptureDeviceInput portsWithMediaType:sourceDeviceType:sourceDevicePosition:], you may discover ports originating from one or more of the virtual device's constituent devices and then make connections using those ports. Constituent device ports are never present in their owning virtual device input's ports array. As an example, to find the video port originating from the DualCamera's Telephoto camera constituent device, you call [dualCameraDeviceInput portsWithMediaType:AVMediaTypeVideo sourceDeviceType:AVCaptureDeviceTypeBuiltInTelephotoCamera sourceDevicePosition:dualCamera.position] and use the first port in the resulting array.
        #[unsafe(method(sourceDeviceType))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceDeviceType(&self) -> Option<Retained<AVCaptureDeviceType>>;

        #[cfg(feature = "AVCaptureDevice")]
        /// The AVCaptureDevicePosition of the source device providing input through this port.
        ///
        ///
        /// All AVCaptureInputPorts contained in an AVCaptureDeviceInput's ports array have the same sourceDevicePosition, which is deviceInput.device.position. When working with microphone input in an AVCaptureMultiCamSession, it is possible to record multiple microphone directions simultaneously, for instance, to record front-facing microphone input to pair with video from the front facing camera, and back-facing microphone input to pair with the video from the back-facing camera. By calling -[AVCaptureDeviceInput portsWithMediaType:sourceDeviceType:sourceDevicePosition:], you may discover additional hidden ports originating from the source audio device. These ports represent individual microphones positioned to pick up audio from one particular direction. Examples follow.
        ///
        /// To discover the audio port that captures omnidirectional audio, use [microphoneDeviceInput portsWithMediaType:AVMediaTypeAudio sourceDeviceType:AVCaptureDeviceTypeMicrophone sourceDevicePosition:AVCaptureDevicePositionUnspecified].firstObject.
        /// To discover the audio port that captures front-facing audio, use [microphoneDeviceInput portsWithMediaType:AVMediaTypeAudio sourceDeviceType:AVCaptureDeviceTypeMicrophone sourceDevicePosition:AVCaptureDevicePositionFront].firstObject.
        /// To discover the audio port that captures back-facing audio, use [microphoneDeviceInput portsWithMediaType:AVMediaTypeAudio sourceDeviceType:AVCaptureDeviceTypeMicrophone sourceDevicePosition:AVCaptureDevicePositionBack].firstObject.
        #[unsafe(method(sourceDevicePosition))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceDevicePosition(&self) -> AVCaptureDevicePosition;
    );
}
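
// Example (not part of the generated bindings): a minimal sketch of inspecting
// and selectively disabling a port, assuming the `AVMediaFormat` and
// `objc2-core-media` features and the `AVMediaTypeAudio` static declared
// elsewhere in this crate.
#[cfg(all(feature = "AVMediaFormat", feature = "objc2-core-media"))]
#[allow(dead_code)]
fn mute_audio_port(port: &AVCaptureInputPort) {
    unsafe {
        // Disable just this stream; other ports of the owning input keep flowing.
        if &*port.mediaType() == AVMediaTypeAudio {
            port.setEnabled(false);
        }
        // The format description may be absent until the session starts running;
        // observe AVCaptureInputPortFormatDescriptionDidChangeNotification for changes.
        if let Some(desc) = port.formatDescription() {
            let _ = desc; // e.g. inspect the CMFormatDescription here
        }
    }
}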

/// Constants indicating the modes of multichannel audio.
///
///
/// Indicates that no multichannel audio should be used.
///
/// Indicates that the audio should be recorded using stereo.
///
/// Indicates that the audio should be recorded using first-order ambisonics. When recording a QuickTime movie file, a stereo audio track will be recorded alongside the FOA track for backward playback compatibility.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturemultichannelaudiomode?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVCaptureMultichannelAudioMode(pub NSInteger);
impl AVCaptureMultichannelAudioMode {
    #[doc(alias = "AVCaptureMultichannelAudioModeNone")]
    pub const None: Self = Self(0);
    #[doc(alias = "AVCaptureMultichannelAudioModeStereo")]
    pub const Stereo: Self = Self(1);
    #[doc(alias = "AVCaptureMultichannelAudioModeFirstOrderAmbisonics")]
    pub const FirstOrderAmbisonics: Self = Self(2);
}

unsafe impl Encode for AVCaptureMultichannelAudioMode {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

unsafe impl RefEncode for AVCaptureMultichannelAudioMode {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
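
// Example (not part of the generated bindings): NS_ENUMs are generated as
// newtype wrappers around NSInteger rather than Rust enums, so matching is done
// against the associated constants and unknown future values fall through.
#[allow(dead_code)]
fn describe_multichannel_audio_mode(mode: AVCaptureMultichannelAudioMode) -> &'static str {
    match mode {
        AVCaptureMultichannelAudioMode::None => "single channel",
        AVCaptureMultichannelAudioMode::Stereo => "stereo",
        AVCaptureMultichannelAudioMode::FirstOrderAmbisonics => "first-order ambisonics",
        // Newer SDKs may introduce additional raw values.
        _ => "unknown",
    }
}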

extern_class!(
    /// AVCaptureDeviceInput is a concrete subclass of AVCaptureInput that provides an interface for capturing media from an AVCaptureDevice.
    ///
    ///
    /// Instances of AVCaptureDeviceInput are input sources for AVCaptureSession that provide media data from devices connected to the system, represented by instances of AVCaptureDevice.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturedeviceinput?language=objc)
    #[unsafe(super(AVCaptureInput, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCaptureDeviceInput;
);

extern_conformance!(
    unsafe impl NSObjectProtocol for AVCaptureDeviceInput {}
);

impl AVCaptureDeviceInput {
    extern_methods!(
        #[cfg(feature = "AVCaptureDevice")]
        /// Returns an AVCaptureDeviceInput instance that provides media data from the given device.
        ///
        ///
        /// Parameter `device`: An AVCaptureDevice instance to be used for capture.
        ///
        /// Parameter `outError`: On return, if the given device cannot be used for capture, points to an NSError describing the problem.
        ///
        /// Returns: An AVCaptureDeviceInput instance that provides data from the given device, or nil, if the device could not be used for capture.
        ///
        ///
        /// This method returns an instance of AVCaptureDeviceInput that can be used to capture data from an AVCaptureDevice in an AVCaptureSession. This method attempts to open the device for capture, taking exclusive control of it if necessary. If the device cannot be opened because it is no longer available or because it is in use, for example, this method returns nil, and the optional outError parameter points to an NSError describing the problem.
        #[unsafe(method(deviceInputWithDevice:error:_))]
        #[unsafe(method_family = none)]
        pub unsafe fn deviceInputWithDevice_error(
            device: &AVCaptureDevice,
        ) -> Result<Retained<Self>, Retained<NSError>>;

        #[cfg(feature = "AVCaptureDevice")]
        /// Creates an AVCaptureDeviceInput instance that provides media data from the given device.
        ///
        ///
        /// Parameter `device`: An AVCaptureDevice instance to be used for capture.
        ///
        /// Parameter `outError`: On return, if the given device cannot be used for capture, points to an NSError describing the problem.
        ///
        /// Returns: An AVCaptureDeviceInput instance that provides data from the given device, or nil, if the device could not be used for capture.
        ///
        ///
        /// This method creates an instance of AVCaptureDeviceInput that can be used to capture data from an AVCaptureDevice in an AVCaptureSession. This method attempts to open the device for capture, taking exclusive control of it if necessary. If the device cannot be opened because it is no longer available or because it is in use, for example, this method returns nil, and the optional outError parameter points to an NSError describing the problem.
        #[unsafe(method(initWithDevice:error:_))]
        #[unsafe(method_family = init)]
        pub unsafe fn initWithDevice_error(
            this: Allocated<Self>,
            device: &AVCaptureDevice,
        ) -> Result<Retained<Self>, Retained<NSError>>;

        #[cfg(feature = "AVCaptureDevice")]
        /// The device from which the receiver provides data.
        ///
        ///
        /// The value of this property is the AVCaptureDevice instance that was used to create the receiver.
        #[unsafe(method(device))]
        #[unsafe(method_family = none)]
        pub unsafe fn device(&self) -> Retained<AVCaptureDevice>;

        /// Specifies whether the source device should use the same default auto exposure behaviors for -[AVCaptureSession setSessionPreset:] and -[AVCaptureDevice setActiveFormat:].
        ///
        ///
        /// AVCaptureDevice's activeFormat property may be set two different ways. 1) You set it directly using one of the formats in the device's -formats array, or 2) the AVCaptureSession sets it on your behalf when you set the AVCaptureSession's sessionPreset property. Depending on the device and format, the default auto exposure behavior may be configured differently when you use one method or the other, resulting in non-uniform auto exposure behavior. Auto exposure defaults include min frame rate, max frame rate, and max exposure duration. If you wish to ensure that consistent default behaviors are applied to the device regardless of the API you use to configure the activeFormat, you may set the device input's unifiedAutoExposureDefaultsEnabled property to YES. Default value for this property is NO.
        ///
        /// Note that if you manually set the device's min frame rate, max frame rate, or max exposure duration, your custom values will override the device defaults regardless of whether you've set this property to YES.
        #[unsafe(method(unifiedAutoExposureDefaultsEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn unifiedAutoExposureDefaultsEnabled(&self) -> bool;

        /// Setter for [`unifiedAutoExposureDefaultsEnabled`][Self::unifiedAutoExposureDefaultsEnabled].
        #[unsafe(method(setUnifiedAutoExposureDefaultsEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setUnifiedAutoExposureDefaultsEnabled(
            &self,
            unified_auto_exposure_defaults_enabled: bool,
        );

        #[cfg(all(feature = "AVCaptureDevice", feature = "AVMediaFormat"))]
        /// An accessor method used to retrieve a virtual device's constituent device ports for use in an AVCaptureMultiCamSession.
        ///
        ///
        /// Parameter `mediaType`: The AVMediaType of the port for which you're searching, or nil if all media types should be considered.
        ///
        /// Parameter `sourceDeviceType`: The AVCaptureDeviceType of the port for which you're searching, or nil if source device type is irrelevant.
        ///
        /// Parameter `sourceDevicePosition`: The AVCaptureDevicePosition of the port for which you're searching. AVCaptureDevicePositionUnspecified is germane to audio devices, indicating omnidirectional audio. For other types of capture devices (e.g. cameras), AVCaptureDevicePositionUnspecified means all positions should be considered in the search.
        ///
        ///
        /// Returns: An array of AVCaptureInputPorts satisfying the search criteria, or an empty array if no matching ports could be found.
        ///
        ///
        /// When using AVCaptureMultiCamSession, multiple devices may be run simultaneously. You may also run simultaneous streams from a virtual device such as the Dual Camera. By inspecting a virtual device's constituentDevices property, you can find its underlying physical devices and, using this method, search for ports originating from one of those constituent devices. Note that the AVCaptureInput.ports array does not include constituent device ports for virtual devices. You must use this accessor method to discover the ports for which you're specifically looking. These constituent device ports may be used to make connections to outputs for use with an AVCaptureMultiCamSession. Using the Dual Camera as an example, the AVCaptureInput.ports property exposes only those ports supported by the virtual device (it switches automatically between wide and telephoto cameras according to the zoom factor). You may use this method to find the video ports for the constituentDevices.
        ///
        /// AVCaptureInputPort *wideVideoPort = [dualCameraInput portsWithMediaType:AVMediaTypeVideo sourceDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera sourceDevicePosition:AVCaptureDevicePositionBack].firstObject;
        /// AVCaptureInputPort *teleVideoPort = [dualCameraInput portsWithMediaType:AVMediaTypeVideo sourceDeviceType:AVCaptureDeviceTypeBuiltInTelephotoCamera sourceDevicePosition:AVCaptureDevicePositionBack].firstObject;
        ///
        /// These ports may be used to create connections, say, to two AVCaptureVideoDataOutput instances, allowing for synchronized full frame rate delivery of both wide and telephoto streams.
        ///
        /// As of iOS 13, constituent device ports may not be connected to AVCapturePhotoOutput instances. Clients who wish to capture multiple photos from a virtual device should use AVCapturePhotoOutput's virtualDeviceConstituentPhotoDeliveryEnabled feature.
        ///
        /// When used in conjunction with an audio device, this method allows you to discover microphones in different AVCaptureDevicePositions. When you intend to work with an AVCaptureMultiCamSession, you may use these ports to make connections and capture both front facing and back facing audio simultaneously to two different outputs. When used with an AVCaptureMultiCamSession, the audio device port whose sourceDevicePosition is AVCaptureDevicePositionUnspecified produces omnidirectional sound.
        #[unsafe(method(portsWithMediaType:sourceDeviceType:sourceDevicePosition:))]
        #[unsafe(method_family = none)]
        pub unsafe fn portsWithMediaType_sourceDeviceType_sourceDevicePosition(
            &self,
            media_type: Option<&AVMediaType>,
            source_device_type: Option<&AVCaptureDeviceType>,
            source_device_position: AVCaptureDevicePosition,
        ) -> Retained<NSArray<AVCaptureInputPort>>;

        #[cfg(feature = "objc2-core-media")]
        /// A property that acts as a modifier to the AVCaptureDevice's activeVideoMinFrameDuration property. Default value is kCMTimeInvalid.
        ///
        ///
        /// An AVCaptureDevice's activeVideoMinFrameDuration property is the reciprocal of its active maximum frame rate. To limit the max frame rate of the capture device, clients may set the device's activeVideoMinFrameDuration to a value supported by the receiver's activeFormat (see AVCaptureDeviceFormat's videoSupportedFrameRateRanges property). Changes you make to the device's activeVideoMinFrameDuration property take effect immediately without disrupting preview. Therefore, the AVCaptureSession must always allocate sufficient resources to allow the device to run at its activeFormat's max allowable frame rate. If you wish to use a particular device format but only ever run it at lower frame rates (for instance, only run a 1080p240 fps format at a max frame rate of 60), you can set the AVCaptureDeviceInput's videoMinFrameDurationOverride property to the reciprocal of the max frame rate you intend to use before starting the session (or within a beginConfiguration / commitConfiguration block while running the session).
        ///
        /// When a device input is added to a session, this property reverts back to the default of kCMTimeInvalid (no override).
        #[unsafe(method(videoMinFrameDurationOverride))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoMinFrameDurationOverride(&self) -> CMTime;

        #[cfg(feature = "objc2-core-media")]
        /// Setter for [`videoMinFrameDurationOverride`][Self::videoMinFrameDurationOverride].
        #[unsafe(method(setVideoMinFrameDurationOverride:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setVideoMinFrameDurationOverride(
            &self,
            video_min_frame_duration_override: CMTime,
        );

        /// Indicates whether the device input supports locked frame durations.
        ///
        /// See ``AVCaptureDeviceInput/activeLockedVideoFrameDuration`` for more information on video frame duration locking.
        #[unsafe(method(isLockedVideoFrameDurationSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isLockedVideoFrameDurationSupported(&self) -> bool;

        #[cfg(feature = "objc2-core-media")]
        /// The receiver's locked frame duration (the reciprocal of its frame rate). Setting this property guarantees the intra-frame duration delivered by the device input is precisely the frame duration you request.
        ///
        /// Set this property to run the receiver's associated ``AVCaptureDevice`` at precisely your provided frame rate (expressed as a duration). Query ``AVCaptureDevice/minSupportedLockedVideoFrameDuration`` to find the minimum value supported by this ``AVCaptureDeviceInput``. In order to disable locked video frame duration, set this property to `kCMTimeInvalid`. This property resets itself to `kCMTimeInvalid` when the receiver's attached ``AVCaptureDevice/activeFormat`` changes. When you set this property, its value is also reflected in the receiver's ``AVCaptureDevice/activeVideoMinFrameDuration`` and ``AVCaptureDevice/activeVideoMaxFrameDuration``.
        ///
        /// - Note: Locked frame duration availability may change depending on the device configuration. For example, locked frame duration is unsupported when ``AVCaptureDevice/autoVideoFrameRateEnabled`` or ``AVCaptureMovieFileOutput/spatialVideoCaptureEnabled`` is set to `true`.
        ///
        /// - Note: Only one ``AVCaptureDeviceInput`` added to an ``AVCaptureMultiCamSession`` can follow an external sync device or run at a locked frame duration.
        ///
        /// - Note: Setting this property may cause a lengthy reconfiguration of the receiver, similar to setting ``AVCaptureDevice/activeFormat`` or ``AVCaptureSession/sessionPreset``.
        ///
        /// - Important: If you set this property to a valid value while the receiver's ``AVCaptureDevice/minSupportedLockedVideoFrameDuration`` is `kCMTimeInvalid`, it throws an `NSInvalidArgumentException`.
        ///
        /// - Important: If you set this property while the receiver's ``lockedVideoFrameDurationSupported`` property returns `false`, it throws an `NSInvalidArgumentException`.
        #[unsafe(method(activeLockedVideoFrameDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn activeLockedVideoFrameDuration(&self) -> CMTime;

        #[cfg(feature = "objc2-core-media")]
        /// Setter for [`activeLockedVideoFrameDuration`][Self::activeLockedVideoFrameDuration].
        #[unsafe(method(setActiveLockedVideoFrameDuration:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setActiveLockedVideoFrameDuration(
            &self,
            active_locked_video_frame_duration: CMTime,
        );

        /// Indicates whether the device input supports being configured to follow an external sync device.
        ///
        /// See ``AVCaptureDeviceInput/followExternalSyncDevice:videoFrameDuration:delegate:`` for more information on external sync.
        #[unsafe(method(isExternalSyncSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isExternalSyncSupported(&self) -> bool;

        #[cfg(all(feature = "AVExternalSyncDevice", feature = "objc2-core-media"))]
        /// Configures the device input to follow an external sync device at the given frame duration.
        ///
        /// - Parameter externalSyncDevice: The ``AVExternalSyncDevice`` hardware to follow.
        /// - Parameter videoFrameDuration: The frame duration to which the ``AVExternalSyncDevice`` is calibrated.
        /// - Parameter delegate: The delegate to notify when the connection status changes or an error occurs.
        ///
        /// Call this method to direct your ``AVCaptureDeviceInput`` to follow the external sync pulse from a sync device at the given frame duration.
        ///
        /// Your provided `videoFrameDuration` value must match the sync pulse duration of the external sync device. If it does not, the request times out, the external sync device's status returns to ``AVExternalSyncDeviceStatusReady``, and your session stops running, posting an ``AVCaptureSessionRuntimeErrorNotification`` with ``AVErrorFollowExternalSyncDeviceTimedOut``.
        ///
        /// The ability to follow an external sync device may change depending on the device configuration. For example, ``followExternalSyncDevice:videoFrameDuration:delegate:`` cannot be used when ``AVCaptureDevice/autoVideoFrameRateEnabled`` is `true`.
        ///
        /// To stop following an external pulse, call ``unfollowExternalSyncDevice``. External sync device following is also disabled when your device's ``AVCaptureDeviceFormat`` changes.
        ///
        /// Your provided delegate's ``AVExternalSyncDeviceDelegate/externalSyncDeviceStatusDidChange:`` method is called with a status of ``AVExternalSyncDeviceStatusReady`` if the external pulse signal is not close enough to the provided `videoFrameDuration` for successful calibration.
        ///
        /// Once your ``AVExternalSyncDevice/status`` changes to ``AVExternalSyncDeviceStatusActiveSync``, your input's ``AVCaptureInput/activeExternalSyncVideoFrameDuration`` property reports the up-to-date frame duration. ``AVCaptureInput/activeExternalSyncVideoFrameDuration`` is also reflected in the ``AVCaptureDevice/activeVideoMinFrameDuration`` and ``AVCaptureDevice/activeVideoMaxFrameDuration`` of your input's associated device.
        ///
        /// - Note: Calling this method may cause a lengthy reconfiguration of the receiver, similar to setting a new active format or ``AVCaptureSession/sessionPreset``.
        ///
        /// - Important: Calling this method throws an `NSInvalidArgumentException` if ``AVCaptureDeviceInput/externalSyncSupported`` returns `false`.
        ///
        /// - Important: The provided external sync device's ``status`` must be ``AVExternalSyncDeviceStatusReady`` when you call this method, otherwise an `NSInvalidArgumentException` is thrown.
        #[unsafe(method(followExternalSyncDevice:videoFrameDuration:delegate:))]
        #[unsafe(method_family = none)]
        pub unsafe fn followExternalSyncDevice_videoFrameDuration_delegate(
            &self,
            external_sync_device: &AVExternalSyncDevice,
            frame_duration: CMTime,
            delegate: Option<&ProtocolObject<dyn AVExternalSyncDeviceDelegate>>,
        );

        #[cfg(feature = "objc2-core-media")]
        /// The receiver's external sync frame duration (the reciprocal of its frame rate) when being driven by an external sync device.
        ///
        /// Set up your input to follow an external sync device by calling ``followExternalSyncDevice:videoFrameDuration:delegate:``.
        /// - Note: The value of this readonly property is `kCMTimeInvalid` unless the ``AVExternalSyncDevice`` is actively driving the ``AVCaptureDeviceInput``. This is reflected by the ``AVExternalSyncDevice/status`` being either ``AVExternalSyncDeviceStatusActiveSync`` or ``AVExternalSyncDeviceStatusFreeRunSync``.
        #[unsafe(method(activeExternalSyncVideoFrameDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn activeExternalSyncVideoFrameDuration(&self) -> CMTime;

        #[cfg(feature = "AVExternalSyncDevice")]
        /// The external sync device currently being followed by this input.
        ///
        /// This readonly property returns the ``AVExternalSyncDevice`` instance you provided in ``followExternalSyncDevice:videoFrameDuration:delegate:``. This property returns `nil` when an external sync device is disconnected or fails to calibrate.
        #[unsafe(method(externalSyncDevice))]
        #[unsafe(method_family = none)]
        pub unsafe fn externalSyncDevice(&self) -> Option<Retained<AVExternalSyncDevice>>;

        /// Discontinues external sync.
        ///
        /// This method stops your input from syncing to the external sync device you specified in ``followExternalSyncDevice:videoFrameDuration:delegate:``.
        #[unsafe(method(unfollowExternalSyncDevice))]
        #[unsafe(method_family = none)]
        pub unsafe fn unfollowExternalSyncDevice(&self);

        /// Returns whether the receiver supports the given multichannel audio mode.
        ///
        ///
        /// Parameter `multichannelAudioMode`: An AVCaptureMultichannelAudioMode to be checked.
        ///
        /// Returns: YES if the receiver supports the given multichannel audio mode, NO otherwise.
        ///
        ///
        /// The receiver's multichannelAudioMode property can only be set to a certain mode if this method returns YES for that mode.
        ///
        /// Multichannel audio modes are not supported when used in conjunction with AVCaptureMultiCamSession.
        #[unsafe(method(isMultichannelAudioModeSupported:))]
        #[unsafe(method_family = none)]
        pub unsafe fn isMultichannelAudioModeSupported(
            &self,
            multichannel_audio_mode: AVCaptureMultichannelAudioMode,
        ) -> bool;

        /// Indicates the multichannel audio mode to apply when recording audio.
        ///
        ///
        /// This property only takes effect when audio is being routed through the built-in microphone, and is ignored if an external microphone is in use.
        ///
        /// The default value is AVCaptureMultichannelAudioModeNone, in which case the default single channel audio recording is used.
        #[unsafe(method(multichannelAudioMode))]
        #[unsafe(method_family = none)]
        pub unsafe fn multichannelAudioMode(&self) -> AVCaptureMultichannelAudioMode;

        /// Setter for [`multichannelAudioMode`][Self::multichannelAudioMode].
        #[unsafe(method(setMultichannelAudioMode:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMultichannelAudioMode(
            &self,
            multichannel_audio_mode: AVCaptureMultichannelAudioMode,
        );

        /// Returns whether or not the device supports wind noise removal during audio capture.
        ///
        ///
        /// YES if the device supports wind noise removal, NO otherwise.
        #[unsafe(method(isWindNoiseRemovalSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isWindNoiseRemovalSupported(&self) -> bool;

        /// Specifies whether or not wind noise is removed during audio capture.
        ///
        ///
        /// Wind noise removal is available when the AVCaptureDeviceInput multichannelAudioMode property is set to any value other than AVCaptureMultichannelAudioModeNone.
        #[unsafe(method(isWindNoiseRemovalEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isWindNoiseRemovalEnabled(&self) -> bool;

        /// Setter for [`isWindNoiseRemovalEnabled`][Self::isWindNoiseRemovalEnabled].
        #[unsafe(method(setWindNoiseRemovalEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setWindNoiseRemovalEnabled(&self, wind_noise_removal_enabled: bool);

        /// A BOOL value specifying whether Cinematic Video capture is supported.
        ///
        /// With Cinematic Video capture, you get a simulated depth-of-field effect that keeps your subjects (people, pets, and more) in sharp focus while applying a pleasing blur to the background (or foreground). Depending on the focus mode (see ``AVCaptureCinematicVideoFocusMode`` for detail), the camera either uses machine learning to automatically detect and focus on subjects in the scene, or it fixes focus on a subject until it exits the scene. Cinematic Videos can be played back and edited using the Cinematic framework.
        ///
        /// You can adjust the video's simulated aperture before starting a recording using the ``simulatedAperture`` property. With the Cinematic Video-specific focus methods on ``AVCaptureDevice``, you can dynamically control focus transitions.
        ///
        /// Movie files captured with Cinematic Video enabled can be played back and edited with the [Cinematic framework](https://developer.apple.com/documentation/cinematic/playing-and-editing-cinematic-mode-video?language=objc).
        ///
        /// This property returns `true` if the session's current configuration allows Cinematic Video capture. When switching cameras or formats, this property may change. When this property changes from `true` to `false`, ``cinematicVideoCaptureEnabled`` also reverts to `false`. If you've previously opted in for Cinematic Video capture and then change configuration, you may need to set ``cinematicVideoCaptureEnabled`` to `true` again. This property is key-value observable.
        ///
        /// - Note: ``AVCaptureDepthDataOutput`` is not supported when ``cinematicVideoCaptureEnabled`` is set to `true`. Running an ``AVCaptureSession`` with both of these features throws an `NSInvalidArgumentException`.
        #[unsafe(method(isCinematicVideoCaptureSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isCinematicVideoCaptureSupported(&self) -> bool;

        /// A BOOL value specifying whether the Cinematic Video effect is being applied to any movie file output, video data output, metadata output, or video preview layer added to the capture session.
        ///
        /// Default is `false`. Set to `true` to enable support for Cinematic Video capture.
        ///
        /// When you set this property to `true`, your input's associated ``AVCaptureDevice/focusMode`` changes to ``AVCaptureFocusModeContinuousAutoFocus``. While Cinematic Video capture is enabled, you are not permitted to change your device's focus mode, and any attempt to do so results in an `NSInvalidArgumentException`. You may only set this property to `true` if ``cinematicVideoCaptureSupported`` is `true`.
        ///
        /// - Note: Enabling Cinematic Video capture requires a lengthy reconfiguration of the capture render pipeline, so if you intend to capture Cinematic Video, you should set this property to `true` before calling ``AVCaptureSession/startRunning`` or within ``AVCaptureSession/beginConfiguration`` and ``AVCaptureSession/commitConfiguration`` while running.
        #[unsafe(method(isCinematicVideoCaptureEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isCinematicVideoCaptureEnabled(&self) -> bool;

        /// Setter for [`isCinematicVideoCaptureEnabled`][Self::isCinematicVideoCaptureEnabled].
        #[unsafe(method(setCinematicVideoCaptureEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCinematicVideoCaptureEnabled(&self, cinematic_video_capture_enabled: bool);

        /// Shallow depth of field simulated aperture.
        ///
        /// When capturing a Cinematic Video, use this property to control the amount of blur in the simulated depth of field effect.
        ///
        /// This property only takes effect when ``cinematicVideoCaptureEnabled`` is set to `true`.
        ///
        /// - Important: Setting this property to a value less than the ``AVCaptureDevice/activeFormat/minSimulatedAperture`` or greater than the ``AVCaptureDevice/activeFormat/maxSimulatedAperture`` throws an `NSRangeException`. You may only set this property if ``AVCaptureDevice/activeFormat/minSimulatedAperture`` returns a non-zero value, otherwise an `NSInvalidArgumentException` is thrown. You must set this property before starting a Cinematic Video capture. If you attempt to set it while a recording is in progress, an `NSInvalidArgumentException` is thrown.
        ///
        /// This property is initialized to the associated ``AVCaptureDevice/activeFormat/defaultSimulatedAperture``.
        ///
        /// This property is key-value observable.
        #[unsafe(method(simulatedAperture))]
        #[unsafe(method_family = none)]
        pub unsafe fn simulatedAperture(&self) -> c_float;

        /// Setter for [`simulatedAperture`][Self::simulatedAperture].
        #[unsafe(method(setSimulatedAperture:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setSimulatedAperture(&self, simulated_aperture: c_float);
    );
}
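
// Example (not part of the generated bindings): a minimal sketch of opening a
// device for capture and opting into stereo audio plus wind noise removal,
// using only methods declared above. Obtaining the `AVCaptureDevice` itself
// (e.g. via a discovery session) is out of scope here.
#[cfg(feature = "AVCaptureDevice")]
#[allow(dead_code)]
fn make_stereo_device_input(
    device: &AVCaptureDevice,
) -> Result<Retained<AVCaptureDeviceInput>, Retained<NSError>> {
    unsafe {
        // Fails (returns Err) if the device is unavailable or already in use.
        let input = AVCaptureDeviceInput::deviceInputWithDevice_error(device)?;
        // Apply consistent auto exposure defaults regardless of how the active
        // format ends up being chosen (directly or via a session preset).
        input.setUnifiedAutoExposureDefaultsEnabled(true);
        // Only opt into a multichannel mode the receiver reports as supported.
        if input.isMultichannelAudioModeSupported(AVCaptureMultichannelAudioMode::Stereo) {
            input.setMultichannelAudioMode(AVCaptureMultichannelAudioMode::Stereo);
            // Wind noise removal is only available once a multichannel mode is active.
            if input.isWindNoiseRemovalSupported() {
                input.setWindNoiseRemovalEnabled(true);
            }
        }
        Ok(input)
    }
}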

/// Methods declared on superclass `AVCaptureInput`.
impl AVCaptureDeviceInput {
    extern_methods!(
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
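
// Example (not part of the generated bindings): a rough Rust translation of the
// Objective-C snippet in the `portsWithMediaType:sourceDeviceType:sourceDevicePosition:`
// documentation above. The `AVMediaTypeVideo` and
// `AVCaptureDeviceTypeBuiltInTelephotoCamera` statics and the
// `AVCaptureDevicePosition::Back` constant are assumed to be exposed by this
// crate's `AVMediaFormat` and `AVCaptureDevice` modules; verify the generated
// names before relying on them.
#[cfg(all(feature = "AVCaptureDevice", feature = "AVMediaFormat"))]
#[allow(dead_code)]
fn telephoto_video_port(
    dual_camera_input: &AVCaptureDeviceInput,
) -> Option<Retained<AVCaptureInputPort>> {
    unsafe {
        // Constituent device ports never appear in `ports`; they must be
        // discovered explicitly for use with AVCaptureMultiCamSession.
        dual_camera_input
            .portsWithMediaType_sourceDeviceType_sourceDevicePosition(
                Some(AVMediaTypeVideo),
                Some(AVCaptureDeviceTypeBuiltInTelephotoCamera),
                AVCaptureDevicePosition::Back,
            )
            .firstObject()
    }
}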

extern_class!(
    /// AVCaptureScreenInput is a concrete subclass of AVCaptureInput that provides an interface for capturing media from a screen or portion thereof.
    ///
    ///
    /// Instances of AVCaptureScreenInput are input sources for AVCaptureSession that provide media data from one of the screens connected to the system, represented by CGDirectDisplayIDs.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturescreeninput?language=objc)
    #[unsafe(super(AVCaptureInput, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCaptureScreenInput;
);

extern_conformance!(
    unsafe impl NSObjectProtocol for AVCaptureScreenInput {}
);

impl AVCaptureScreenInput {
    extern_methods!(
        /// Creates an AVCaptureScreenInput instance that provides media data from the main display.
        ///
        ///
        /// This method creates an instance of AVCaptureScreenInput using the main display, whose ID is returned by CGMainDisplayID().
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        #[cfg(feature = "objc2-core-graphics")]
        /// Creates an AVCaptureScreenInput instance that provides media data from the given display.
        ///
        ///
        /// Parameter `displayID`: The ID of the display from which to capture video. CGDirectDisplayID is defined in <CoreGraphics/CGDirectDisplay.h>.
        ///
        /// Returns: An AVCaptureScreenInput instance that provides data from the given screen, or nil, if the screen could not be used for capture.
        ///
        ///
        /// This method creates an instance of AVCaptureScreenInput that can be used to capture data from a display in an AVCaptureSession. This method validates the displayID. If the display cannot be used because it is not available on the system, for example, this method returns nil.
        #[unsafe(method(initWithDisplayID:))]
        #[unsafe(method_family = init)]
        pub unsafe fn initWithDisplayID(
            this: Allocated<Self>,
            display_id: CGDirectDisplayID,
        ) -> Option<Retained<Self>>;

        #[cfg(feature = "objc2-core-media")]
        /// A property indicating the screen input's minimum frame duration.
        ///
        ///
        /// An AVCaptureScreenInput's minFrameDuration is the reciprocal of its maximum frame rate. This property may be used to request a maximum frame rate at which the input produces video frames. The requested rate may not be achievable due to overall bandwidth, so actual frame rates may be lower.
        #[unsafe(method(minFrameDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn minFrameDuration(&self) -> CMTime;

        #[cfg(feature = "objc2-core-media")]
        /// Setter for [`minFrameDuration`][Self::minFrameDuration].
        #[unsafe(method(setMinFrameDuration:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMinFrameDuration(&self, min_frame_duration: CMTime);

        #[cfg(feature = "objc2-core-foundation")]
        /// A property indicating the bounding rectangle of the screen area to be captured in points.
        ///
        ///
        /// By default, AVCaptureScreenInput captures the entire area of the displayID with which it is associated. To limit the capture rectangle to a subsection of the screen, set the cropRect property, which defines a smaller section of the screen in the screen's coordinate system. The origin (0,0) is the bottom-left corner of the screen.
        #[unsafe(method(cropRect))]
        #[unsafe(method_family = none)]
        pub unsafe fn cropRect(&self) -> CGRect;

        #[cfg(feature = "objc2-core-foundation")]
        /// Setter for [`cropRect`][Self::cropRect].
        #[unsafe(method(setCropRect:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCropRect(&self, crop_rect: CGRect);

        #[cfg(feature = "objc2-core-foundation")]
        /// A property indicating the factor by which video buffers captured from the screen are to be scaled.
        ///
        ///
        /// By default, AVCaptureScreenInput captures the video buffers from the display at a scale factor of 1.0 (no scaling). Set this property to scale the buffers by a given factor. For instance, a 320x240 capture area with a scaleFactor of 2.0f produces video buffers at 640x480.
        #[unsafe(method(scaleFactor))]
        #[unsafe(method_family = none)]
        pub unsafe fn scaleFactor(&self) -> CGFloat;

        #[cfg(feature = "objc2-core-foundation")]
        /// Setter for [`scaleFactor`][Self::scaleFactor].
        #[unsafe(method(setScaleFactor:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setScaleFactor(&self, scale_factor: CGFloat);

        /// A property indicating whether mouse clicks should be highlighted in the captured output.
        ///
        ///
        /// By default, AVCaptureScreenInput does not highlight mouse clicks in its captured output. If this property is set to YES, mouse clicks are highlighted (a circle is drawn around the mouse for the duration of the click) in the captured output.
        #[unsafe(method(capturesMouseClicks))]
        #[unsafe(method_family = none)]
        pub unsafe fn capturesMouseClicks(&self) -> bool;

        /// Setter for [`capturesMouseClicks`][Self::capturesMouseClicks].
        #[unsafe(method(setCapturesMouseClicks:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCapturesMouseClicks(&self, captures_mouse_clicks: bool);

        /// A property indicating whether the cursor should be rendered to the captured output.
        ///
        ///
        /// By default, AVCaptureScreenInput draws the cursor in its captured output. If this property is set to NO, the captured output contains only the windows on the screen; the cursor is omitted. Note that the cursor position and mouse button state at the time of capture are preserved in CMSampleBuffers emitted from AVCaptureScreenInput. See the inline documentation for kCMIOSampleBufferAttachmentKey_MouseAndKeyboardModifiers in <CoreMediaIO/CMIOSampleBuffer.h>.
        #[unsafe(method(capturesCursor))]
        #[unsafe(method_family = none)]
        pub unsafe fn capturesCursor(&self) -> bool;

        /// Setter for [`capturesCursor`][Self::capturesCursor].
        #[unsafe(method(setCapturesCursor:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCapturesCursor(&self, captures_cursor: bool);

        /// A property indicating whether duplicate frames should be removed by the input.
        ///
        ///
        /// If this property is set to YES, AVCaptureScreenInput performs frame differencing and when it detects duplicate frames, it drops them. If set to NO, the captured output receives all frames from the input. Prior to 10.9 this value defaulted to YES. In 10.9 and later, it defaults to NO, as modern platforms support frame differencing in hardware-based encoders.
        ///
        /// As of 10.10, this property has been deprecated and is ignored. Clients wishing to re-create this functionality can use an AVCaptureVideoDataOutput and compare frame contents in their own code. If they wish to write a movie file, they can then pass the unique frames to an AVAssetWriterInput.
        #[deprecated = "No longer supported."]
        #[unsafe(method(removesDuplicateFrames))]
        #[unsafe(method_family = none)]
        pub unsafe fn removesDuplicateFrames(&self) -> bool;

        /// Setter for [`removesDuplicateFrames`][Self::removesDuplicateFrames].
        #[deprecated = "No longer supported."]
        #[unsafe(method(setRemovesDuplicateFrames:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setRemovesDuplicateFrames(&self, removes_duplicate_frames: bool);
    );
}
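
// Example (not part of the generated bindings): a minimal sketch of configuring
// a screen input, assuming the `objc2-core-graphics`, `objc2-core-foundation`
// and `objc2-core-media` features. The display ID and the CMTime frame duration
// are taken as parameters so no Core Graphics or Core Media construction
// helpers need to be assumed here.
#[cfg(all(
    feature = "objc2-core-graphics",
    feature = "objc2-core-foundation",
    feature = "objc2-core-media"
))]
#[allow(dead_code)]
fn make_screen_input(
    display_id: CGDirectDisplayID,
    min_frame_duration: CMTime,
) -> Option<Retained<AVCaptureScreenInput>> {
    unsafe {
        // Returns None if the display is not available for capture.
        let this = AVCaptureScreenInput::alloc();
        let input = AVCaptureScreenInput::initWithDisplayID(this, display_id)?;
        // Capture a 1280x720 region anchored at the bottom-left of the screen.
        input.setCropRect(CGRect {
            origin: CGPoint { x: 0.0, y: 0.0 },
            size: CGSize {
                width: 1280.0,
                height: 720.0,
            },
        });
        // Cap the frame rate and keep the cursor out of the captured frames.
        input.setMinFrameDuration(min_frame_duration);
        input.setCapturesCursor(false);
        Some(input)
    }
}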

extern_class!(
    /// AVCaptureMetadataInput is a concrete subclass of AVCaptureInput that provides a way for clients to supply AVMetadataItems to an AVCaptureSession.
    ///
    ///
    /// Instances of AVCaptureMetadataInput are input sources for AVCaptureSession that provide AVMetadataItems to an AVCaptureSession. AVCaptureMetadataInputs present one and only one AVCaptureInputPort, which currently may only be connected to an AVCaptureMovieFileOutput. The metadata supplied over the input port is provided by the client, and must conform to a client-supplied CMFormatDescription. The AVMetadataItems are supplied in an AVTimedMetadataGroup.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturemetadatainput?language=objc)
    #[unsafe(super(AVCaptureInput, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVCaptureMetadataInput;
);

extern_conformance!(
    unsafe impl NSObjectProtocol for AVCaptureMetadataInput {}
);

impl AVCaptureMetadataInput {
    extern_methods!(
        #[cfg(feature = "objc2-core-media")]
        /// Returns an AVCaptureMetadataInput instance that allows a client to provide AVTimedMetadataGroups to an AVCaptureSession.
        ///
        ///
        /// Parameter `desc`: A CMFormatDescription that defines the metadata to be supplied by the client. Throws an NSInvalidArgumentException if NULL is passed.
        ///
        /// Parameter `clock`: A CMClock that provides the timebase for the supplied samples. Throws an NSInvalidArgumentException if NULL is passed.
        ///
        /// Returns: An AVCaptureMetadataInput instance.
        ///
        ///
        /// This method returns an instance of AVCaptureMetadataInput that can be used to capture AVTimedMetadataGroups supplied by the client to an AVCaptureSession.
        #[unsafe(method(metadataInputWithFormatDescription:clock:))]
        #[unsafe(method_family = none)]
        pub unsafe fn metadataInputWithFormatDescription_clock(
            desc: &CMMetadataFormatDescription,
            clock: &CMClock,
        ) -> Retained<Self>;

        #[cfg(feature = "objc2-core-media")]
        /// Creates an AVCaptureMetadataInput instance that allows a client to provide AVTimedMetadataGroups to an AVCaptureSession.
        ///
        ///
        /// Parameter `desc`: A CMFormatDescription that defines the metadata to be supplied by the client. Throws an NSInvalidArgumentException if NULL is passed.
        ///
        /// Parameter `clock`: A CMClock that provides the timebase for the supplied samples. Throws an NSInvalidArgumentException if NULL is passed.
        ///
        /// Returns: An AVCaptureMetadataInput instance, or nil, if the device could not be used for capture.
        ///
        ///
        /// This method creates an instance of AVCaptureMetadataInput that can be used to capture AVTimedMetadataGroups supplied by the client to an AVCaptureSession.
        #[unsafe(method(initWithFormatDescription:clock:))]
        #[unsafe(method_family = init)]
        pub unsafe fn initWithFormatDescription_clock(
            this: Allocated<Self>,
            desc: &CMMetadataFormatDescription,
            clock: &CMClock,
        ) -> Retained<Self>;

        #[cfg(feature = "AVTimedMetadataGroup")]
        /// Provides metadata to the AVCaptureSession.
        ///
        ///
        /// Parameter `metadata`: An AVTimedMetadataGroup of metadata. Will throw an exception if nil. In order to denote a period of no metadata, an empty AVTimedMetadataGroup should be passed.
        ///
        ///
        /// The provided AVTimedMetadataGroup will be delivered to the AVCaptureSession. The group's presentation timestamp is expressed in the context of the clock supplied to the initializer. It is not required that the AVTimedMetadataGroup have a duration; an empty AVTimedMetadataGroup can be supplied to denote a period of no metadata.
        #[unsafe(method(appendTimedMetadataGroup:error:_))]
        #[unsafe(method_family = none)]
        pub unsafe fn appendTimedMetadataGroup_error(
            &self,
            metadata: &AVTimedMetadataGroup,
        ) -> Result<(), Retained<NSError>>;
    );
}
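
// Example (not part of the generated bindings): a minimal sketch of creating a
// metadata input and appending one timed metadata group, assuming the
// `objc2-core-media` and `AVTimedMetadataGroup` features. The format
// description, clock and group are taken as parameters; building them is a
// Core Media / AVTimedMetadataGroup concern outside this file.
#[cfg(all(feature = "objc2-core-media", feature = "AVTimedMetadataGroup"))]
#[allow(dead_code)]
fn append_metadata(
    desc: &CMMetadataFormatDescription,
    clock: &CMClock,
    group: &AVTimedMetadataGroup,
) -> Result<(), Retained<NSError>> {
    unsafe {
        // The input only accepts groups matching the supplied format description,
        // timestamped against the supplied clock.
        let input = AVCaptureMetadataInput::metadataInputWithFormatDescription_clock(desc, clock);
        input.appendTimedMetadataGroup_error(group)
    }
}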

/// Methods declared on superclass `AVCaptureInput`.
impl AVCaptureMetadataInput {
    extern_methods!(
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}