objc2_av_foundation/generated/AVCaptureMetadataOutput.rs
//! This file has been automatically generated by `objc2`'s `header-translator`.
//! DO NOT EDIT
use core::ffi::*;
use core::ptr::NonNull;
#[cfg(feature = "dispatch2")]
use dispatch2::*;
use objc2::__framework_prelude::*;
#[cfg(feature = "objc2-core-foundation")]
use objc2_core_foundation::*;
use objc2_foundation::*;

use crate::*;

extern_class!(
    /// AVCaptureMetadataOutput is a concrete subclass of AVCaptureOutput that can be used to process metadata objects from an attached connection.
    ///
    ///
    /// Instances of AVCaptureMetadataOutput emit arrays of AVMetadataObject instances (see AVMetadataObject.h), such as detected faces. Applications can access the metadata objects with the captureOutput:didOutputMetadataObjects:fromConnection: delegate method.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturemetadataoutput?language=objc)
    #[unsafe(super(AVCaptureOutput, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "AVCaptureOutputBase")]
    pub struct AVCaptureMetadataOutput;
);

#[cfg(feature = "AVCaptureOutputBase")]
extern_conformance!(
    unsafe impl NSObjectProtocol for AVCaptureMetadataOutput {}
);

#[cfg(feature = "AVCaptureOutputBase")]
impl AVCaptureMetadataOutput {
    extern_methods!(
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        #[cfg(feature = "dispatch2")]
        /// Sets the receiver's delegate that will accept metadata objects and the dispatch queue on which the delegate will be called.
        ///
        ///
        /// Parameter `objectsDelegate`: An object conforming to the AVCaptureMetadataOutputObjectsDelegate protocol that will receive metadata objects after they are captured.
        ///
        /// Parameter `objectsCallbackQueue`: A dispatch queue on which all delegate methods will be called.
        ///
        ///
        /// When new metadata objects are captured in the receiver's connection, they will be vended to the delegate using the captureOutput:didOutputMetadataObjects:fromConnection: delegate method. All delegate methods will be called on the specified dispatch queue.
        ///
        /// Clients that need to minimize the chances of metadata being dropped should specify a queue on which a sufficiently small amount of processing is performed along with receiving metadata objects.
        ///
        /// A serial dispatch queue must be used to guarantee that metadata objects will be delivered in order. The objectsCallbackQueue parameter may not be NULL, except when setting the objectsDelegate to nil; otherwise -setMetadataObjectsDelegate:queue: throws an NSInvalidArgumentException.
        ///
        /// # Safety
        ///
        /// `objects_callback_queue` possibly has additional threading requirements.
        #[unsafe(method(setMetadataObjectsDelegate:queue:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMetadataObjectsDelegate_queue(
            &self,
            objects_delegate: Option<&ProtocolObject<dyn AVCaptureMetadataOutputObjectsDelegate>>,
            objects_callback_queue: Option<&DispatchQueue>,
        );

        /// The receiver's delegate.
        ///
        ///
        /// The value of this property is an object conforming to the AVCaptureMetadataOutputObjectsDelegate protocol that will receive metadata objects after they are captured. The delegate is set using the setMetadataObjectsDelegate:queue: method.
        #[unsafe(method(metadataObjectsDelegate))]
        #[unsafe(method_family = none)]
        pub unsafe fn metadataObjectsDelegate(
            &self,
        ) -> Option<Retained<ProtocolObject<dyn AVCaptureMetadataOutputObjectsDelegate>>>;

        #[cfg(feature = "dispatch2")]
        /// The dispatch queue on which all metadata object delegate methods will be called.
        ///
        ///
        /// The value of this property is a dispatch_queue_t. The queue is set using the setMetadataObjectsDelegate:queue: method.
        #[unsafe(method(metadataObjectsCallbackQueue))]
        #[unsafe(method_family = none)]
        pub unsafe fn metadataObjectsCallbackQueue(&self) -> Option<Retained<DispatchQueue>>;

        #[cfg(feature = "AVMetadataObject")]
        /// Indicates the receiver's supported metadata object types.
        ///
        ///
        /// The value of this property is an NSArray of NSStrings corresponding to AVMetadataObjectType strings defined in AVMetadataObject.h -- one for each metadata object type supported by the receiver. Available metadata object types are dependent on the capabilities of the AVCaptureInputPort to which this receiver's AVCaptureConnection is connected. Clients may specify the types of objects they would like to process by calling setMetadataObjectTypes:. This property is key-value observable.
        #[unsafe(method(availableMetadataObjectTypes))]
        #[unsafe(method_family = none)]
        pub unsafe fn availableMetadataObjectTypes(
            &self,
        ) -> Retained<NSArray<AVMetadataObjectType>>;

        #[cfg(feature = "AVMetadataObject")]
        /// Specifies the types of metadata objects that the receiver should present to the client.
        ///
        ///
        /// AVCaptureMetadataOutput may detect and emit multiple metadata object types. For apps linked before iOS 7.0, the receiver defaults to capturing face metadata objects if supported (see -availableMetadataObjectTypes). For apps linked on or after iOS 7.0, the receiver captures no metadata objects by default. -setMetadataObjectTypes: throws an NSInvalidArgumentException if any elements in the array are not present in the -availableMetadataObjectTypes array.
        ///
        /// If you've set your AVCaptureMetadataOutput's connected input's `cinematicVideoCaptureEnabled` property to YES, you must set your `metadataObjectTypes` property to `requiredMetadataObjectTypesForCinematicVideoCapture` or an NSInvalidArgumentException is thrown.
        #[unsafe(method(metadataObjectTypes))]
        #[unsafe(method_family = none)]
        pub unsafe fn metadataObjectTypes(&self) -> Retained<NSArray<AVMetadataObjectType>>;

        #[cfg(feature = "AVMetadataObject")]
        /// Setter for [`metadataObjectTypes`][Self::metadataObjectTypes].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        #[unsafe(method(setMetadataObjectTypes:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMetadataObjectTypes(
            &self,
            metadata_object_types: Option<&NSArray<AVMetadataObjectType>>,
        );

        #[cfg(feature = "objc2-core-foundation")]
        /// Specifies a rectangle of interest for limiting the search area for visual metadata.
        ///
        ///
        /// The value of this property is a CGRect that determines the receiver's rectangle of interest for each frame of video. The rectangle's origin is top left and is relative to the coordinate space of the device providing the metadata. Specifying a rectOfInterest may improve detection performance for certain types of metadata. The default value of this property is the value CGRectMake(0, 0, 1, 1). Metadata objects whose bounds do not intersect with the rectOfInterest will not be returned.
        ///
        /// As of iOS 13, this property can be set without requiring a lengthy rebuild of the session in which video preview is disrupted.
        #[unsafe(method(rectOfInterest))]
        #[unsafe(method_family = none)]
        pub unsafe fn rectOfInterest(&self) -> CGRect;

        #[cfg(feature = "objc2-core-foundation")]
        /// Setter for [`rectOfInterest`][Self::rectOfInterest].
        #[unsafe(method(setRectOfInterest:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setRectOfInterest(&self, rect_of_interest: CGRect);

        #[cfg(feature = "AVMetadataObject")]
        /// The required metadata object types when Cinematic Video capture is enabled.
        ///
        /// Since the Cinematic Video algorithm requires a particular set of metadata objects to function optimally, you must set your ``metadataObjectTypes`` property to this property's returned value if you've set ``AVCaptureDeviceInput/cinematicVideoCaptureEnabled`` to `true` on the connected device input, otherwise an `NSInvalidArgumentException` is thrown.
        #[unsafe(method(requiredMetadataObjectTypesForCinematicVideoCapture))]
        #[unsafe(method_family = none)]
        pub unsafe fn requiredMetadataObjectTypesForCinematicVideoCapture(
            &self,
        ) -> Retained<NSArray<AVMetadataObjectType>>;
    );
}
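
// Illustrative usage sketch (not part of the generated bindings): one way to wire up an
// `AVCaptureMetadataOutput` after it has been added to an `AVCaptureSession`. Only the
// method calls declared above are used; the function itself and its parameters are
// hypothetical, and the example assumes the "AVCaptureOutputBase", "AVMetadataObject",
// "objc2-core-foundation" and "dispatch2" features are enabled.
#[cfg(all(
    feature = "AVCaptureOutputBase",
    feature = "AVMetadataObject",
    feature = "objc2-core-foundation",
    feature = "dispatch2"
))]
#[allow(dead_code)]
fn example_configure_metadata_output(
    output: &AVCaptureMetadataOutput,
    delegate: &ProtocolObject<dyn AVCaptureMetadataOutputObjectsDelegate>,
    queue: &DispatchQueue,
) {
    // Ask the output which object types the current connection can produce and opt in to
    // all of them; a real app would usually pick a subset, and -setMetadataObjectTypes:
    // throws if a requested type is missing from -availableMetadataObjectTypes.
    let available = unsafe { output.availableMetadataObjectTypes() };
    unsafe { output.setMetadataObjectTypes(Some(&available)) };

    // Optionally restrict detection to the center of the frame. The rectangle uses a
    // normalized, top-left-origin coordinate space; the default is (0, 0, 1, 1).
    let center = CGRect {
        origin: CGPoint { x: 0.25, y: 0.25 },
        size: CGSize { width: 0.5, height: 0.5 },
    };
    unsafe { output.setRectOfInterest(center) };

    // Deliver callbacks on a serial queue so metadata objects arrive in order; passing
    // None for the queue is only allowed when the delegate is also None.
    unsafe { output.setMetadataObjectsDelegate_queue(Some(delegate), Some(queue)) };
}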

extern_protocol!(
    /// Defines an interface for delegates of AVCaptureMetadataOutput to receive emitted objects.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturemetadataoutputobjectsdelegate?language=objc)
    pub unsafe trait AVCaptureMetadataOutputObjectsDelegate: NSObjectProtocol {
        #[cfg(all(
            feature = "AVCaptureOutputBase",
            feature = "AVCaptureSession",
            feature = "AVMetadataObject"
        ))]
        /// Called whenever an AVCaptureMetadataOutput instance emits new objects through a connection.
        ///
        ///
        /// Parameter `output`: The AVCaptureMetadataOutput instance that emitted the objects.
        ///
        /// Parameter `metadataObjects`: An array of AVMetadataObject subclasses (see AVMetadataObject.h).
        ///
        /// Parameter `connection`: The AVCaptureConnection through which the objects were emitted.
        ///
        ///
        /// Delegates receive this message whenever the output captures and emits new objects, as specified by its metadataObjectTypes property. Delegates can use the provided objects in conjunction with other APIs for further processing. This method will be called on the dispatch queue specified by the output's metadataObjectsCallbackQueue property. This method may be called frequently, so it must be efficient to prevent capture performance problems, including dropped metadata objects.
        ///
        /// Clients that need to reference metadata objects outside of the scope of this method must retain them and then release them when they are finished with them.
        #[optional]
        #[unsafe(method(captureOutput:didOutputMetadataObjects:fromConnection:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_didOutputMetadataObjects_fromConnection(
            &self,
            output: &AVCaptureMetadataOutput,
            metadata_objects: &NSArray<AVMetadataObject>,
            connection: &AVCaptureConnection,
        );
    }
);
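
// Illustrative sketch (not part of the generated bindings): a minimal delegate class
// implemented with `objc2::define_class!`, assuming objc2 0.6-style macros and the
// "AVCaptureOutputBase", "AVCaptureSession" and "AVMetadataObject" features. The class
// and method names are hypothetical; a real application would keep an instance of it
// alive for as long as it is registered via setMetadataObjectsDelegate:queue:.
#[cfg(all(
    feature = "AVCaptureOutputBase",
    feature = "AVCaptureSession",
    feature = "AVMetadataObject"
))]
objc2::define_class!(
    #[unsafe(super(NSObject))]
    #[name = "ExampleMetadataObjectsDelegate"]
    struct ExampleMetadataObjectsDelegate;

    unsafe impl NSObjectProtocol for ExampleMetadataObjectsDelegate {}

    unsafe impl AVCaptureMetadataOutputObjectsDelegate for ExampleMetadataObjectsDelegate {
        #[unsafe(method(captureOutput:didOutputMetadataObjects:fromConnection:))]
        fn did_output_metadata_objects(
            &self,
            _output: &AVCaptureMetadataOutput,
            metadata_objects: &NSArray<AVMetadataObject>,
            _connection: &AVCaptureConnection,
        ) {
            // This runs on the queue passed to setMetadataObjectsDelegate:queue:, so keep
            // the work here small to avoid dropped metadata objects.
            let _ = metadata_objects;
        }
    }
);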