// objc2_av_foundation/generated/AVVideoCompositing.rs

1//! This file has been automatically generated by `objc2`'s `header-translator`.
2//! DO NOT EDIT
3use core::ffi::*;
4use core::ptr::NonNull;
5use objc2::__framework_prelude::*;
6#[cfg(feature = "objc2-core-foundation")]
7use objc2_core_foundation::*;
8#[cfg(feature = "objc2-core-image")]
9#[cfg(not(target_os = "watchos"))]
10use objc2_core_image::*;
11#[cfg(feature = "objc2-core-media")]
12use objc2_core_media::*;
13#[cfg(feature = "objc2-core-video")]
14use objc2_core_video::*;
15use objc2_foundation::*;
16
17use crate::*;
18
/// [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avpixelaspectratio?language=objc)
#[repr(C)]
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct AVPixelAspectRatio {
    /// Horizontal spacing component of the pixel aspect ratio.
    pub horizontalSpacing: NSInteger,
    /// Vertical spacing component of the pixel aspect ratio.
    pub verticalSpacing: NSInteger,
}
26
27unsafe impl Encode for AVPixelAspectRatio {
28    const ENCODING: Encoding =
29        Encoding::Struct("?", &[<NSInteger>::ENCODING, <NSInteger>::ENCODING]);
30}
31
// References to the struct are encoded as a plain pointer to its value encoding.
unsafe impl RefEncode for AVPixelAspectRatio {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
35
/// [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avedgewidths?language=objc)
#[cfg(feature = "objc2-core-foundation")]
#[repr(C)]
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct AVEdgeWidths {
    /// Left edge width. NOTE(review): units presumed to be pixels — confirm against Apple docs.
    pub left: CGFloat,
    /// Top edge width.
    pub top: CGFloat,
    /// Right edge width.
    pub right: CGFloat,
    /// Bottom edge width.
    pub bottom: CGFloat,
}
46
#[cfg(feature = "objc2-core-foundation")]
// SAFETY: `AVEdgeWidths` is `#[repr(C)]` and holds exactly four `CGFloat`
// fields, which is precisely what this struct encoding describes.
unsafe impl Encode for AVEdgeWidths {
    const ENCODING: Encoding = Encoding::Struct(
        "?",
        &[
            CGFloat::ENCODING,
            CGFloat::ENCODING,
            CGFloat::ENCODING,
            CGFloat::ENCODING,
        ],
    );
}
59
// References to the struct are encoded as a plain pointer to its value encoding.
#[cfg(feature = "objc2-core-foundation")]
unsafe impl RefEncode for AVEdgeWidths {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
64
// Binding for the Objective-C class `AVVideoCompositionRenderContext`,
// declared as a subclass of `NSObject`.
extern_class!(
    /// The context in which custom compositors render pixel buffers.
    ///
    /// Subclasses of this type that are used from Swift must fulfill the requirements of a Sendable type.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avvideocompositionrendercontext?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVVideoCompositionRenderContext;
);
75
// SAFETY: the class documentation above notes that Swift subclasses must meet
// Sendable requirements; the generated bindings accordingly expose the class as
// thread-safe. NOTE(review): asserted by header-translator metadata — confirm.
unsafe impl Send for AVVideoCompositionRenderContext {}

// SAFETY: see the `Send` impl directly above; same upstream assertion.
unsafe impl Sync for AVVideoCompositionRenderContext {}

// The class inherits from `NSObject` and thus conforms to `NSObjectProtocol`.
unsafe impl NSObjectProtocol for AVVideoCompositionRenderContext {}
81
impl AVVideoCompositionRenderContext {
    extern_methods!(
        #[cfg(feature = "objc2-core-foundation")]
        /// The render size (maps to the Objective-C `size` property).
        #[unsafe(method(size))]
        #[unsafe(method_family = none)]
        pub unsafe fn size(&self) -> CGSize;

        #[cfg(feature = "objc2-core-foundation")]
        /// The transform associated with rendering (maps to the `renderTransform` property).
        #[unsafe(method(renderTransform))]
        #[unsafe(method_family = none)]
        pub unsafe fn renderTransform(&self) -> CGAffineTransform;

        /// The render scale factor (maps to the `renderScale` property).
        #[unsafe(method(renderScale))]
        #[unsafe(method_family = none)]
        pub unsafe fn renderScale(&self) -> c_float;

        /// The pixel aspect ratio for rendered frames (maps to the `pixelAspectRatio` property).
        #[unsafe(method(pixelAspectRatio))]
        #[unsafe(method_family = none)]
        pub unsafe fn pixelAspectRatio(&self) -> AVPixelAspectRatio;

        #[cfg(feature = "objc2-core-foundation")]
        /// The edge widths for rendered frames (maps to the `edgeWidths` property).
        #[unsafe(method(edgeWidths))]
        #[unsafe(method_family = none)]
        pub unsafe fn edgeWidths(&self) -> AVEdgeWidths;

        /// Whether high-quality rendering is in effect (maps to the `highQualityRendering` property).
        #[unsafe(method(highQualityRendering))]
        #[unsafe(method_family = none)]
        pub unsafe fn highQualityRendering(&self) -> bool;

        #[cfg(feature = "AVVideoComposition")]
        /// The video composition being rendered (maps to the `videoComposition` property).
        #[unsafe(method(videoComposition))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoComposition(&self) -> Retained<AVVideoComposition>;

        #[cfg(feature = "objc2-core-video")]
        /// Vends a CVPixelBuffer to use for rendering
        ///
        /// The buffer will have its kCVImageBufferCleanApertureKey and kCVImageBufferPixelAspectRatioKey attachments set to match the current composition processor properties.
        #[unsafe(method(newPixelBuffer))]
        #[unsafe(method_family = new)]
        pub unsafe fn newPixelBuffer(&self) -> Option<Retained<CVPixelBuffer>>;
    );
}
125
/// Methods declared on superclass `NSObject`.
impl AVVideoCompositionRenderContext {
    extern_methods!(
        /// Initializes a newly allocated instance (Objective-C `-init`).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Allocates and initializes a new instance (Objective-C `+new`).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
138
// Binding for the Objective-C class `AVVideoCompositionRenderHint`, the hint
// object handed to `AVVideoCompositing::anticipateRenderingUsingHint` /
// `prerollForRenderingUsingHint` below.
extern_class!(
    /// [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avvideocompositionrenderhint?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVVideoCompositionRenderHint;
);
145
// SAFETY: the generated bindings expose this class as thread-safe.
// NOTE(review): asserted by header-translator metadata — confirm.
unsafe impl Send for AVVideoCompositionRenderHint {}

// SAFETY: see the `Send` impl directly above; same upstream assertion.
unsafe impl Sync for AVVideoCompositionRenderHint {}

// The class inherits from `NSObject` and thus conforms to `NSObjectProtocol`.
unsafe impl NSObjectProtocol for AVVideoCompositionRenderHint {}
151
// Accessors for the time range hint covering upcoming composition requests.
impl AVVideoCompositionRenderHint {
    extern_methods!(
        #[cfg(feature = "objc2-core-media")]
        /// The start time of the upcoming composition requests.
        #[unsafe(method(startCompositionTime))]
        #[unsafe(method_family = none)]
        pub unsafe fn startCompositionTime(&self) -> CMTime;

        #[cfg(feature = "objc2-core-media")]
        /// The end time of the upcoming composition requests.
        #[unsafe(method(endCompositionTime))]
        #[unsafe(method_family = none)]
        pub unsafe fn endCompositionTime(&self) -> CMTime;
    );
}
167
/// Methods declared on superclass `NSObject`.
impl AVVideoCompositionRenderHint {
    extern_methods!(
        /// Initializes a newly allocated instance (Objective-C `-init`).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Allocates and initializes a new instance (Objective-C `+new`).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
180
extern_protocol!(
    /// Defines properties and methods for custom video compositors
    ///
    /// For each AVFoundation object of class AVPlayerItem, AVAssetExportSession, AVAssetImageGenerator, or AVAssetReaderVideoCompositionOutput that has a non-nil value for its videoComposition property, when the value of the customVideoCompositorClass property of the AVVideoComposition is not Nil, AVFoundation creates and uses an instance of that custom video compositor class to process the instructions contained in the AVVideoComposition. The custom video compositor instance will be created when you invoke -setVideoComposition: with an instance of AVVideoComposition that's associated with a different custom video compositor class than the object was previously using.
    ///
    /// When creating instances of custom video compositors, AVFoundation initializes them by calling -init and then makes them available to you for further set-up or communication, if any is needed, as the value of the customVideoCompositor property of the object on which -setVideoComposition: was invoked.
    ///
    /// Custom video compositor instances will then be retained by the AVFoundation object for as long as the value of its videoComposition property indicates that an instance of the same custom video compositor class should be used, even if the value is changed from one instance of AVVideoComposition to another instance that's associated with the same custom video compositor class.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avvideocompositing?language=objc)
    pub unsafe trait AVVideoCompositing: NSObjectProtocol {
        /// Pixel buffer attributes describing source frames this compositor can
        /// accept (maps to `sourcePixelBufferAttributes`).
        /// NOTE(review): per Apple's docs a nil return means any attributes are
        /// acceptable — confirm.
        #[unsafe(method(sourcePixelBufferAttributes))]
        #[unsafe(method_family = none)]
        unsafe fn sourcePixelBufferAttributes(
            &self,
        ) -> Option<Retained<NSDictionary<NSString, AnyObject>>>;

        /// Pixel buffer attributes required of buffers created by the render
        /// context (maps to `requiredPixelBufferAttributesForRenderContext`).
        #[unsafe(method(requiredPixelBufferAttributesForRenderContext))]
        #[unsafe(method_family = none)]
        unsafe fn requiredPixelBufferAttributesForRenderContext(
            &self,
        ) -> Retained<NSDictionary<NSString, AnyObject>>;

        /// Called to notify the custom compositor that a composition will switch to a different render context
        ///
        /// Parameter `newRenderContext`: The render context that will be handling the video composition from this point
        ///
        /// Instances of classes implementing the AVVideoComposting protocol can implement this method to be notified when
        /// the AVVideoCompositionRenderContext instance handing a video composition changes. AVVideoCompositionRenderContext instances
        /// being immutable, such a change will occur every time there is a change in the video composition parameters.
        #[unsafe(method(renderContextChanged:))]
        #[unsafe(method_family = none)]
        unsafe fn renderContextChanged(&self, new_render_context: &AVVideoCompositionRenderContext);

        /// Directs a custom video compositor object to create a new pixel buffer composed asynchronously from a collection of sources.
        ///
        /// Parameter `asyncVideoCompositionRequest`: An instance of AVAsynchronousVideoCompositionRequest that provides context for the requested composition.
        ///
        /// The custom compositor is expected to invoke, either subsequently or immediately, either:
        /// -[AVAsynchronousVideoCompositionRequest finishWithComposedVideoFrame:] or
        /// -[AVAsynchronousVideoCompositionRequest finishWithError:]. If you intend to finish rendering the frame after your
        /// handling of this message returns, you must retain the instance of AVAsynchronousVideoCompositionRequest until after composition is finished.
        /// Note that if the custom compositor's implementation of -startVideoCompositionRequest: returns without finishing the composition immediately,
        /// it may be invoked again with another composition request before the prior request is finished; therefore in such cases the custom compositor should
        /// be prepared to manage multiple composition requests.
        ///
        /// If the rendered frame is exactly the same as one of the source frames, with no letterboxing, pillboxing or cropping needed,
        /// then the appropriate source pixel buffer may be returned (after CFRetain has been called on it).
        #[unsafe(method(startVideoCompositionRequest:))]
        #[unsafe(method_family = none)]
        unsafe fn startVideoCompositionRequest(
            &self,
            async_video_composition_request: &AVAsynchronousVideoCompositionRequest,
        );

        /// Directs a custom video compositor object to cancel or finish all pending video composition requests
        ///
        /// When receiving this message, a custom video compositor must block until it has either cancelled all pending frame requests,
        /// and called the finishCancelledRequest callback for each of them, or, if cancellation is not possible, finished processing of all the frames
        /// and called the finishWithComposedVideoFrame: callback for each of them.
        #[optional]
        #[unsafe(method(cancelAllPendingVideoCompositionRequests))]
        #[unsafe(method_family = none)]
        unsafe fn cancelAllPendingVideoCompositionRequests(&self);

        /// Indicates that clients can handle frames that contains wide color properties.
        ///
        ///
        /// Controls whether the client will receive frames that contain wide color information. Care should be taken to avoid clamping.
        #[optional]
        #[unsafe(method(supportsWideColorSourceFrames))]
        #[unsafe(method_family = none)]
        unsafe fn supportsWideColorSourceFrames(&self) -> bool;

        /// Indicates that the client's video compositor can handle frames that contain high dynamic range (HDR) properties.
        ///
        ///
        /// Controls whether the client will receive frames that contain HDR information.
        /// If this field is omitted or set to NO, the framework will convert HDR frames to standard dynamic range (SDR) with BT.709 transfer function before sending to the client.
        /// If this field is set to YES, the value of supportsWideColorSourceFrames will be ignored and assumed to be YES.
        #[optional]
        #[unsafe(method(supportsHDRSourceFrames))]
        #[unsafe(method_family = none)]
        unsafe fn supportsHDRSourceFrames(&self) -> bool;

        /// Whether the framework may color-conform source frames for this
        /// compositor (maps to `canConformColorOfSourceFrames`).
        /// NOTE(review): semantics inferred from the selector — confirm
        /// against Apple's documentation.
        #[optional]
        #[unsafe(method(canConformColorOfSourceFrames))]
        #[unsafe(method_family = none)]
        unsafe fn canConformColorOfSourceFrames(&self) -> bool;

        /// Informs a custom video compositor about upcoming rendering requests.
        ///
        /// Parameter `renderHint`: Information about the upcoming composition requests.
        ///
        /// In the method the compositor can load composition resources such as overlay images which will be needed in the anticipated rendering time range.
        ///
        /// Unlike -startVideoCompositionRequest, which is invoked only when the frame compositing is necessary, the framework typically calls this method every frame duration. It allows the custom compositor to load and unload a composition resource such as overlay images at an appropriate timing.
        ///
        /// In forward playback, renderHint's startCompositionTime is less than endCompositionTime. In reverse playback, its endCompositionTime is less than startCompositionTime. For seeking, startCompositionTime == endCompositionTime, which means the upcoming composition request time range is unknown and the compositor shouldn’t preload time associated composition resources eagerly.
        ///
        /// The method is guaranteed to be called before -startVideoCompositionRequest: for a given composition time.
        ///
        /// The method is synchronous. The implementation should return quickly because otherwise the playback would stall and cause frame drops.
        #[optional]
        #[unsafe(method(anticipateRenderingUsingHint:))]
        #[unsafe(method_family = none)]
        unsafe fn anticipateRenderingUsingHint(&self, render_hint: &AVVideoCompositionRenderHint);

        /// Tell a custom video compositor to perform any work in prerolling phase.
        ///
        /// Parameter `renderHint`: Information about the upcoming composition requests.
        ///
        /// The framework may perform prerolling to load media data to prime the render pipelines for smoother playback. This method is called in the prerolling phase so that the compositor can load composition resources such as overlay images which will be needed as soon as the playback starts.
        ///
        /// Not all rendering scenarios use prerolling. For example, the method won't be called while seeking.
        ///
        /// If called, the method is guaranteed to be invoked before the first -startVideoCompositionRequest: call.
        ///
        /// The method is synchronous. The prerolling won't finish until the method returns.
        #[optional]
        #[unsafe(method(prerollForRenderingUsingHint:))]
        #[unsafe(method_family = none)]
        unsafe fn prerollForRenderingUsingHint(&self, render_hint: &AVVideoCompositionRenderHint);
    }
);
306
// Binding for the Objective-C class `AVAsynchronousVideoCompositionRequest`,
// the request object handed to `AVVideoCompositing::startVideoCompositionRequest`.
extern_class!(
    /// [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avasynchronousvideocompositionrequest?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVAsynchronousVideoCompositionRequest;
);
313
// SAFETY: the class conforms to the Objective-C `NSCopying` protocol.
// NOTE(review): conformance inherited from header-translator metadata — confirm.
unsafe impl NSCopying for AVAsynchronousVideoCompositionRequest {}

// `copy` returns an object of the same class.
unsafe impl CopyingHelper for AVAsynchronousVideoCompositionRequest {
    type Result = Self;
}

// The class inherits from `NSObject` and thus conforms to `NSObjectProtocol`.
unsafe impl NSObjectProtocol for AVAsynchronousVideoCompositionRequest {}
321
impl AVAsynchronousVideoCompositionRequest {
    extern_methods!(
        /// The render context associated with this request (maps to the `renderContext` property).
        #[unsafe(method(renderContext))]
        #[unsafe(method_family = none)]
        pub unsafe fn renderContext(&self) -> Retained<AVVideoCompositionRenderContext>;

        #[cfg(feature = "objc2-core-media")]
        /// The composition time for this request (maps to the `compositionTime` property).
        #[unsafe(method(compositionTime))]
        #[unsafe(method_family = none)]
        pub unsafe fn compositionTime(&self) -> CMTime;

        /// Source track IDs, as NSNumber values (maps to the `sourceTrackIDs` property).
        #[unsafe(method(sourceTrackIDs))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceTrackIDs(&self) -> Retained<NSArray<NSNumber>>;

        /// Source sample data track IDs, as NSNumber values (maps to the `sourceSampleDataTrackIDs` property).
        #[unsafe(method(sourceSampleDataTrackIDs))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceSampleDataTrackIDs(&self) -> Retained<NSArray<NSNumber>>;

        /// The composition instruction this request is processing (maps to the `videoCompositionInstruction` property).
        #[unsafe(method(videoCompositionInstruction))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoCompositionInstruction(
            &self,
        ) -> Retained<ProtocolObject<dyn AVVideoCompositionInstructionProtocol>>;

        #[cfg(all(feature = "objc2-core-media", feature = "objc2-core-video"))]
        /// Returns the source CVPixelBufferRef for the given track ID
        ///
        /// Parameter `trackID`: The track ID for the requested source frame
        #[unsafe(method(sourceFrameByTrackID:))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceFrameByTrackID(
            &self,
            track_id: CMPersistentTrackID,
        ) -> Option<Retained<CVPixelBuffer>>;

        #[cfg(feature = "objc2-core-media")]
        /// Returns the source CMSampleBufferRef for the given track ID
        ///
        /// Parameter `trackID`: The track ID for the requested source sample buffer
        #[unsafe(method(sourceSampleBufferByTrackID:))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceSampleBufferByTrackID(
            &self,
            track_id: CMPersistentTrackID,
        ) -> Option<Retained<CMSampleBuffer>>;

        #[cfg(all(feature = "AVTimedMetadataGroup", feature = "objc2-core-media"))]
        /// Returns the source AVTimedMetadataGroup * for the given track ID
        ///
        /// Parameter `trackID`: The track ID for the requested source timed metadata group.
        #[unsafe(method(sourceTimedMetadataByTrackID:))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceTimedMetadataByTrackID(
            &self,
            track_id: CMPersistentTrackID,
        ) -> Option<Retained<AVTimedMetadataGroup>>;

        #[cfg(feature = "objc2-core-video")]
        /// The method that the custom compositor calls when composition succeeds.
        ///
        /// Parameter `composedVideoFrame`: The video frame to finish with.
        #[unsafe(method(finishWithComposedVideoFrame:))]
        #[unsafe(method_family = none)]
        pub unsafe fn finishWithComposedVideoFrame(&self, composed_video_frame: &CVPixelBuffer);

        /// The method that the custom compositor calls when composition fails.
        ///
        /// Parameter `error`: The error describing the failure.
        #[unsafe(method(finishWithError:))]
        #[unsafe(method_family = none)]
        pub unsafe fn finishWithError(&self, error: &NSError);

        /// The method that the custom compositor calls to acknowledge a cancelled request
        /// (see `cancelAllPendingVideoCompositionRequests` on the `AVVideoCompositing` protocol).
        #[unsafe(method(finishCancelledRequest))]
        #[unsafe(method_family = none)]
        pub unsafe fn finishCancelledRequest(&self);
    );
}
397
/// Methods declared on superclass `NSObject`.
impl AVAsynchronousVideoCompositionRequest {
    extern_methods!(
        /// Initializes a newly allocated instance (Objective-C `-init`).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Allocates and initializes a new instance (Objective-C `+new`).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
410
// Binding for the Objective-C class `AVAsynchronousCIImageFilteringRequest`,
// the Core Image–based filtering counterpart to
// `AVAsynchronousVideoCompositionRequest`.
extern_class!(
    /// [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avasynchronousciimagefilteringrequest?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVAsynchronousCIImageFilteringRequest;
);
417
// SAFETY: the class conforms to the Objective-C `NSCopying` protocol.
// NOTE(review): conformance inherited from header-translator metadata — confirm.
unsafe impl NSCopying for AVAsynchronousCIImageFilteringRequest {}

// `copy` returns an object of the same class.
unsafe impl CopyingHelper for AVAsynchronousCIImageFilteringRequest {
    type Result = Self;
}

// The class inherits from `NSObject` and thus conforms to `NSObjectProtocol`.
unsafe impl NSObjectProtocol for AVAsynchronousCIImageFilteringRequest {}
425
impl AVAsynchronousCIImageFilteringRequest {
    extern_methods!(
        #[cfg(feature = "objc2-core-foundation")]
        /// The render size (maps to the `renderSize` property).
        #[unsafe(method(renderSize))]
        #[unsafe(method_family = none)]
        pub unsafe fn renderSize(&self) -> CGSize;

        #[cfg(feature = "objc2-core-media")]
        /// The composition time for this request (maps to the `compositionTime` property).
        #[unsafe(method(compositionTime))]
        #[unsafe(method_family = none)]
        pub unsafe fn compositionTime(&self) -> CMTime;

        #[cfg(feature = "objc2-core-image")]
        #[cfg(not(target_os = "watchos"))]
        /// The CIImage for the source frame (maps to the `sourceImage` property).
        #[unsafe(method(sourceImage))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceImage(&self) -> Retained<CIImage>;

        #[cfg(feature = "objc2-core-image")]
        #[cfg(not(target_os = "watchos"))]
        /// Completes the request with the filtered image
        /// (Objective-C `-finishWithImage:context:`); the optional `context`
        /// is the CIContext to use. NOTE(review): exact context semantics —
        /// confirm against Apple's documentation.
        #[unsafe(method(finishWithImage:context:))]
        #[unsafe(method_family = none)]
        pub unsafe fn finishWithImage_context(
            &self,
            filtered_image: &CIImage,
            context: Option<&CIContext>,
        );

        /// Completes the request with an error describing the failure
        /// (Objective-C `-finishWithError:`).
        #[unsafe(method(finishWithError:))]
        #[unsafe(method_family = none)]
        pub unsafe fn finishWithError(&self, error: &NSError);
    );
}
459
/// Methods declared on superclass `NSObject`.
impl AVAsynchronousCIImageFilteringRequest {
    extern_methods!(
        /// Initializes a newly allocated instance (Objective-C `-init`).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Allocates and initializes a new instance (Objective-C `+new`).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
472
extern_protocol!(
    /// The AVVideoCompositionInstruction protocol is implemented by objects to represent operations to be performed by a compositor.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avvideocompositioninstructionprotocol?language=objc)
    #[name = "AVVideoCompositionInstruction"]
    pub unsafe trait AVVideoCompositionInstructionProtocol: NSObjectProtocol {
        #[cfg(feature = "objc2-core-media")]
        /// The time range to which the instruction applies (maps to the `timeRange` property).
        #[unsafe(method(timeRange))]
        #[unsafe(method_family = none)]
        unsafe fn timeRange(&self) -> CMTimeRange;

        /// Whether post processing is enabled for the instruction
        /// (maps to the `enablePostProcessing` property).
        #[unsafe(method(enablePostProcessing))]
        #[unsafe(method_family = none)]
        unsafe fn enablePostProcessing(&self) -> bool;

        /// Maps to the `containsTweening` property. NOTE(review): per Apple's
        /// docs this indicates whether composed frames vary over the time
        /// range — confirm.
        #[unsafe(method(containsTweening))]
        #[unsafe(method_family = none)]
        unsafe fn containsTweening(&self) -> bool;

        /// Track IDs (as NSValue) of source frames this instruction requires
        /// (maps to the `requiredSourceTrackIDs` property).
        #[unsafe(method(requiredSourceTrackIDs))]
        #[unsafe(method_family = none)]
        unsafe fn requiredSourceTrackIDs(&self) -> Option<Retained<NSArray<NSValue>>>;

        #[cfg(feature = "objc2-core-media")]
        /// Maps to the `passthroughTrackID` property. NOTE(review): per
        /// Apple's docs a valid track ID here means the source frame of that
        /// track can be passed through unmodified — confirm.
        #[unsafe(method(passthroughTrackID))]
        #[unsafe(method_family = none)]
        unsafe fn passthroughTrackID(&self) -> CMPersistentTrackID;

        /// Track IDs (as NSNumber) of source sample data this instruction
        /// requires (maps to `requiredSourceSampleDataTrackIDs`).
        #[optional]
        #[unsafe(method(requiredSourceSampleDataTrackIDs))]
        #[unsafe(method_family = none)]
        unsafe fn requiredSourceSampleDataTrackIDs(&self) -> Retained<NSArray<NSNumber>>;
    }
);