// objc2_av_foundation/generated/AVVideoCompositing.rs
1//! This file has been automatically generated by `objc2`'s `header-translator`.
2//! DO NOT EDIT
3use core::ffi::*;
4use core::ptr::NonNull;
5use objc2::__framework_prelude::*;
6#[cfg(feature = "objc2-core-foundation")]
7use objc2_core_foundation::*;
8#[cfg(feature = "objc2-core-image")]
9#[cfg(not(target_os = "watchos"))]
10use objc2_core_image::*;
11#[cfg(feature = "objc2-core-media")]
12use objc2_core_media::*;
13#[cfg(feature = "objc2-core-video")]
14use objc2_core_video::*;
15use objc2_foundation::*;
16
17use crate::*;
18
/// A pixel aspect ratio expressed as a pair of integer spacings.
///
/// [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avpixelaspectratio?language=objc)
#[repr(C)]
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct AVPixelAspectRatio {
    /// The horizontal spacing component of the aspect ratio.
    pub horizontalSpacing: NSInteger,
    /// The vertical spacing component of the aspect ratio.
    pub verticalSpacing: NSInteger,
}
26
// SAFETY: `AVPixelAspectRatio` is `#[repr(C)]` with exactly two `NSInteger`
// fields, matching the struct encoding declared below. The "?" struct name
// mirrors the header translator's output for structs without an encoded name.
unsafe impl Encode for AVPixelAspectRatio {
    const ENCODING: Encoding =
        Encoding::Struct("?", &[<NSInteger>::ENCODING, <NSInteger>::ENCODING]);
}
31
// SAFETY: A reference to `AVPixelAspectRatio` is encoded as a pointer to the
// struct's own encoding.
unsafe impl RefEncode for AVPixelAspectRatio {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
35
/// Edge-processing thicknesses, one per frame edge, in pixels.
///
/// [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avedgewidths?language=objc)
#[cfg(feature = "objc2-core-foundation")]
#[repr(C)]
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct AVEdgeWidths {
    /// Width of the left edge region.
    pub left: CGFloat,
    /// Width of the top edge region.
    pub top: CGFloat,
    /// Width of the right edge region.
    pub right: CGFloat,
    /// Width of the bottom edge region.
    pub bottom: CGFloat,
}
46
// SAFETY: `AVEdgeWidths` is `#[repr(C)]` with exactly four `CGFloat` fields,
// matching the struct encoding declared below.
#[cfg(feature = "objc2-core-foundation")]
unsafe impl Encode for AVEdgeWidths {
    const ENCODING: Encoding = Encoding::Struct(
        "?",
        &[
            <CGFloat>::ENCODING,
            <CGFloat>::ENCODING,
            <CGFloat>::ENCODING,
            <CGFloat>::ENCODING,
        ],
    );
}
59
// SAFETY: A reference to `AVEdgeWidths` is encoded as a pointer to the
// struct's own encoding.
#[cfg(feature = "objc2-core-foundation")]
unsafe impl RefEncode for AVEdgeWidths {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
64
extern_class!(
    /// The AVVideoCompositionRenderContext class defines the context within which custom compositors render new output pixel buffers.
    ///
    /// An instance of AVVideoCompositionRenderContext provides size and scaling information and offers a service for efficiently providing pixel buffers from a managed pool of buffers.
    ///
    /// Subclasses of this type that are used from Swift must fulfill the requirements of a Sendable type.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avvideocompositionrendercontext?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVVideoCompositionRenderContext;
);
77
// SAFETY(review): emitted by `header-translator`; presumed to follow Apple's
// Sendable annotation for this class (see the class documentation above) —
// confirm against the generator's thread-safety metadata before relying on it.
unsafe impl Send for AVVideoCompositionRenderContext {}

// SAFETY(review): see the note on the `Send` impl above.
unsafe impl Sync for AVVideoCompositionRenderContext {}

// The class inherits from NSObject (see `#[unsafe(super(NSObject))]` above),
// so it conforms to NSObjectProtocol.
extern_conformance!(
    unsafe impl NSObjectProtocol for AVVideoCompositionRenderContext {}
);
85
/// Generated bindings for the instance methods of `AVVideoCompositionRenderContext`.
impl AVVideoCompositionRenderContext {
    extern_methods!(
        #[cfg(feature = "objc2-core-foundation")]
        /// Indicates the width and height for rendering frames.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(size))]
        #[unsafe(method_family = none)]
        pub unsafe fn size(&self) -> CGSize;

        #[cfg(feature = "objc2-core-foundation")]
        /// Transform to apply to the source image to incorporate renderScale, pixelAspectRatio, edgeWidths.
        /// The coordinate system origin is the top left corner of the buffer.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(renderTransform))]
        #[unsafe(method_family = none)]
        pub unsafe fn renderTransform(&self) -> CGAffineTransform;

        /// Indicates a scaling ratio that should be applied when rendering frames.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(renderScale))]
        #[unsafe(method_family = none)]
        pub unsafe fn renderScale(&self) -> c_float;

        /// Indicates the pixel aspect ratio for rendered frames.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(pixelAspectRatio))]
        #[unsafe(method_family = none)]
        pub unsafe fn pixelAspectRatio(&self) -> AVPixelAspectRatio;

        #[cfg(feature = "objc2-core-foundation")]
        /// Indicates the thickness of the edge processing region on the left, top, right and bottom edges, in pixels.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(edgeWidths))]
        #[unsafe(method_family = none)]
        pub unsafe fn edgeWidths(&self) -> AVEdgeWidths;

        /// Hints the custom compositor that it may use higher quality, potentially slower algorithms.
        /// Generally true for non real time use cases.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(highQualityRendering))]
        #[unsafe(method_family = none)]
        pub unsafe fn highQualityRendering(&self) -> bool;

        #[cfg(feature = "AVVideoComposition")]
        /// The AVVideoComposition being rendered.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(videoComposition))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoComposition(&self) -> Retained<AVVideoComposition>;

        #[cfg(feature = "objc2-core-video")]
        /// Vends a CVPixelBuffer to use for rendering
        ///
        /// The buffer will have its kCVImageBufferCleanApertureKey and kCVImageBufferPixelAspectRatioKey attachments set to match the current composition processor properties.
        // `method_family = new`: the returned buffer follows the Objective-C
        // `new` ownership convention, so the caller receives it already retained.
        #[unsafe(method(newPixelBuffer))]
        #[unsafe(method_family = new)]
        pub unsafe fn newPixelBuffer(&self) -> Option<Retained<CVPixelBuffer>>;
    );
}
180
/// Methods declared on superclass `NSObject`.
impl AVVideoCompositionRenderContext {
    extern_methods!(
        /// Initializes an allocated instance (standard `-init`).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Allocates and initializes a new instance (standard `+new`).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
193
extern_class!(
    /// An AVVideoCompositionRenderHint instance contains the information necessary for announcing upcoming rendering request time ranges.
    ///
    /// Subclasses of this type that are used from Swift must fulfill the requirements of a Sendable type.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avvideocompositionrenderhint?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVVideoCompositionRenderHint;
);
204
// SAFETY(review): emitted by `header-translator`; presumed to follow Apple's
// Sendable annotation for this class (see the class documentation above) —
// confirm against the generator's thread-safety metadata before relying on it.
unsafe impl Send for AVVideoCompositionRenderHint {}

// SAFETY(review): see the note on the `Send` impl above.
unsafe impl Sync for AVVideoCompositionRenderHint {}

// The class inherits from NSObject (see `#[unsafe(super(NSObject))]` above),
// so it conforms to NSObjectProtocol.
extern_conformance!(
    unsafe impl NSObjectProtocol for AVVideoCompositionRenderHint {}
);
212
/// Generated bindings for the instance methods of `AVVideoCompositionRenderHint`.
impl AVVideoCompositionRenderHint {
    extern_methods!(
        #[cfg(feature = "objc2-core-media")]
        /// The start time of the upcoming composition requests.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(startCompositionTime))]
        #[unsafe(method_family = none)]
        pub unsafe fn startCompositionTime(&self) -> CMTime;

        #[cfg(feature = "objc2-core-media")]
        /// The end time of the upcoming composition requests.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(endCompositionTime))]
        #[unsafe(method_family = none)]
        pub unsafe fn endCompositionTime(&self) -> CMTime;
    );
}
240
/// Methods declared on superclass `NSObject`.
impl AVVideoCompositionRenderHint {
    extern_methods!(
        /// Initializes an allocated instance (standard `-init`).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Allocates and initializes a new instance (standard `+new`).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
253
extern_protocol!(
    /// Defines properties and methods for custom video compositors
    ///
    /// For each AVFoundation object of class AVPlayerItem, AVAssetExportSession, AVAssetImageGenerator, or AVAssetReaderVideoCompositionOutput that has a non-nil value for its videoComposition property, when the value of the customVideoCompositorClass property of the AVVideoComposition is not Nil, AVFoundation creates and uses an instance of that custom video compositor class to process the instructions contained in the AVVideoComposition. The custom video compositor instance will be created when you invoke -setVideoComposition: with an instance of AVVideoComposition that's associated with a different custom video compositor class than the object was previously using.
    ///
    /// When creating instances of custom video compositors, AVFoundation initializes them by calling -init and then makes them available to you for further set-up or communication, if any is needed, as the value of the customVideoCompositor property of the object on which -setVideoComposition: was invoked.
    ///
    /// Custom video compositor instances will then be retained by the AVFoundation object for as long as the value of its videoComposition property indicates that an instance of the same custom video compositor class should be used, even if the value is changed from one instance of AVVideoComposition to another instance that's associated with the same custom video compositor class.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avvideocompositing?language=objc)
    pub unsafe trait AVVideoCompositing: NSObjectProtocol + Send + Sync {
        /// Indicates the kinds of source frame pixel buffer attributes a video compositor can accept as input.
        ///
        /// The property is required to provide kCVPixelBufferPixelFormatTypeKey along with the attributes for which the compositor needs specific values to work properly. If the attribute kCVPixelBufferPixelFormatTypeKey is missing an exception will be raised. If the custom compositor is meant to be used with an AVVideoCompositionCoreAnimationTool created using the videoCompositionCoreAnimationToolWithAdditionalLayer constructor, kCVPixelFormatType_32BGRA should be indicated as one of the supported pixel format types.
        ///
        /// Missing attributes will be set by the composition engine to values allowing the best performance. This property is queried once before any composition request is sent to the compositor. Changing source buffer attributes afterwards is not supported.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(sourcePixelBufferAttributes))]
        #[unsafe(method_family = none)]
        unsafe fn sourcePixelBufferAttributes(
            &self,
        ) -> Option<Retained<NSDictionary<NSString, AnyObject>>>;

        /// Indicates the pixel buffer attributes required by the video compositor for new buffers that it creates for processing.
        ///
        /// The property is required to provide kCVPixelBufferPixelFormatTypeKey along with attributes for which the compositor needs specific values to work properly. Omitted attributes will be supplied by the composition engine to allow for the best performance. If the attribute kCVPixelBufferPixelFormatTypeKey is missing an exception will be raised. The getter for requiredPixelBufferAttributesForRenderContext is typically invoked prior to the creation of a new render context; the combination of the attributes in the returned value and the additional attributes supplied by the composition engine will be used in the creation of subsequent render context's pixelBuffers.
        ///
        /// This property is queried once before any composition request is sent to the compositor. Changing required buffer attributes afterwards is not supported.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(requiredPixelBufferAttributesForRenderContext))]
        #[unsafe(method_family = none)]
        unsafe fn requiredPixelBufferAttributesForRenderContext(
            &self,
        ) -> Retained<NSDictionary<NSString, AnyObject>>;

        /// Called to notify the custom compositor that a composition will switch to a different render context
        ///
        /// Instances of classes implementing the AVVideoCompositing protocol can implement this method to be notified when
        /// the AVVideoCompositionRenderContext instance handling a video composition changes. AVVideoCompositionRenderContext instances
        /// being immutable, such a change will occur every time there is a change in the video composition parameters.
        ///
        /// - Parameter newRenderContext: The render context that will be handling the video composition from this point
        #[unsafe(method(renderContextChanged:))]
        #[unsafe(method_family = none)]
        unsafe fn renderContextChanged(&self, new_render_context: &AVVideoCompositionRenderContext);

        /// Directs a custom video compositor object to create a new pixel buffer composed asynchronously from a collection of sources.
        ///
        /// The custom compositor is expected to invoke, either subsequently or immediately, either:
        /// -[AVAsynchronousVideoCompositionRequest finishWithComposedVideoFrame:] or
        /// -[AVAsynchronousVideoCompositionRequest finishWithError:]. If you intend to finish rendering the frame after your
        /// handling of this message returns, you must retain the instance of AVAsynchronousVideoCompositionRequest until after composition is finished.
        /// Note that if the custom compositor's implementation of -startVideoCompositionRequest: returns without finishing the composition immediately,
        /// it may be invoked again with another composition request before the prior request is finished; therefore in such cases the custom compositor should
        /// be prepared to manage multiple composition requests.
        ///
        /// If the rendered frame is exactly the same as one of the source frames, with no letterboxing, pillboxing or cropping needed,
        /// then the appropriate source pixel buffer may be returned (after CFRetain has been called on it).
        ///
        /// - Parameter asyncVideoCompositionRequest: An instance of AVAsynchronousVideoCompositionRequest that provides context for the requested composition.
        #[unsafe(method(startVideoCompositionRequest:))]
        #[unsafe(method_family = none)]
        unsafe fn startVideoCompositionRequest(
            &self,
            async_video_composition_request: &AVAsynchronousVideoCompositionRequest,
        );

        /// Directs a custom video compositor object to cancel or finish all pending video composition requests
        ///
        /// When receiving this message, a custom video compositor must block until it has either cancelled all pending frame requests,
        /// and called the finishCancelledRequest callback for each of them, or, if cancellation is not possible, finished processing of all the frames
        /// and called the finishWithComposedVideoFrame: or finishWithComposedTaggedBufferGroup: callback for each of them.
        #[optional]
        #[unsafe(method(cancelAllPendingVideoCompositionRequests))]
        #[unsafe(method_family = none)]
        unsafe fn cancelAllPendingVideoCompositionRequests(&self);

        /// Indicates that clients can handle frames that contains wide color properties.
        ///
        /// Controls whether the client will receive frames that contain wide color information. Care should be taken to avoid clamping.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[optional]
        #[unsafe(method(supportsWideColorSourceFrames))]
        #[unsafe(method_family = none)]
        unsafe fn supportsWideColorSourceFrames(&self) -> bool;

        /// Indicates that the client's video compositor can handle frames that contain high dynamic range (HDR) properties.
        ///
        /// Controls whether the client will receive frames that contain HDR information.
        /// If this field is omitted or set to NO, the framework will convert HDR frames to standard dynamic range (SDR) with BT.709 transfer function before sending to the client.
        /// If this field is set to YES, the value of supportsWideColorSourceFrames will be ignored and assumed to be YES.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[optional]
        #[unsafe(method(supportsHDRSourceFrames))]
        #[unsafe(method_family = none)]
        unsafe fn supportsHDRSourceFrames(&self) -> bool;

        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[optional]
        #[unsafe(method(supportsSourceTaggedBuffers))]
        #[unsafe(method_family = none)]
        unsafe fn supportsSourceTaggedBuffers(&self) -> bool;

        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[optional]
        #[unsafe(method(canConformColorOfSourceFrames))]
        #[unsafe(method_family = none)]
        unsafe fn canConformColorOfSourceFrames(&self) -> bool;

        /// Informs a custom video compositor about upcoming rendering requests.
        ///
        /// In the method the compositor can load composition resources such as overlay images which will be needed in the anticipated rendering time range.
        ///
        /// Unlike -startVideoCompositionRequest, which is invoked only when the frame compositing is necessary, the framework typically calls this method every frame duration. It allows the custom compositor to load and unload a composition resource such as overlay images at an appropriate timing.
        ///
        /// In forward playback, renderHint's startCompositionTime is less than endCompositionTime. In reverse playback, its endCompositionTime is less than startCompositionTime. For seeking, startCompositionTime == endCompositionTime, which means the upcoming composition request time range is unknown and the compositor shouldn’t preload time associated composition resources eagerly.
        ///
        /// The method is guaranteed to be called before -startVideoCompositionRequest: for a given composition time.
        ///
        /// The method is synchronous. The implementation should return quickly because otherwise the playback would stall and cause frame drops.
        ///
        /// - Parameter renderHint: Information about the upcoming composition requests.
        #[optional]
        #[unsafe(method(anticipateRenderingUsingHint:))]
        #[unsafe(method_family = none)]
        unsafe fn anticipateRenderingUsingHint(&self, render_hint: &AVVideoCompositionRenderHint);

        /// Tell a custom video compositor to perform any work in prerolling phase.
        ///
        /// The framework may perform prerolling to load media data to prime the render pipelines for smoother playback. This method is called in the prerolling phase so that the compositor can load composition resources such as overlay images which will be needed as soon as the playback starts.
        ///
        /// Not all rendering scenarios use prerolling. For example, the method won't be called while seeking.
        ///
        /// If called, the method is guaranteed to be invoked before the first -startVideoCompositionRequest: call.
        ///
        /// The method is synchronous. The prerolling won't finish until the method returns.
        ///
        /// - Parameter renderHint: Information about the upcoming composition requests.
        #[optional]
        #[unsafe(method(prerollForRenderingUsingHint:))]
        #[unsafe(method_family = none)]
        unsafe fn prerollForRenderingUsingHint(&self, render_hint: &AVVideoCompositionRenderHint);
    }
);
426
extern_class!(
    /// An AVAsynchronousVideoCompositionRequest instance contains the information necessary for a video compositor to render an output pixel buffer. The video compositor must implement the AVVideoCompositing protocol.
    ///
    /// Subclasses of this type that are used from Swift must fulfill the requirements of a Sendable type.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avasynchronousvideocompositionrequest?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVAsynchronousVideoCompositionRequest;
);
437
// SAFETY(review): emitted by `header-translator`; presumed to follow Apple's
// Sendable annotation for this class (see the class documentation above) —
// confirm against the generator's thread-safety metadata before relying on it.
unsafe impl Send for AVAsynchronousVideoCompositionRequest {}

// SAFETY(review): see the note on the `Send` impl above.
unsafe impl Sync for AVAsynchronousVideoCompositionRequest {}

// The class conforms to NSCopying; `-copy` returns the same class.
extern_conformance!(
    unsafe impl NSCopying for AVAsynchronousVideoCompositionRequest {}
);

// `copy` on this type yields another `AVAsynchronousVideoCompositionRequest`.
unsafe impl CopyingHelper for AVAsynchronousVideoCompositionRequest {
    type Result = Self;
}

// The class inherits from NSObject (see `#[unsafe(super(NSObject))]` above),
// so it conforms to NSObjectProtocol.
extern_conformance!(
    unsafe impl NSObjectProtocol for AVAsynchronousVideoCompositionRequest {}
);
453
/// Generated bindings for the instance methods of `AVAsynchronousVideoCompositionRequest`.
impl AVAsynchronousVideoCompositionRequest {
    extern_methods!(
        /// The AVVideoCompositionRenderContext making the request
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(renderContext))]
        #[unsafe(method_family = none)]
        pub unsafe fn renderContext(&self) -> Retained<AVVideoCompositionRenderContext>;

        #[cfg(feature = "objc2-core-media")]
        /// The time for which the frame should be composed
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(compositionTime))]
        #[unsafe(method_family = none)]
        pub unsafe fn compositionTime(&self) -> CMTime;

        /// Track IDs of all the source video buffers that are available to compose the frame.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(sourceTrackIDs))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceTrackIDs(&self) -> Retained<NSArray<NSNumber>>;

        /// Track IDs of all the source sample data buffers that are available to compose the frame.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(sourceSampleDataTrackIDs))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceSampleDataTrackIDs(&self) -> Retained<NSArray<NSNumber>>;

        /// The AVVideoCompositionInstruction to use to compose the frame.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(videoCompositionInstruction))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoCompositionInstruction(
            &self,
        ) -> Retained<ProtocolObject<dyn AVVideoCompositionInstructionProtocol>>;

        #[cfg(all(feature = "objc2-core-media", feature = "objc2-core-video"))]
        /// Returns the source CVPixelBufferRef for the given track ID
        ///
        /// If the track contains tagged buffers, a pixel buffer from one of the tagged buffers will be returned.
        ///
        /// - Parameter trackID: The track ID for the requested source frame
        #[unsafe(method(sourceFrameByTrackID:))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceFrameByTrackID(
            &self,
            track_id: CMPersistentTrackID,
        ) -> Option<Retained<CVPixelBuffer>>;

        #[cfg(feature = "objc2-core-media")]
        /// Returns the source CMSampleBufferRef for the given track ID
        ///
        /// - Parameter trackID: The track ID for the requested source sample buffer
        #[unsafe(method(sourceSampleBufferByTrackID:))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceSampleBufferByTrackID(
            &self,
            track_id: CMPersistentTrackID,
        ) -> Option<Retained<CMSampleBuffer>>;

        #[cfg(all(feature = "AVTimedMetadataGroup", feature = "objc2-core-media"))]
        /// Returns the source AVTimedMetadataGroup * for the given track ID
        ///
        /// - Parameter trackID: The track ID for the requested source timed metadata group.
        #[unsafe(method(sourceTimedMetadataByTrackID:))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceTimedMetadataByTrackID(
            &self,
            track_id: CMPersistentTrackID,
        ) -> Option<Retained<AVTimedMetadataGroup>>;

        #[cfg(feature = "objc2-core-video")]
        /// The method that the custom compositor calls when composition succeeds.
        ///
        /// - Parameter composedVideoFrame: The video frame to finish with. Call finishWithComposedTaggedBufferGroup: instead if outputBufferDescription is non-nil.
        #[unsafe(method(finishWithComposedVideoFrame:))]
        #[unsafe(method_family = none)]
        pub unsafe fn finishWithComposedVideoFrame(&self, composed_video_frame: &CVPixelBuffer);

        /// callback the custom compositor should call when composition failed. The error parameter should describe the actual error.
        #[unsafe(method(finishWithError:))]
        #[unsafe(method_family = none)]
        pub unsafe fn finishWithError(&self, error: &NSError);

        /// callback the custom compositor should call for a request that has been cancelled.
        #[unsafe(method(finishCancelledRequest))]
        #[unsafe(method_family = none)]
        pub unsafe fn finishCancelledRequest(&self);

        #[cfg(feature = "objc2-core-media")]
        /// Returns the source CMTaggedBufferGroupRef for the given track ID.
        ///
        /// Returns nil if the video track does not contain tagged buffers. Returns nil if the track does not contain video. This function should only be called when supportsSourceTaggedBuffers is YES.
        ///
        /// - Parameter trackID: The track ID for the requested source tagged buffer group.
        #[unsafe(method(sourceTaggedBufferGroupByTrackID:))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceTaggedBufferGroupByTrackID(
            &self,
            track_id: CMPersistentTrackID,
        ) -> Option<Retained<CMTaggedBufferGroup>>;

        #[cfg(feature = "objc2-core-media")]
        /// The method that the custom compositor calls when composition succeeds.
        ///
        /// - Parameter taggedBufferGroup: The tagged buffer group containing the composed tagged buffers. The tagged buffers must be compatible with the outputBufferDescription specified in the video composition. The outputBufferDescription must not be nil when calling this function.
        /// NOTE: If ``AVVideoComposition/spatialConfigurations`` is not empty, then ``attach(spatialVideoConfiguration:to:)`` must be called with one of the spatial configurations. An exception will be thrown otherwise. Also, all pixel buffers must be associated with the same spatial configuration. An exception will be thrown otherwise.
        #[unsafe(method(finishWithComposedTaggedBufferGroup:))]
        #[unsafe(method_family = none)]
        pub unsafe fn finishWithComposedTaggedBufferGroup(
            &self,
            tagged_buffer_group: &CMTaggedBufferGroup,
        );

        #[cfg(all(feature = "AVSpatialVideoConfiguration", feature = "objc2-core-video"))]
        /// Associates the pixel buffer with the specified spatial configuration.
        /// - Parameters:
        /// - spatialVideoConfiguration: The spatial configuration to associate with the pixel buffer.
        /// - pixelBuffer: The pixel buffer to associate with the spatial configuration.
        /// NOTE: The spatial configuration must be one of the spatial configurations specified in the ``AVVideoComposition/spatialConfigurations`` property. An exception will be thrown otherwise.
        /// NOTE: All pixel buffers from the custom compositor must be associated with the same spatial configuration. An exception will be thrown otherwise.
        /// A spatial configuration with all nil values indicates the video is not spatial. A nil spatial configuration also indicates the video is not spatial. The value can be nil, which indicates the output will not be spatial, but a spatial configuration with all nil values must be in the ``AVVideoComposition/spatialConfigurations`` property or an exception will be thrown.
        #[unsafe(method(attachSpatialVideoConfiguration:toPixelBuffer:))]
        #[unsafe(method_family = none)]
        pub unsafe fn attachSpatialVideoConfiguration_toPixelBuffer(
            &self,
            spatial_video_configuration: Option<&AVSpatialVideoConfiguration>,
            pixel_buffer: &CVPixelBuffer,
        );
    );
}
609
/// Methods declared on superclass `NSObject`.
impl AVAsynchronousVideoCompositionRequest {
    extern_methods!(
        /// Initializes an allocated instance (standard `-init`).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Allocates and initializes a new instance (standard `+new`).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
622
extern_class!(
    /// Request object used when filtering video frames through Core Image.
    ///
    /// [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avasynchronousciimagefilteringrequest?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVAsynchronousCIImageFilteringRequest;
);
629
// The class conforms to NSCopying; `-copy` returns the same class.
extern_conformance!(
    unsafe impl NSCopying for AVAsynchronousCIImageFilteringRequest {}
);

// `copy` on this type yields another `AVAsynchronousCIImageFilteringRequest`.
unsafe impl CopyingHelper for AVAsynchronousCIImageFilteringRequest {
    type Result = Self;
}

// The class inherits from NSObject (see `#[unsafe(super(NSObject))]` above),
// so it conforms to NSObjectProtocol.
extern_conformance!(
    unsafe impl NSObjectProtocol for AVAsynchronousCIImageFilteringRequest {}
);
641
/// Generated bindings for the instance methods of `AVAsynchronousCIImageFilteringRequest`.
impl AVAsynchronousCIImageFilteringRequest {
    extern_methods!(
        #[cfg(feature = "objc2-core-foundation")]
        /// Width and height for rendering frames.
        #[unsafe(method(renderSize))]
        #[unsafe(method_family = none)]
        pub unsafe fn renderSize(&self) -> CGSize;

        #[cfg(feature = "objc2-core-media")]
        /// The time for which the frame should be filtered
        #[unsafe(method(compositionTime))]
        #[unsafe(method_family = none)]
        pub unsafe fn compositionTime(&self) -> CMTime;

        #[cfg(feature = "objc2-core-image")]
        #[cfg(not(target_os = "watchos"))]
        /// CIImage for the first enabled source video track. Unlike AVAsynchronousVideoCompositionRequest, renderContext.renderTransform is already applied to the source image.
        #[unsafe(method(sourceImage))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceImage(&self) -> Retained<CIImage>;

        #[cfg(feature = "objc2-core-image")]
        #[cfg(not(target_os = "watchos"))]
        /// Callback the filter should call when filtering succeeded. If context is nil then a default context will be used, GPU-accelerated if possible.
        ///
        /// It is safe to pass in the sourceImage in which case the filter will appear to have no effect, essentially functioning as a pass-through.
        #[unsafe(method(finishWithImage:context:))]
        #[unsafe(method_family = none)]
        pub unsafe fn finishWithImage_context(
            &self,
            filtered_image: &CIImage,
            context: Option<&CIContext>,
        );

        /// Callback the filter should call when filtering failed. The error parameter should describe the actual error.
        #[unsafe(method(finishWithError:))]
        #[unsafe(method_family = none)]
        pub unsafe fn finishWithError(&self, error: &NSError);
    );
}
682
/// Methods declared on superclass `NSObject`.
impl AVAsynchronousCIImageFilteringRequest {
    extern_methods!(
        /// Initializes an allocated instance (standard `-init`).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Allocates and initializes a new instance (standard `+new`).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
695
extern_protocol!(
    /// The AVVideoCompositionInstruction protocol is implemented by objects to represent operations to be performed by a compositor. Subclasses of this type that are used from Swift must fulfill the requirements of a Sendable type.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avvideocompositioninstructionprotocol?language=objc)
    // The Rust trait is suffixed with `Protocol` to avoid clashing with the
    // `AVVideoCompositionInstruction` class; `#[name]` keeps the real
    // Objective-C protocol name.
    #[doc(alias = "AVVideoCompositionInstruction")]
    #[name = "AVVideoCompositionInstruction"]
    pub unsafe trait AVVideoCompositionInstructionProtocol:
        NSObjectProtocol + Send + Sync
    {
        #[cfg(feature = "objc2-core-media")]
        /// Indicates the timeRange during which the instruction is effective. Note requirements for the timeRanges of instructions described in connection with AVVideoComposition's instructions key above.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(timeRange))]
        #[unsafe(method_family = none)]
        unsafe fn timeRange(&self) -> CMTimeRange;

        /// If NO, indicates that post-processing should be skipped for the duration of this instruction.
        /// See +[AVVideoCompositionCoreAnimationTool videoCompositionToolWithPostProcessingAsVideoLayer:inLayer:].
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(enablePostProcessing))]
        #[unsafe(method_family = none)]
        unsafe fn enablePostProcessing(&self) -> bool;

        /// If YES, rendering a frame from the same source buffers and the same composition instruction at 2 different compositionTime may yield different output frames. If NO, 2 such compositions would yield the same frame. The media pipeline may be able to avoid some duplicate processing when containsTweening is NO
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(containsTweening))]
        #[unsafe(method_family = none)]
        unsafe fn containsTweening(&self) -> bool;

        /// List of video track IDs required to compose frames for this instruction. If the value of this property is nil, all source tracks will be considered required for composition
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(requiredSourceTrackIDs))]
        #[unsafe(method_family = none)]
        unsafe fn requiredSourceTrackIDs(&self) -> Option<Retained<NSArray<NSValue>>>;

        #[cfg(feature = "objc2-core-media")]
        /// If for the duration of the instruction, the video composition result is one of the source frames, this property should return the corresponding track ID. The compositor won't be run for the duration of the instruction and the proper source frame will be used instead. The dimensions, clean aperture and pixel aspect ratio of the source buffer will be matched to the required values automatically
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(passthroughTrackID))]
        #[unsafe(method_family = none)]
        unsafe fn passthroughTrackID(&self) -> CMPersistentTrackID;

        /// List of sample data track IDs required to compose frames for this instruction. An empty array indicates that no sample data is required for this instruction.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[optional]
        #[unsafe(method(requiredSourceSampleDataTrackIDs))]
        #[unsafe(method_family = none)]
        unsafe fn requiredSourceSampleDataTrackIDs(&self) -> Retained<NSArray<NSNumber>>;
    }
);