objc2_av_foundation/generated/AVVideoComposition.rs
//! This file has been automatically generated by `objc2`'s `header-translator`.
//! DO NOT EDIT
use core::ffi::*;
use core::ptr::NonNull;
use objc2::__framework_prelude::*;
#[cfg(feature = "objc2-core-foundation")]
use objc2_core_foundation::*;
#[cfg(feature = "objc2-core-graphics")]
use objc2_core_graphics::*;
#[cfg(feature = "objc2-core-media")]
use objc2_core_media::*;
use objc2_foundation::*;
#[cfg(feature = "objc2-quartz-core")]
#[cfg(not(target_os = "watchos"))]
use objc2_quartz_core::*;

use crate::*;

/// Configures policy for per frame HDR display metadata
///
/// Determines what HDR display metadata should be attached to the rendered frame.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avvideocompositionperframehdrdisplaymetadatapolicy?language=objc)
// NS_TYPED_ENUM
pub type AVVideoCompositionPerFrameHDRDisplayMetadataPolicy = NSString;

extern "C" {
    /// Default. Pass the HDR metadata through, if present on the composed frame.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avvideocompositionperframehdrdisplaymetadatapolicypropagate?language=objc)
    pub static AVVideoCompositionPerFrameHDRDisplayMetadataPolicyPropagate:
        &'static AVVideoCompositionPerFrameHDRDisplayMetadataPolicy;
}

extern "C" {
    /// AVVideoComposition may generate HDR metadata and attach it to the rendered frame. HDR metadata generation is influenced by the color space of the rendered frame, device, and HDR metadata format platform support. Any previously attached HDR metadata of the same metadata format will be overwritten.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avvideocompositionperframehdrdisplaymetadatapolicygenerate?language=objc)
    pub static AVVideoCompositionPerFrameHDRDisplayMetadataPolicyGenerate:
        &'static AVVideoCompositionPerFrameHDRDisplayMetadataPolicy;
}
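
// Illustrative sketch (not part of the generated bindings): distinguishing the two
// per-frame HDR metadata policies at run time. The `composition` value is assumed to be
// an existing `AVVideoComposition`.
//
// ```rust,ignore
// unsafe {
//     let policy = composition.perFrameHDRDisplayMetadataPolicy();
//     if &*policy == AVVideoCompositionPerFrameHDRDisplayMetadataPolicyGenerate {
//         // The compositor may generate HDR metadata and attach it to rendered frames.
//     } else {
//         // Default (Propagate): HDR metadata on the composed frame is passed through.
//     }
// }
// ```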

extern_class!(
    /// An AVVideoComposition object represents an immutable video composition.
    ///
    /// A video composition describes, for any time in the aggregate time range of its instructions, the number and IDs of video tracks that are to be used in order to produce a composed video frame corresponding to that time. When AVFoundation's built-in video compositor is used, the instructions an AVVideoComposition contains can specify a spatial transformation, an opacity value, and a cropping rectangle for each video source, and these can vary over time via simple linear ramping functions.
    ///
    /// A client can implement their own custom video compositor by implementing the AVVideoCompositing protocol; a custom video compositor is provided with pixel buffers for each of its video sources during playback and other operations and can perform arbitrary graphical operations on them in order to produce visual output.
    ///
    /// Subclasses of this type that are used from Swift must fulfill the requirements of a Sendable type.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avvideocomposition?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVVideoComposition;
);

extern_conformance!(
    unsafe impl NSCopying for AVVideoComposition {}
);

unsafe impl CopyingHelper for AVVideoComposition {
    type Result = Self;
}

extern_conformance!(
    unsafe impl NSMutableCopying for AVVideoComposition {}
);

unsafe impl MutableCopyingHelper for AVVideoComposition {
    type Result = AVMutableVideoComposition;
}

extern_conformance!(
    unsafe impl NSObjectProtocol for AVVideoComposition {}
);

impl AVVideoComposition {
    extern_methods!(
        #[cfg(feature = "AVAsset")]
        /// Returns a new instance of AVVideoComposition with values and instructions suitable for presenting the video tracks of the specified asset according to its temporal and geometric properties and those of its tracks.
        ///
        /// The returned AVVideoComposition will have instructions that respect the spatial properties and timeRanges of the specified asset's video tracks.
        /// It will also have the following values for its properties:
        ///
        /// - If the asset has exactly one video track, the original timing of the source video track will be used. If the asset has more than one video track, and the nominal frame rate of any of the video tracks is known, the reciprocal of the greatest known nominalFrameRate will be used as the value of frameDuration. Otherwise, a default frame rate of 30 fps is used.
        /// - If the specified asset is an instance of AVComposition, the renderSize will be set to the naturalSize of the AVComposition; otherwise the renderSize will be set to a value that encompasses all of the asset's video tracks.
        /// - A renderScale of 1.0.
        /// - A nil animationTool.
        ///
        /// If the specified asset has no video tracks, this method will return an AVVideoComposition instance with an empty collection of instructions.
        ///
        /// - Parameter asset: An instance of AVAsset. Ensure that the duration and tracks properties of the asset are already loaded before invoking this method.
        ///
        /// - Returns: An instance of AVVideoComposition.
        #[deprecated = "Use videoCompositionWithPropertiesOfAsset:completionHandler: instead"]
        #[unsafe(method(videoCompositionWithPropertiesOfAsset:))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoCompositionWithPropertiesOfAsset(
            asset: &AVAsset,
        ) -> Retained<AVVideoComposition>;

        #[cfg(all(feature = "AVAsset", feature = "block2"))]
        /// Vends a new instance of AVVideoComposition with values and instructions suitable for presenting the video tracks of the specified asset according to its temporal and geometric properties and those of its tracks.
        ///
        /// The new AVVideoComposition will have instructions that respect the spatial properties and timeRanges of the specified asset's video tracks.
        /// It will also have the following values for its properties:
        ///
        /// - If the asset has exactly one video track, the original timing of the source video track will be used. If the asset has more than one video track, and the nominal frame rate of any of the video tracks is known, the reciprocal of the greatest known nominalFrameRate will be used as the value of frameDuration. Otherwise, a default frame rate of 30 fps is used.
        /// - If the specified asset is an instance of AVComposition, the renderSize will be set to the naturalSize of the AVComposition; otherwise the renderSize will be set to a value that encompasses all of the asset's video tracks.
        /// - A renderScale of 1.0.
        /// - A nil animationTool.
        ///
        /// If the specified asset has no video tracks, this method will return an AVVideoComposition instance with an empty collection of instructions.
        ///
        /// - Parameter asset: An instance of AVAsset.
        /// - Parameter completionHandler: A block that is invoked when the new video composition has finished being created. If the `videoComposition` parameter is nil, the `error` parameter describes the failure that occurred.
        ///
        /// # Safety
        ///
        /// `completion_handler` block must be sendable.
        #[unsafe(method(videoCompositionWithPropertiesOfAsset:completionHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoCompositionWithPropertiesOfAsset_completionHandler(
            asset: &AVAsset,
            completion_handler: &block2::DynBlock<dyn Fn(*mut AVVideoComposition, *mut NSError)>,
        );

        /// Pass-through initializer, for internal use in AVFoundation only
        #[unsafe(method(videoCompositionWithVideoComposition:))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoCompositionWithVideoComposition(
            video_composition: &AVVideoComposition,
        ) -> Retained<AVVideoComposition>;

        #[cfg(feature = "AVVideoCompositing")]
        /// Indicates a custom compositor class to use. The class must implement the AVVideoCompositing protocol. If nil, the default, internal video compositor is used
        #[unsafe(method(customVideoCompositorClass))]
        #[unsafe(method_family = none)]
        pub unsafe fn customVideoCompositorClass(&self) -> Option<&'static AnyClass>;

        #[cfg(feature = "objc2-core-media")]
        /// Indicates the interval which the video composition, when enabled, should render composed video frames
        #[unsafe(method(frameDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn frameDuration(&self) -> CMTime;

        #[cfg(feature = "objc2-core-media")]
        /// If sourceTrackIDForFrameTiming is not kCMPersistentTrackID_Invalid, frame timing for the video composition is derived from the source asset's track with the corresponding ID. This may be used to preserve a source asset's variable frame timing. If an empty edit is encountered in the source asset's track, the compositor composes frames as needed up to the frequency specified in the frameDuration property.
        #[unsafe(method(sourceTrackIDForFrameTiming))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceTrackIDForFrameTiming(&self) -> CMPersistentTrackID;

        #[cfg(feature = "objc2-core-foundation")]
        /// Indicates the size at which the video composition, when enabled, should render
        #[unsafe(method(renderSize))]
        #[unsafe(method_family = none)]
        pub unsafe fn renderSize(&self) -> CGSize;

        /// Indicates the scale at which the video composition should render. May only be other than 1.0 for a video composition set on an AVPlayerItem
        #[unsafe(method(renderScale))]
        #[unsafe(method_family = none)]
        pub unsafe fn renderScale(&self) -> c_float;

        #[cfg(feature = "AVVideoCompositing")]
        /// Indicates instructions for video composition via an NSArray of instances of classes implementing the AVVideoCompositionInstruction protocol. For the first instruction in the array, timeRange.start must be less than or equal to the earliest time for which playback or other processing will be attempted (note that this will typically be kCMTimeZero). For subsequent instructions, timeRange.start must be equal to the prior instruction's end time. The end time of the last instruction must be greater than or equal to the latest time for which playback or other processing will be attempted (note that this will often be the duration of the asset with which the instance of AVVideoComposition is associated).
        #[unsafe(method(instructions))]
        #[unsafe(method_family = none)]
        pub unsafe fn instructions(
            &self,
        ) -> Retained<NSArray<ProtocolObject<dyn AVVideoCompositionInstructionProtocol>>>;

        /// Indicates a special video composition tool for use of Core Animation; may be nil
        #[unsafe(method(animationTool))]
        #[unsafe(method_family = none)]
        pub unsafe fn animationTool(&self)
            -> Option<Retained<AVVideoCompositionCoreAnimationTool>>;

        /// List of all track IDs for tracks from which sample data should be presented to the compositor at any point in the overall composition. The sample data will be delivered to the custom compositor via AVAsynchronousVideoCompositionRequest.
        #[unsafe(method(sourceSampleDataTrackIDs))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceSampleDataTrackIDs(&self) -> Retained<NSArray<NSNumber>>;

        /// The output buffers of the video composition can be specified with the outputBufferDescription. The value is an array of CMTagCollectionRef objects that describes the output buffers.
        ///
        /// If the video composition will output tagged buffers, the details of those buffers should be specified with CMTags. Specifically, the StereoView (eyes) and ProjectionKind must be specified. The behavior is undefined if the output tagged buffers do not match the outputBufferDescription.
        /// The default is nil, which means monoscopic output. Note that an empty array is not valid. An exception will be thrown if the objects in the array are not of type CMTagCollectionRef.
        /// Note that tagged buffers are only supported for custom compositors.
        #[unsafe(method(outputBufferDescription))]
        #[unsafe(method_family = none)]
        pub unsafe fn outputBufferDescription(&self) -> Option<Retained<NSArray>>;

        #[cfg(feature = "AVSpatialVideoConfiguration")]
        /// Indicates the spatial configurations that are available to associate with the output of the video composition.
        ///
        /// A custom compositor can output spatial video by specifying one of these spatial configurations. A nil spatial configuration, or one with all nil values, indicates the output is not spatial.
        /// NOTE: If this property is not empty, then the client must attach one of the spatial configurations in this array to all of the pixel buffers, otherwise an exception will be thrown.
        #[unsafe(method(spatialVideoConfigurations))]
        #[unsafe(method_family = none)]
        pub unsafe fn spatialVideoConfigurations(
            &self,
        ) -> Retained<NSArray<AVSpatialVideoConfiguration>>;
    );
}
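
// Illustrative sketch (not part of the generated bindings): building a composition from
// an asset's properties with the completion-handler API. It assumes `block2::RcBlock`
// (which derefs to the `DynBlock` type expected above) and an `asset` obtained elsewhere.
//
// ```rust,ignore
// use block2::RcBlock;
//
// unsafe fn composition_for(asset: &AVAsset) {
//     // The handler receives either a composition or an error; both are nullable raw
//     // pointers, mirroring the Objective-C completion handler.
//     let handler = RcBlock::new(|composition: *mut AVVideoComposition, error: *mut NSError| {
//         if !composition.is_null() {
//             // e.g. hand the composition to an AVPlayerItem
//         } else {
//             // inspect `error`
//         }
//     });
//     AVVideoComposition::videoCompositionWithPropertiesOfAsset_completionHandler(asset, &handler);
// }
// ```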

/// Methods declared on superclass `NSObject`.
impl AVVideoComposition {
    extern_methods!(
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}

/// AVVideoCompositionColorimetery.
///
/// Indicates the color space of the frames output from the video composition.
///
/// Collectively the properties colorPrimaries, colorYCbCrMatrix, and colorTransferFunction define the color space that the rendered frames will be tagged with. For custom video compositing these properties are also used to specify the required color space of the source frames.
///
/// For examples of common color spaces see AVVideoSettings.h.
///
/// How to preserve the color space of the source frames:
///
/// Decide which color space should be preserved by examining the source asset's video tracks. Copy the source track's primaries, matrix and transfer function into the video composition's colorPrimaries, colorYCbCrMatrix and colorTransferFunction respectively.
///
/// - When using custom video compositing
/// Setting these properties will cause source frames to be converted into the specified color space and tagged as such. New frames allocated using -[AVVideoCompositionRenderContext newPixelBuffer] will also be tagged correctly.
///
/// - When using Core Image via videoCompositionWithAsset:options:applyingCIFiltersWithHandler:
/// Setting these properties will cause source frames to be converted into the specified color space and tagged as such. The source frames provided as CIImages will have the appropriate CGColorSpace applied. The color space is preserved when the output CIImage is finally rendered internally.
///
/// - When using basic compositing (i.e. AVVideoCompositionLayerInstruction)
/// Setting these properties will ensure that the internal compositor renders (or passes through) frames in the specified color space and tags them as such.
impl AVVideoComposition {
    extern_methods!(
        /// Rendering will use these primaries and frames will be tagged as such. If the value of this property is nil then the source's primaries will be propagated and used.
        ///
        /// Default is nil. Valid values are those suitable for AVVideoColorPrimariesKey. Generally set as a triple along with colorYCbCrMatrix and colorTransferFunction.
        #[unsafe(method(colorPrimaries))]
        #[unsafe(method_family = none)]
        pub unsafe fn colorPrimaries(&self) -> Option<Retained<NSString>>;

        /// Rendering will use this matrix and frames will be tagged as such. If the value of this property is nil then the source's matrix will be propagated and used.
        ///
        /// Default is nil. Valid values are those suitable for AVVideoYCbCrMatrixKey. Generally set as a triple along with colorPrimaries and colorTransferFunction.
        #[unsafe(method(colorYCbCrMatrix))]
        #[unsafe(method_family = none)]
        pub unsafe fn colorYCbCrMatrix(&self) -> Option<Retained<NSString>>;

        /// Rendering will use this transfer function and frames will be tagged as such. If the value of this property is nil then the source's transfer function will be propagated and used.
        ///
        /// Default is nil. Valid values are those suitable for AVVideoTransferFunctionKey. Generally set as a triple along with colorPrimaries and colorYCbCrMatrix.
        #[unsafe(method(colorTransferFunction))]
        #[unsafe(method_family = none)]
        pub unsafe fn colorTransferFunction(&self) -> Option<Retained<NSString>>;

        /// Configures policy for per frame HDR display metadata on the rendered frame
        ///
        /// Allows the system to identify situations where HDR metadata can be generated and attached to the rendered video frame.
        /// Default is AVVideoCompositionPerFrameHDRDisplayMetadataPolicyPropagate. Any HDR metadata attached to the composed frame will be propagated to the rendered video frames.
        #[unsafe(method(perFrameHDRDisplayMetadataPolicy))]
        #[unsafe(method_family = none)]
        pub unsafe fn perFrameHDRDisplayMetadataPolicy(
            &self,
        ) -> Retained<AVVideoCompositionPerFrameHDRDisplayMetadataPolicy>;
    );
}

/// AVVideoCompositionFiltering.
impl AVVideoComposition {
    extern_methods!(
        #[cfg(all(
            feature = "AVAsset",
            feature = "AVVideoCompositing",
            feature = "block2"
        ))]
        /// Returns a new instance of AVVideoComposition with values and instructions that will apply the specified handler block to video frames represented as instances of CIImage.
        ///
        /// The returned AVVideoComposition will cause the specified handler block to be called to filter each frame of the asset's first enabled video track. The handler block should use the properties of the provided AVAsynchronousCIImageFilteringRequest and respond using finishWithImage:context: with a "filtered" new CIImage (or the provided source image for no effect). In the event of an error, respond to the request using finishWithError:. The error can be observed via AVPlayerItemFailedToPlayToEndTimeNotification; see AVPlayerItemFailedToPlayToEndTimeErrorKey in the notification payload.
        ///
        /// NOTE: The returned AVVideoComposition's properties are private and support only CIFilter-based operations. Mutations are not supported, either in the values of properties of the AVVideoComposition itself or in its private instructions. If rotations or other transformations are desired, they must be accomplished via the application of CIFilters during the execution of your specified handler.
        ///
        /// The video composition will also have the following values for its properties:
        ///
        /// - The original timing of the asset's first enabled video track will be used.
        /// - A renderSize that encompasses the asset's first enabled video track respecting the track's preferredTransform.
        /// - A renderScale of 1.0.
        ///
        /// The default CIContext has the following properties:
        ///
        /// - iOS: Device RGB color space
        /// - macOS: sRGB color space
        ///
        /// Example usage:
        /// ```objc
        /// playerItem.videoComposition = [AVVideoComposition videoCompositionWithAsset:srcAsset applyingCIFiltersWithHandler:
        ///     ^(AVAsynchronousCIImageFilteringRequest *request)
        ///     {
        ///         NSError *err = nil;
        ///         CIImage *filtered = myRenderer(request,
        ///             &err
        ///         );
        ///         if (filtered)
        ///             [request finishWithImage:filtered context:nil];
        ///         else
        ///             [request finishWithError:err];
        ///     }];
        /// ```
        ///
        /// - Parameter asset: An instance of AVAsset. For best performance, ensure that the duration and tracks properties of the asset are already loaded before invoking this method.
        ///
        /// - Returns: An instance of AVVideoComposition.
        ///
        /// # Safety
        ///
        /// `applier` block must be sendable.
        #[deprecated = "Use videoCompositionWithAsset:applyingCIFiltersWithHandler:completionHandler: instead"]
        #[unsafe(method(videoCompositionWithAsset:applyingCIFiltersWithHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoCompositionWithAsset_applyingCIFiltersWithHandler(
            asset: &AVAsset,
            applier: &block2::DynBlock<dyn Fn(NonNull<AVAsynchronousCIImageFilteringRequest>)>,
        ) -> Retained<AVVideoComposition>;

        #[cfg(all(
            feature = "AVAsset",
            feature = "AVVideoCompositing",
            feature = "block2"
        ))]
        /// Vends a new instance of AVVideoComposition with values and instructions that will apply the specified handler block to video frames represented as instances of CIImage.
        ///
        /// The new AVVideoComposition will cause the specified handler block to be called to filter each frame of the asset's first enabled video track. The handler block should use the properties of the provided AVAsynchronousCIImageFilteringRequest and respond using finishWithImage:context: with a "filtered" new CIImage (or the provided source image for no effect). In the event of an error, respond to the request using finishWithError:. The error can be observed via AVPlayerItemFailedToPlayToEndTimeNotification; see AVPlayerItemFailedToPlayToEndTimeErrorKey in the notification payload.
        ///
        /// NOTE: The returned AVVideoComposition's properties are private and support only CIFilter-based operations. Mutations are not supported, either in the values of properties of the AVVideoComposition itself or in its private instructions. If rotations or other transformations are desired, they must be accomplished via the application of CIFilters during the execution of your specified handler.
        ///
        /// The video composition will also have the following values for its properties:
        ///
        /// - The original timing of the asset's first enabled video track will be used.
        /// - A renderSize that encompasses the asset's first enabled video track respecting the track's preferredTransform.
        /// - A renderScale of 1.0.
        ///
        /// The default CIContext has the following properties:
        ///
        /// - iOS: Device RGB color space
        /// - macOS: sRGB color space
        ///
        /// Example usage:
        /// ```objc
        /// [AVVideoComposition videoCompositionWithAsset:srcAsset applyingCIFiltersWithHandler:
        ///     ^(AVAsynchronousCIImageFilteringRequest *request)
        ///     {
        ///         NSError *err = nil;
        ///         CIImage *filtered = myRenderer(request,
        ///             &err
        ///         );
        ///         if (filtered)
        ///             [request finishWithImage:filtered context:nil];
        ///         else
        ///             [request finishWithError:err];
        ///     } completionHandler:
        ///     ^(AVVideoComposition * _Nullable videoComposition, NSError * _Nullable error)
        ///     {
        ///         if (videoComposition != nil) {
        ///             playerItem.videoComposition = videoComposition;
        ///         } else {
        ///             // handle error
        ///         }
        ///     }];
        /// ```
        /// - Parameter asset: An instance of AVAsset.
        /// - Parameter completionHandler: A block that is invoked when the new video composition has finished being created. If the `videoComposition` parameter is nil, the `error` parameter describes the failure that occurred.
        ///
        /// # Safety
        ///
        /// - `applier` block must be sendable.
        /// - `completion_handler` block must be sendable.
        #[unsafe(method(videoCompositionWithAsset:applyingCIFiltersWithHandler:completionHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoCompositionWithAsset_applyingCIFiltersWithHandler_completionHandler(
            asset: &AVAsset,
            applier: &block2::DynBlock<dyn Fn(NonNull<AVAsynchronousCIImageFilteringRequest>)>,
            completion_handler: &block2::DynBlock<dyn Fn(*mut AVVideoComposition, *mut NSError)>,
        );
    );
}
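
// Illustrative sketch (not part of the generated bindings): the Rust counterpart of the
// Objective-C CIFilter example above, using the deprecated single-argument variant for
// brevity. `apply_my_filter` is a hypothetical helper; the `RcBlock` constructor and its
// deref to `DynBlock` are assumptions about the `block2` crate in use.
//
// ```rust,ignore
// use block2::RcBlock;
// use core::ptr::NonNull;
//
// unsafe fn filtered_composition(asset: &AVAsset) -> Retained<AVVideoComposition> {
//     let applier = RcBlock::new(|request: NonNull<AVAsynchronousCIImageFilteringRequest>| {
//         let request = request.as_ref();
//         // Build a filtered CIImage from the request's source image and finish the
//         // request (finishWithImage:context: on success, finishWithError: on failure).
//         apply_my_filter(request); // hypothetical helper
//     });
//     AVVideoComposition::videoCompositionWithAsset_applyingCIFiltersWithHandler(asset, &applier)
// }
// ```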

extern_class!(
    /// [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avmutablevideocomposition?language=objc)
    #[unsafe(super(AVVideoComposition, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    pub struct AVMutableVideoComposition;
);

extern_conformance!(
    unsafe impl NSCopying for AVMutableVideoComposition {}
);

unsafe impl CopyingHelper for AVMutableVideoComposition {
    type Result = AVVideoComposition;
}

extern_conformance!(
    unsafe impl NSMutableCopying for AVMutableVideoComposition {}
);

unsafe impl MutableCopyingHelper for AVMutableVideoComposition {
    type Result = Self;
}

extern_conformance!(
    unsafe impl NSObjectProtocol for AVMutableVideoComposition {}
);

impl AVMutableVideoComposition {
    extern_methods!(
        /// Returns a new instance of AVMutableVideoComposition.
        ///
        /// The returned AVMutableVideoComposition will have a frameDuration of kCMTimeZero, a renderSize of {0.0, 0.0}, a nil array of instructions, and a nil animationTool.
        #[unsafe(method(videoComposition))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoComposition() -> Retained<AVMutableVideoComposition>;

        #[cfg(feature = "AVAsset")]
        /// Returns a new instance of AVMutableVideoComposition with values and instructions suitable for presenting the video tracks of the specified asset according to its temporal and geometric properties and those of its tracks.
        ///
        /// The returned AVMutableVideoComposition will have instructions that respect the spatial properties and timeRanges of the specified asset's video tracks. The client can set sourceTrackIDForFrameTiming to kCMPersistentTrackID_Invalid and frameDuration to an appropriate value in order to specify the maximum output frame rate independent of the source track timing.
        /// It will also have the following values for its properties:
        ///
        /// - If the asset has exactly one video track, the original timing of the source video track will be used. If the asset has more than one video track, and the nominal frame rate of any of the video tracks is known, the reciprocal of the greatest known nominalFrameRate will be used as the value of frameDuration. Otherwise, a default frame rate of 30 fps is used.
        /// - If the specified asset is an instance of AVComposition, the renderSize will be set to the naturalSize of the AVComposition; otherwise the renderSize will be set to a value that encompasses all of the asset's video tracks.
        /// - A renderScale of 1.0.
        /// - A nil animationTool.
        ///
        /// If the specified asset has no video tracks, this method will return an AVMutableVideoComposition instance with an empty collection of instructions.
        ///
        /// - Parameter asset: An instance of AVAsset. For best performance, ensure that the duration and tracks properties of the asset are already loaded before invoking this method.
        ///
        /// - Returns: An instance of AVMutableVideoComposition.
        #[deprecated = "Use videoCompositionWithPropertiesOfAsset:completionHandler: instead"]
        #[unsafe(method(videoCompositionWithPropertiesOfAsset:))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoCompositionWithPropertiesOfAsset(
            asset: &AVAsset,
        ) -> Retained<AVMutableVideoComposition>;

        #[cfg(all(feature = "AVAsset", feature = "block2"))]
        /// Vends a new instance of AVMutableVideoComposition with values and instructions suitable for presenting the video tracks of the specified asset according to its temporal and geometric properties and those of its tracks.
        ///
        /// The new AVMutableVideoComposition will have instructions that respect the spatial properties and timeRanges of the specified asset's video tracks. The client can set sourceTrackIDForFrameTiming to kCMPersistentTrackID_Invalid and frameDuration to an appropriate value in order to specify the maximum output frame rate independent of the source track timing.
        /// It will also have the following values for its properties:
        ///
        /// - If the asset has exactly one video track, the original timing of the source video track will be used. If the asset has more than one video track, and the nominal frame rate of any of the video tracks is known, the reciprocal of the greatest known nominalFrameRate will be used as the value of frameDuration. Otherwise, a default frame rate of 30 fps is used.
        /// - If the specified asset is an instance of AVComposition, the renderSize will be set to the naturalSize of the AVComposition; otherwise the renderSize will be set to a value that encompasses all of the asset's video tracks.
        /// - A renderScale of 1.0.
        /// - A nil animationTool.
        ///
        /// If the specified asset has no video tracks, this method will return an AVMutableVideoComposition instance with an empty collection of instructions.
        ///
        /// - Parameter asset: An instance of AVAsset.
        /// - Parameter completionHandler: A block that is invoked when the new video composition has finished being created. If the `videoComposition` parameter is nil, the `error` parameter describes the failure that occurred.
        ///
        /// # Safety
        ///
        /// `completion_handler` block must be sendable.
        #[unsafe(method(videoCompositionWithPropertiesOfAsset:completionHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoCompositionWithPropertiesOfAsset_completionHandler(
            asset: &AVAsset,
            completion_handler: &block2::DynBlock<
                dyn Fn(*mut AVMutableVideoComposition, *mut NSError),
            >,
        );

        #[cfg(feature = "AVAsset")]
        /// Returns a new instance of AVMutableVideoComposition with values and instructions suitable for presenting the video tracks of the specified asset according to its temporal and geometric properties and those of its tracks, and also overrides default properties with those from a prototypeInstruction.
        ///
        /// Also see videoCompositionWithPropertiesOfAsset:.
        /// The returned AVVideoComposition will have instructions that respect the spatial properties and timeRanges of the specified asset's video tracks. Anything not pertaining to spatial layout and timing, such as background color for the composition or post-processing behaviors, is eligible to be specified via a prototype instruction.
        /// Example: To add a background color,
        /// ```objc
        /// myPrototypeInstruction = [[AVMutableVideoCompositionInstruction alloc] init];
        /// myPrototypeInstruction.backgroundColor = myCGColorRef; // Do not use constant CGColorRef colors here.
        /// myVideoComposition = [AVVideoComposition videoCompositionWithPropertiesOfAsset:myAsset prototypeInstruction:myPrototypeInstruction];
        /// ```
        /// - Parameter asset: An instance of AVAsset. For best performance, ensure that the duration and tracks properties of the asset are already loaded before invoking this method.
        /// - Parameter prototypeInstruction: Custom instructions that the client can choose to override.
        ///
        /// - Returns: An instance of AVMutableVideoComposition.
        #[deprecated = "Use videoCompositionWithPropertiesOfAsset:prototypeInstruction:completionHandler: instead"]
        #[unsafe(method(videoCompositionWithPropertiesOfAsset:prototypeInstruction:))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoCompositionWithPropertiesOfAsset_prototypeInstruction(
            asset: &AVAsset,
            prototype_instruction: &AVVideoCompositionInstruction,
        ) -> Retained<AVMutableVideoComposition>;

        #[cfg(all(feature = "AVAsset", feature = "block2"))]
        /// Vends a new instance of AVMutableVideoComposition with values and instructions suitable for presenting the video tracks of the specified asset according to its temporal and geometric properties and those of its tracks, and also overrides default properties with those from a prototypeInstruction.
        ///
        /// Also see videoCompositionWithPropertiesOfAsset:completionHandler:.
        /// The new AVMutableVideoComposition will have instructions that respect the spatial properties and timeRanges of the specified asset's video tracks. Anything not pertaining to spatial layout and timing, such as background color for the composition or post-processing behaviors, is eligible to be specified via a prototype instruction.
        /// Example: To add a background color,
        /// ```objc
        /// myPrototypeInstruction = [[AVMutableVideoCompositionInstruction alloc] init];
        /// myPrototypeInstruction.backgroundColor = myCGColorRef; // Do not use constant CGColorRef colors here.
        /// myVideoComposition = [AVVideoComposition videoCompositionWithPropertiesOfAsset:myAsset prototypeInstruction:myPrototypeInstruction completionHandler:^(AVMutableVideoComposition * _Nullable myVideoComposition, NSError * _Nullable error) {
        ///     if (myVideoComposition != nil) {
        ///         // use myVideoComposition
        ///     }
        ///     else {
        ///         // handle error
        ///     }
        /// }];
        /// ```
        /// - Parameter asset: An instance of AVAsset.
        /// - Parameter prototypeInstruction: Custom instructions that the client can choose to override.
        /// - Parameter completionHandler: A block that is invoked when the new video composition has finished being created. If the `videoComposition` parameter is nil, the `error` parameter describes the failure that occurred.
        ///
        /// # Safety
        ///
        /// `completion_handler` block must be sendable.
        #[unsafe(method(videoCompositionWithPropertiesOfAsset:prototypeInstruction:completionHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoCompositionWithPropertiesOfAsset_prototypeInstruction_completionHandler(
            asset: &AVAsset,
            prototype_instruction: &AVVideoCompositionInstruction,
            completion_handler: &block2::DynBlock<
                dyn Fn(*mut AVMutableVideoComposition, *mut NSError),
            >,
        );

        #[cfg(feature = "AVVideoCompositing")]
        /// Indicates the custom compositor class to use. If nil, the default, internal video compositor is used
        #[unsafe(method(customVideoCompositorClass))]
        #[unsafe(method_family = none)]
        pub unsafe fn customVideoCompositorClass(&self) -> Option<&'static AnyClass>;

        #[cfg(feature = "AVVideoCompositing")]
        /// Setter for [`customVideoCompositorClass`][Self::customVideoCompositorClass].
        ///
        /// # Safety
        ///
        /// `custom_video_compositor_class` must implement AVVideoCompositing.
        #[unsafe(method(setCustomVideoCompositorClass:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCustomVideoCompositorClass(
            &self,
            custom_video_compositor_class: Option<&AnyClass>,
        );

        #[cfg(feature = "objc2-core-media")]
        /// Indicates the interval which the video composition, when enabled, should render composed video frames
        #[unsafe(method(frameDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn frameDuration(&self) -> CMTime;

        #[cfg(feature = "objc2-core-media")]
        /// Setter for [`frameDuration`][Self::frameDuration].
        #[unsafe(method(setFrameDuration:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setFrameDuration(&self, frame_duration: CMTime);

        #[cfg(feature = "objc2-core-media")]
        /// If sourceTrackIDForFrameTiming is not kCMPersistentTrackID_Invalid, frame timing for the video composition is derived from the source asset's track with the corresponding ID. This may be used to preserve a source asset's variable frame timing. If an empty edit is encountered in the source asset's track, the compositor composes frames as needed up to the frequency specified in the frameDuration property.
        #[unsafe(method(sourceTrackIDForFrameTiming))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceTrackIDForFrameTiming(&self) -> CMPersistentTrackID;

        #[cfg(feature = "objc2-core-media")]
        /// Setter for [`sourceTrackIDForFrameTiming`][Self::sourceTrackIDForFrameTiming].
        #[unsafe(method(setSourceTrackIDForFrameTiming:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setSourceTrackIDForFrameTiming(
            &self,
            source_track_id_for_frame_timing: CMPersistentTrackID,
        );

        #[cfg(feature = "objc2-core-foundation")]
        /// Indicates the size at which the video composition, when enabled, should render
        #[unsafe(method(renderSize))]
        #[unsafe(method_family = none)]
        pub unsafe fn renderSize(&self) -> CGSize;

        #[cfg(feature = "objc2-core-foundation")]
        /// Setter for [`renderSize`][Self::renderSize].
        #[unsafe(method(setRenderSize:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setRenderSize(&self, render_size: CGSize);

        /// Indicates the scale at which the video composition should render. May only be other than 1.0 for a video composition set on an AVPlayerItem
        #[unsafe(method(renderScale))]
        #[unsafe(method_family = none)]
        pub unsafe fn renderScale(&self) -> c_float;

        /// Setter for [`renderScale`][Self::renderScale].
        #[unsafe(method(setRenderScale:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setRenderScale(&self, render_scale: c_float);

        #[cfg(feature = "AVVideoCompositing")]
        /// Indicates instructions for video composition via an NSArray of instances of classes implementing the AVVideoCompositionInstruction protocol.
        ///
        /// For the first instruction in the array, timeRange.start must be less than or equal to the earliest time for which playback or other processing will be attempted (note that this will typically be kCMTimeZero). For subsequent instructions, timeRange.start must be equal to the prior instruction's end time. The end time of the last instruction must be greater than or equal to the latest time for which playback or other processing will be attempted (note that this will often be the duration of the asset with which the instance of AVVideoComposition is associated).
        #[unsafe(method(instructions))]
        #[unsafe(method_family = none)]
        pub unsafe fn instructions(
            &self,
        ) -> Retained<NSArray<ProtocolObject<dyn AVVideoCompositionInstructionProtocol>>>;

        #[cfg(feature = "AVVideoCompositing")]
        /// Setter for [`instructions`][Self::instructions].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        #[unsafe(method(setInstructions:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setInstructions(
            &self,
            instructions: &NSArray<ProtocolObject<dyn AVVideoCompositionInstructionProtocol>>,
        );

        /// Indicates a special video composition tool for use of Core Animation; may be nil
        #[unsafe(method(animationTool))]
        #[unsafe(method_family = none)]
        pub unsafe fn animationTool(&self)
            -> Option<Retained<AVVideoCompositionCoreAnimationTool>>;

        /// Setter for [`animationTool`][Self::animationTool].
        #[unsafe(method(setAnimationTool:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAnimationTool(
            &self,
            animation_tool: Option<&AVVideoCompositionCoreAnimationTool>,
        );

        /// List of all track IDs for tracks from which sample data should be presented to the compositor at any point in the overall composition. Currently only tracks of type kCMMediaType_Metadata are allowed to be specified.
        #[unsafe(method(sourceSampleDataTrackIDs))]
        #[unsafe(method_family = none)]
        pub unsafe fn sourceSampleDataTrackIDs(&self) -> Retained<NSArray<NSNumber>>;

        /// Setter for [`sourceSampleDataTrackIDs`][Self::sourceSampleDataTrackIDs].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        #[unsafe(method(setSourceSampleDataTrackIDs:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setSourceSampleDataTrackIDs(
            &self,
            source_sample_data_track_i_ds: &NSArray<NSNumber>,
        );

        /// The output buffers of the video composition can be specified with the outputBufferDescription. The value is an array of CMTagCollectionRef objects that describes the output buffers.
        ///
        /// If the video composition will output tagged buffers, the details of those buffers should be specified with CMTags. Specifically, the StereoView (eyes) and ProjectionKind must be specified. The behavior is undefined if the output tagged buffers do not match the outputBufferDescription.
        /// The default is nil, which means monoscopic output. Note that an empty array is not valid. An exception will be thrown if the objects in the array are not of type CMTagCollectionRef.
        /// Note that tagged buffers are only supported for custom compositors.
        #[unsafe(method(outputBufferDescription))]
        #[unsafe(method_family = none)]
        pub unsafe fn outputBufferDescription(&self) -> Option<Retained<NSArray>>;

        /// Setter for [`outputBufferDescription`][Self::outputBufferDescription].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        ///
        /// # Safety
        ///
        /// `output_buffer_description` generic should be of the correct type.
        #[unsafe(method(setOutputBufferDescription:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setOutputBufferDescription(
            &self,
            output_buffer_description: Option<&NSArray>,
        );
    );
}
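
// Illustrative sketch (not part of the generated bindings): configuring a freshly
// created mutable composition. `CGSize` is used as a plain struct literal; the frame
// duration would be set with an `objc2-core-media` `CMTime`, omitted here to avoid
// guessing at its constructors.
//
// ```rust,ignore
// use objc2_core_foundation::CGSize;
//
// unsafe {
//     let composition = AVMutableVideoComposition::videoComposition();
//     composition.setRenderSize(CGSize { width: 1920.0, height: 1080.0 });
//     composition.setRenderScale(1.0);
//     // 0 is kCMPersistentTrackID_Invalid: derive frame timing from frameDuration
//     // rather than from a specific source track.
//     composition.setSourceTrackIDForFrameTiming(0);
//     // composition.setFrameDuration(...) would then cap the output frame rate.
// }
// ```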

/// Methods declared on superclass `NSObject`.
impl AVMutableVideoComposition {
    extern_methods!(
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}

/// AVMutableVideoCompositionColorimetery.
///
/// Indicates the color space of the frames output from the video composition.
///
/// Collectively the properties colorPrimaries, colorYCbCrMatrix, and colorTransferFunction define the color space that the rendered frames will be tagged with. For custom video compositing these properties are also used to specify the required color space of the source frames.
///
/// For examples of common color spaces see AVVideoSettings.h.
///
/// How to preserve the color space of the source frames:
///
/// Decide which color space should be preserved by examining the source asset's video tracks. Copy the source track's primaries, matrix and transfer function into the video composition's colorPrimaries, colorYCbCrMatrix and colorTransferFunction respectively.
///
/// - When using custom video compositing
/// Setting these properties will cause source frames to be converted into the specified color space and tagged as such. New frames allocated using -[AVVideoCompositionRenderContext newPixelBuffer] will also be tagged correctly.
///
/// - When using Core Image via videoCompositionWithAsset:options:applyingCIFiltersWithHandler:
/// Setting these properties will cause source frames to be converted into the specified color space and tagged as such. The source frames provided as CIImages will have the appropriate CGColorSpace applied. The color space is preserved when the output CIImage is finally rendered internally.
///
/// - When using basic compositing (i.e. AVVideoCompositionLayerInstruction)
/// Setting these properties will ensure that the internal compositor renders (or passes through) frames in the specified color space and tags them as such.
impl AVMutableVideoComposition {
    extern_methods!(
        /// Rendering will use these primaries and frames will be tagged as such. If the value of this property is nil then the source's primaries will be propagated and used.
        ///
        /// Default is nil. Valid values are those suitable for AVVideoColorPrimariesKey. Generally set as a triple along with colorYCbCrMatrix and colorTransferFunction.
        #[unsafe(method(colorPrimaries))]
        #[unsafe(method_family = none)]
        pub unsafe fn colorPrimaries(&self) -> Option<Retained<NSString>>;

        /// Setter for [`colorPrimaries`][Self::colorPrimaries].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        #[unsafe(method(setColorPrimaries:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setColorPrimaries(&self, color_primaries: Option<&NSString>);

        /// Rendering will use this matrix and frames will be tagged as such. If the value of this property is nil then the source's matrix will be propagated and used.
        ///
        /// Default is nil. Valid values are those suitable for AVVideoYCbCrMatrixKey. Generally set as a triple along with colorPrimaries and colorTransferFunction.
        #[unsafe(method(colorYCbCrMatrix))]
        #[unsafe(method_family = none)]
        pub unsafe fn colorYCbCrMatrix(&self) -> Option<Retained<NSString>>;

        /// Setter for [`colorYCbCrMatrix`][Self::colorYCbCrMatrix].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        #[unsafe(method(setColorYCbCrMatrix:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setColorYCbCrMatrix(&self, color_y_cb_cr_matrix: Option<&NSString>);

        /// Rendering will use this transfer function and frames will be tagged as such. If the value of this property is nil then the source's transfer function will be propagated and used.
        ///
        /// Default is nil. Valid values are those suitable for AVVideoTransferFunctionKey. Generally set as a triple along with colorPrimaries and colorYCbCrMatrix.
        #[unsafe(method(colorTransferFunction))]
        #[unsafe(method_family = none)]
        pub unsafe fn colorTransferFunction(&self) -> Option<Retained<NSString>>;

        /// Setter for [`colorTransferFunction`][Self::colorTransferFunction].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        #[unsafe(method(setColorTransferFunction:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setColorTransferFunction(&self, color_transfer_function: Option<&NSString>);

        /// Configures policy for per frame HDR display metadata on the rendered frame
        ///
        /// Allows the system to identify situations where HDR metadata can be generated and attached to the rendered video frame.
        /// Default is AVVideoCompositionPerFrameHDRDisplayMetadataPolicyPropagate. Any HDR metadata attached to the composed frame will be propagated to the rendered video frames.
        #[unsafe(method(perFrameHDRDisplayMetadataPolicy))]
        #[unsafe(method_family = none)]
        pub unsafe fn perFrameHDRDisplayMetadataPolicy(
            &self,
        ) -> Retained<AVVideoCompositionPerFrameHDRDisplayMetadataPolicy>;

        /// Setter for [`perFrameHDRDisplayMetadataPolicy`][Self::perFrameHDRDisplayMetadataPolicy].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        #[unsafe(method(setPerFrameHDRDisplayMetadataPolicy:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPerFrameHDRDisplayMetadataPolicy(
            &self,
            per_frame_hdr_display_metadata_policy: &AVVideoCompositionPerFrameHDRDisplayMetadataPolicy,
        );
    );
}
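
// Illustrative sketch (not part of the generated bindings): tagging output frames as
// BT.709 by setting the colour triple described above. The literal "ITU_R_709_2" values
// mirror the corresponding AVVideoSettings string constants; `composition` is assumed to
// be an `AVMutableVideoComposition`.
//
// ```rust,ignore
// use objc2_foundation::ns_string;
//
// unsafe {
//     composition.setColorPrimaries(Some(ns_string!("ITU_R_709_2")));
//     composition.setColorYCbCrMatrix(Some(ns_string!("ITU_R_709_2")));
//     composition.setColorTransferFunction(Some(ns_string!("ITU_R_709_2")));
// }
// ```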
777
778/// AVMutableVideoCompositionFiltering.
779impl AVMutableVideoComposition {
780 extern_methods!(
781 #[cfg(all(
782 feature = "AVAsset",
783 feature = "AVVideoCompositing",
784 feature = "block2"
785 ))]
786 /// Returns a new instance of AVMutableVideoComposition with values and instructions that will apply the specified handler block to video frames represented as instances of CIImage.
787 ///
788 /// The returned AVMutableVideoComposition will cause the specified handler block to be called to filter each frame of the asset's first enabled video track. The handler block should use the properties of the provided AVAsynchronousCIImageFilteringRequest and respond using finishWithImage:context: with a "filtered" new CIImage (or the provided source image for no affect). In the event of an error, respond to the request using finishWithError:. The error can be observed via AVPlayerItemFailedToPlayToEndTimeNotification, see AVPlayerItemFailedToPlayToEndTimeErrorKey in notification payload. The client can set sourceTrackIDForFrameTiming to kCMPersistentTrackID_Invalid and frameDuration to an appropriate value in order to specify the maximum output frame rate independent of the source track timing.
789 ///
790 /// The video composition will also have the following values for its properties:
791 ///
792 /// - The original timing of the asset's first enabled video track will be used.
793 /// - A renderSize that encompasses the asset's first enabled video track respecting the track's preferredTransform.
794 /// - A renderScale of 1.0.
795 ///
796 /// The default CIContext has the following properties:
797 ///
798 /// - iOS: Device RGB color space
799 /// - macOS: sRGB color space
800 ///
801 /// Example usage:
802 /// ```objc
803 /// playerItem.videoComposition = [AVMutableVideoComposition videoCompositionWithAsset:srcAsset applyingCIFiltersWithHandler:
804 /// ^(AVAsynchronousCIImageFilteringRequest *request)
805 /// {
806 /// NSError *err = nil;
807 /// CIImage *filtered = myRenderer(request,
808 /// &err
809 /// );
810 /// if (filtered)
811 /// [request finishWithImage:filtered context:nil];
812 /// else
813 /// [request finishWithError:err];
814 /// }];
815 /// ```
816 /// - Parameter asset: An instance of AVAsset. For best performance, ensure that the duration and tracks properties of the asset are already loaded before invoking this method.
817 ///
818 /// - Returns: An instance of AVMutableVideoComposition.
819 ///
820 /// # Safety
821 ///
822 /// `applier` block must be sendable.
823 #[deprecated = "Use videoCompositionWithAsset:applyingCIFiltersWithHandler:completionHandler: instead"]
824 #[unsafe(method(videoCompositionWithAsset:applyingCIFiltersWithHandler:))]
825 #[unsafe(method_family = none)]
826 pub unsafe fn videoCompositionWithAsset_applyingCIFiltersWithHandler(
827 asset: &AVAsset,
828 applier: &block2::DynBlock<dyn Fn(NonNull<AVAsynchronousCIImageFilteringRequest>)>,
829 ) -> Retained<AVMutableVideoComposition>;
830
831 #[cfg(all(
832 feature = "AVAsset",
833 feature = "AVVideoCompositing",
834 feature = "block2"
835 ))]
836 /// Vends a new instance of AVMutableVideoComposition with values and instructions that will apply the specified handler block to video frames represented as instances of CIImage.
837 ///
838 /// The new AVMutableVideoComposition will cause the specified handler block to be called to filter each frame of the asset's first enabled video track. The handler block should use the properties of the provided AVAsynchronousCIImageFilteringRequest and respond using finishWithImage:context: with a "filtered" new CIImage (or the provided source image for no affect). In the event of an error, respond to the request using finishWithError:. The error can be observed via AVPlayerItemFailedToPlayToEndTimeNotification, see AVPlayerItemFailedToPlayToEndTimeErrorKey in notification payload. The client can set sourceTrackIDForFrameTiming to kCMPersistentTrackID_Invalid and frameDuration to an appropriate value in order to specify the maximum output frame rate independent of the source track timing.
839 ///
840 /// The video composition will also have the following values for its properties:
841 ///
842 /// - The original timing of the asset's first enabled video track will be used.
843 /// - A renderSize that encompasses the asset's first enabled video track respecting the track's preferredTransform.
844 /// - A renderScale of 1.0.
845 ///
846 /// The default CIContext has the following properties:
847 ///
848 /// - iOS: Device RGB color space
849 /// - macOS: sRGB color space
850 ///
851 /// Example usage:
852 /// ```objc
853 /// [AVMutableVideoComposition videoCompositionWithAsset:srcAsset applyingCIFiltersWithHandler:
854 /// ^(AVAsynchronousCIImageFilteringRequest *request)
855 /// {
856 /// NSError *err = nil;
857 /// CIImage *filtered = myRenderer(request,
858 /// &err
859 /// );
860 /// if (filtered)
861 /// [request finishWithImage:filtered context:nil];
862 /// else
863 /// [request finishWithError:err];
864 /// } completionHandler:
865 /// ^(AVMutableVideoComposition * _Nullable videoComposition, NSError * _Nullable error)
866 /// {
867 /// if (videoComposition != nil) {
868 /// playerItem.videoComposition = videoComposition
869 /// else {
870 /// // handle error
871 /// }];
872 /// ```
873 /// - Parameter asset: An instance of AVAsset.
874 /// - Parameter completionHandler: A block that is invoked when the new video composition has finished being created. If the `videoComposition` parameter is nil, the `error` parameter describes the failure that occurred.
875 ///
876 /// # Safety
877 ///
878 /// - `applier` block must be sendable.
879 /// - `completion_handler` block must be sendable.
880 #[unsafe(method(videoCompositionWithAsset:applyingCIFiltersWithHandler:completionHandler:))]
881 #[unsafe(method_family = none)]
882 pub unsafe fn videoCompositionWithAsset_applyingCIFiltersWithHandler_completionHandler(
883 asset: &AVAsset,
884 applier: &block2::DynBlock<dyn Fn(NonNull<AVAsynchronousCIImageFilteringRequest>)>,
885 completion_handler: &block2::DynBlock<
886 dyn Fn(*mut AVMutableVideoComposition, *mut NSError),
887 >,
888 );
889 );
890}
891
892extern_class!(
893 /// An AVVideoCompositionInstruction object represents an operation to be performed by a compositor.
894 ///
895 /// An AVVideoComposition object maintains an array of instructions to perform its composition. This class is not intended to be subclassed; instead, conform to AVVideoCompositionInstructionProtocol ("AVVideoCompositionInstruction" in Objective-C). Subclasses of this type that are used from Swift must fulfill the requirements of a Sendable type.
896 ///
897 /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avvideocompositioninstruction?language=objc)
898 #[unsafe(super(NSObject))]
899 #[derive(Debug, PartialEq, Eq, Hash)]
900 pub struct AVVideoCompositionInstruction;
901);
902
903unsafe impl Send for AVVideoCompositionInstruction {}
904
905unsafe impl Sync for AVVideoCompositionInstruction {}
906
907#[cfg(feature = "AVVideoCompositing")]
908extern_conformance!(
909 unsafe impl AVVideoCompositionInstructionProtocol for AVVideoCompositionInstruction {}
910);
911
912extern_conformance!(
913 unsafe impl NSCoding for AVVideoCompositionInstruction {}
914);
915
916extern_conformance!(
917 unsafe impl NSCopying for AVVideoCompositionInstruction {}
918);
919
920unsafe impl CopyingHelper for AVVideoCompositionInstruction {
921 type Result = Self;
922}
923
924extern_conformance!(
925 unsafe impl NSMutableCopying for AVVideoCompositionInstruction {}
926);
927
928unsafe impl MutableCopyingHelper for AVVideoCompositionInstruction {
929 type Result = AVMutableVideoCompositionInstruction;
930}
931
932extern_conformance!(
933 unsafe impl NSObjectProtocol for AVVideoCompositionInstruction {}
934);
935
936extern_conformance!(
937 unsafe impl NSSecureCoding for AVVideoCompositionInstruction {}
938);
939
940impl AVVideoCompositionInstruction {
941 extern_methods!(
942 /// Pass-through initializer, for internal use in AVFoundation only
943 #[unsafe(method(videoCompositionInstructionWithInstruction:))]
944 #[unsafe(method_family = none)]
945 pub unsafe fn videoCompositionInstructionWithInstruction(
946 instruction: &AVVideoCompositionInstruction,
947 ) -> Retained<AVVideoCompositionInstruction>;
948
949 #[cfg(feature = "objc2-core-media")]
950 /// Indicates the timeRange during which the instruction is effective. Note requirements for the timeRanges of instructions described in connection with AVVideoComposition's instructions key above.
951 ///
952 /// This property is not atomic.
953 ///
954 /// # Safety
955 ///
956 /// This might not be thread-safe.
957 #[unsafe(method(timeRange))]
958 #[unsafe(method_family = none)]
959 pub unsafe fn timeRange(&self) -> CMTimeRange;
960
961 #[cfg(feature = "objc2-core-graphics")]
962 /// Indicates the background color of the composition.
963 ///
964 /// Solid BGRA colors only are supported; patterns and other color refs that are not supported will be ignored.
965 /// - If the background color is not specified the video compositor will use a default backgroundColor of opaque black.
966 /// - If the rendered pixel buffer does not have alpha, the alpha value of the backgroundColor will be ignored.
967 ///
968 /// This property is not atomic.
969 ///
970 /// # Safety
971 ///
972 /// This might not be thread-safe.
973 #[unsafe(method(backgroundColor))]
974 #[unsafe(method_family = none)]
975 pub unsafe fn backgroundColor(&self) -> Option<Retained<CGColor>>;
976
977 /// Provides an array of instances of AVVideoCompositionLayerInstruction that specify how video frames from source tracks should be layered and composed.
978 ///
979 /// Tracks are layered in the composition according to the top-to-bottom order of the layerInstructions array; the track with trackID of the first instruction in the array will be layered on top, with the track with the trackID of the second instruction immediately underneath, etc. If this key is nil, the output will be a fill of the background color.
980 ///
981 /// This property is not atomic.
982 ///
983 /// # Safety
984 ///
985 /// This might not be thread-safe.
986 #[unsafe(method(layerInstructions))]
987 #[unsafe(method_family = none)]
988 pub unsafe fn layerInstructions(
989 &self,
990 ) -> Retained<NSArray<AVVideoCompositionLayerInstruction>>;
991
992 /// If NO, indicates that post-processing should be skipped for the duration of this instruction. YES by default.
993 ///
994 /// See +[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:inLayer:].
995 ///
996 /// This property is not atomic.
997 ///
998 /// # Safety
999 ///
1000 /// This might not be thread-safe.
1001 #[unsafe(method(enablePostProcessing))]
1002 #[unsafe(method_family = none)]
1003 pub unsafe fn enablePostProcessing(&self) -> bool;
1004
1005 /// List of video track IDs required to compose frames for this instruction. The value of this property is computed from the layer instructions.
1006 ///
1007 /// This property is not atomic.
1008 ///
1009 /// # Safety
1010 ///
1011 /// This might not be thread-safe.
1012 #[unsafe(method(requiredSourceTrackIDs))]
1013 #[unsafe(method_family = none)]
1014 pub unsafe fn requiredSourceTrackIDs(&self) -> Retained<NSArray<NSValue>>;
1015
1016 #[cfg(feature = "objc2-core-media")]
1017 /// If the video composition result is one of the source frames for the duration of the instruction, this property returns the corresponding track ID. The compositor won't be run for the duration of the instruction and the proper source frame will be used instead. The value of this property is computed from the layer instructions.
1018 ///
1019 /// This property is not atomic.
1020 ///
1021 /// # Safety
1022 ///
1023 /// This might not be thread-safe.
1024 #[unsafe(method(passthroughTrackID))]
1025 #[unsafe(method_family = none)]
1026 pub unsafe fn passthroughTrackID(&self) -> CMPersistentTrackID;
1027
1028 /// List of track IDs for which sample data should be presented to the compositor for this instruction.
1029 ///
1030 /// This property is not atomic.
1031 ///
1032 /// # Safety
1033 ///
1034 /// This might not be thread-safe.
1035 #[unsafe(method(requiredSourceSampleDataTrackIDs))]
1036 #[unsafe(method_family = none)]
1037 pub unsafe fn requiredSourceSampleDataTrackIDs(&self) -> Retained<NSArray<NSNumber>>;
1038 );
1039}
1040
1041/// Methods declared on superclass `NSObject`.
1042impl AVVideoCompositionInstruction {
1043 extern_methods!(
1044 #[unsafe(method(init))]
1045 #[unsafe(method_family = init)]
1046 pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;
1047
1048 #[unsafe(method(new))]
1049 #[unsafe(method_family = new)]
1050 pub unsafe fn new() -> Retained<Self>;
1051 );
1052}
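
// Illustrative sketch (not part of the generated bindings): reading the read-only properties of
// an `AVVideoCompositionInstruction` obtained from an existing `AVVideoComposition`. The function
// name is hypothetical; every method called is one of the bindings declared above. Assumes the
// `objc2-core-media` feature is enabled.
#[cfg(feature = "objc2-core-media")]
#[allow(dead_code)]
fn example_inspect_instruction(instruction: &AVVideoCompositionInstruction) {
    // The span of the composition timeline that this instruction covers.
    let _time_range = unsafe { instruction.timeRange() };

    // Layer instructions are ordered top-to-bottom; the first entry is composited on top.
    let layers = unsafe { instruction.layerInstructions() };
    let _layer_count = layers.len();

    // kCMPersistentTrackID_Invalid means the compositor will actually run for this instruction;
    // any other value identifies a source track whose frames are passed through unchanged.
    let _passthrough = unsafe { instruction.passthroughTrackID() };
}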
1053
1054extern_class!(
1055 /// [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avmutablevideocompositioninstruction?language=objc)
1056 #[unsafe(super(AVVideoCompositionInstruction, NSObject))]
1057 #[derive(Debug, PartialEq, Eq, Hash)]
1058 pub struct AVMutableVideoCompositionInstruction;
1059);
1060
1061#[cfg(feature = "AVVideoCompositing")]
1062extern_conformance!(
1063 unsafe impl AVVideoCompositionInstructionProtocol for AVMutableVideoCompositionInstruction {}
1064);
1065
1066extern_conformance!(
1067 unsafe impl NSCoding for AVMutableVideoCompositionInstruction {}
1068);
1069
1070extern_conformance!(
1071 unsafe impl NSCopying for AVMutableVideoCompositionInstruction {}
1072);
1073
1074unsafe impl CopyingHelper for AVMutableVideoCompositionInstruction {
1075 type Result = AVVideoCompositionInstruction;
1076}
1077
1078extern_conformance!(
1079 unsafe impl NSMutableCopying for AVMutableVideoCompositionInstruction {}
1080);
1081
1082unsafe impl MutableCopyingHelper for AVMutableVideoCompositionInstruction {
1083 type Result = Self;
1084}
1085
1086extern_conformance!(
1087 unsafe impl NSObjectProtocol for AVMutableVideoCompositionInstruction {}
1088);
1089
1090extern_conformance!(
1091 unsafe impl NSSecureCoding for AVMutableVideoCompositionInstruction {}
1092);
1093
1094impl AVMutableVideoCompositionInstruction {
1095 extern_methods!(
1096 /// Returns a new instance of AVMutableVideoCompositionInstruction.
1097 ///
1098 /// The returned AVMutableVideoCompositionInstruction will have a timeRange of kCMTimeRangeInvalid, a NULL backgroundColor, and a nil array of layerInstructions.
1099 #[unsafe(method(videoCompositionInstruction))]
1100 #[unsafe(method_family = none)]
1101 pub unsafe fn videoCompositionInstruction() -> Retained<Self>;
1102
1103 #[cfg(feature = "objc2-core-media")]
1104 /// Indicates the timeRange during which the instruction is effective. Note requirements for the timeRanges of instructions described in connection with AVVideoComposition's instructions key above.
1105 ///
1106 /// This property is not atomic.
1107 ///
1108 /// # Safety
1109 ///
1110 /// This might not be thread-safe.
1111 #[unsafe(method(timeRange))]
1112 #[unsafe(method_family = none)]
1113 pub unsafe fn timeRange(&self) -> CMTimeRange;
1114
1115 #[cfg(feature = "objc2-core-media")]
1116 /// Setter for [`timeRange`][Self::timeRange].
1117 ///
1118 /// # Safety
1119 ///
1120 /// This might not be thread-safe.
1121 #[unsafe(method(setTimeRange:))]
1122 #[unsafe(method_family = none)]
1123 pub unsafe fn setTimeRange(&self, time_range: CMTimeRange);
1124
1125 #[cfg(feature = "objc2-core-graphics")]
1126 /// Indicates the background color of the composition.
1127 ///
1128 /// Only solid BGRA colors are supported; patterns and other unsupported color refs will be ignored.
1129 /// - If the background color is not specified the video compositor will use a default backgroundColor of opaque black.
1130 /// - If the rendered pixel buffer does not have alpha, the alpha value of the backgroundColor will be ignored.
1131 ///
1132 /// This property is not atomic.
1133 ///
1134 /// # Safety
1135 ///
1136 /// This might not be thread-safe.
1137 #[unsafe(method(backgroundColor))]
1138 #[unsafe(method_family = none)]
1139 pub unsafe fn backgroundColor(&self) -> Option<Retained<CGColor>>;
1140
1141 #[cfg(feature = "objc2-core-graphics")]
1142 /// Setter for [`backgroundColor`][Self::backgroundColor].
1143 ///
1144 /// # Safety
1145 ///
1146 /// This might not be thread-safe.
1147 #[unsafe(method(setBackgroundColor:))]
1148 #[unsafe(method_family = none)]
1149 pub unsafe fn setBackgroundColor(&self, background_color: Option<&CGColor>);
1150
1151 /// Provides an array of instances of AVVideoCompositionLayerInstruction that specify how video frames from source tracks should be layered and composed.
1152 ///
1153 /// Tracks are layered in the composition according to the top-to-bottom order of the layerInstructions array; the track with the trackID of the first instruction in the array will be layered on top, with the track with the trackID of the second instruction immediately underneath, etc.
1154 /// If this key is nil, the output will be a fill of the background color.
1155 ///
1156 /// This property is not atomic.
1157 ///
1158 /// # Safety
1159 ///
1160 /// This might not be thread-safe.
1161 #[unsafe(method(layerInstructions))]
1162 #[unsafe(method_family = none)]
1163 pub unsafe fn layerInstructions(
1164 &self,
1165 ) -> Retained<NSArray<AVVideoCompositionLayerInstruction>>;
1166
1167 /// Setter for [`layerInstructions`][Self::layerInstructions].
1168 ///
1169 /// This is [copied][objc2_foundation::NSCopying::copy] when set.
1170 ///
1171 /// # Safety
1172 ///
1173 /// This might not be thread-safe.
1174 #[unsafe(method(setLayerInstructions:))]
1175 #[unsafe(method_family = none)]
1176 pub unsafe fn setLayerInstructions(
1177 &self,
1178 layer_instructions: &NSArray<AVVideoCompositionLayerInstruction>,
1179 );
1180
1181 /// If NO, indicates that post-processing should be skipped for the duration of this instruction. YES by default.
1182 ///
1183 /// See +[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:inLayer:].
1184 ///
1185 /// This property is not atomic.
1186 ///
1187 /// # Safety
1188 ///
1189 /// This might not be thread-safe.
1190 #[unsafe(method(enablePostProcessing))]
1191 #[unsafe(method_family = none)]
1192 pub unsafe fn enablePostProcessing(&self) -> bool;
1193
1194 /// Setter for [`enablePostProcessing`][Self::enablePostProcessing].
1195 ///
1196 /// # Safety
1197 ///
1198 /// This might not be thread-safe.
1199 #[unsafe(method(setEnablePostProcessing:))]
1200 #[unsafe(method_family = none)]
1201 pub unsafe fn setEnablePostProcessing(&self, enable_post_processing: bool);
1202
1203 /// List of sample data track IDs required to compose frames for this instruction.
1204 ///
1205 /// Currently only tracks of type kCMMediaType_Metadata are allowed to be specified. If this property is unspecified or is an empty array, no sample data is considered to be required for this instruction. Note that you must also specify all tracks for which sample data is required for ANY instruction in the AVVideoComposition, in AVVideoComposition's property sourceSampleDataTrackIDs.
1206 ///
1207 /// This property is not atomic.
1208 ///
1209 /// # Safety
1210 ///
1211 /// This might not be thread-safe.
1212 #[unsafe(method(requiredSourceSampleDataTrackIDs))]
1213 #[unsafe(method_family = none)]
1214 pub unsafe fn requiredSourceSampleDataTrackIDs(&self) -> Retained<NSArray<NSNumber>>;
1215
1216 /// Setter for [`requiredSourceSampleDataTrackIDs`][Self::requiredSourceSampleDataTrackIDs].
1217 ///
1218 /// This is [copied][objc2_foundation::NSCopying::copy] when set.
1219 ///
1220 /// # Safety
1221 ///
1222 /// This might not be thread-safe.
1223 #[unsafe(method(setRequiredSourceSampleDataTrackIDs:))]
1224 #[unsafe(method_family = none)]
1225 pub unsafe fn setRequiredSourceSampleDataTrackIDs(
1226 &self,
1227 required_source_sample_data_track_i_ds: &NSArray<NSNumber>,
1228 );
1229 );
1230}
1231
1232/// Methods declared on superclass `NSObject`.
1233impl AVMutableVideoCompositionInstruction {
1234 extern_methods!(
1235 #[unsafe(method(init))]
1236 #[unsafe(method_family = init)]
1237 pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;
1238
1239 #[unsafe(method(new))]
1240 #[unsafe(method_family = new)]
1241 pub unsafe fn new() -> Retained<Self>;
1242 );
1243}
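
// Illustrative sketch (not part of the generated bindings): configuring a freshly created
// `AVMutableVideoCompositionInstruction`. The function name and its parameters are hypothetical;
// the time range, background color and layer instructions are taken as arguments so that no
// constructors from other crates have to be assumed. Requires the `objc2-core-media` and
// `objc2-core-graphics` features.
#[cfg(all(feature = "objc2-core-media", feature = "objc2-core-graphics"))]
#[allow(dead_code)]
fn example_build_instruction(
    time_range: CMTimeRange,
    background_color: Option<&CGColor>,
    layer_instructions: &NSArray<AVVideoCompositionLayerInstruction>,
) -> Retained<AVMutableVideoCompositionInstruction> {
    unsafe {
        // Starts out with an invalid timeRange, no background color and no layer instructions.
        let instruction = AVMutableVideoCompositionInstruction::videoCompositionInstruction();

        // Instruction time ranges within a composition must not overlap.
        instruction.setTimeRange(time_range);

        // Only solid BGRA colors are honored; passing None keeps the default opaque black.
        instruction.setBackgroundColor(background_color);

        // The array is copied on assignment; its first entry is composited on top.
        instruction.setLayerInstructions(layer_instructions);

        instruction
    }
}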
1244
1245extern_class!(
1246 /// An AVVideoCompositionLayerInstruction object represents the transform, opacity, and cropping ramps to apply to a given track. Subclasses of this type that are used from Swift must fulfill the requirements of a Sendable type.
1247 ///
1248 /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avvideocompositionlayerinstruction?language=objc)
1249 #[unsafe(super(NSObject))]
1250 #[derive(Debug, PartialEq, Eq, Hash)]
1251 pub struct AVVideoCompositionLayerInstruction;
1252);
1253
1254unsafe impl Send for AVVideoCompositionLayerInstruction {}
1255
1256unsafe impl Sync for AVVideoCompositionLayerInstruction {}
1257
1258extern_conformance!(
1259 unsafe impl NSCoding for AVVideoCompositionLayerInstruction {}
1260);
1261
1262extern_conformance!(
1263 unsafe impl NSCopying for AVVideoCompositionLayerInstruction {}
1264);
1265
1266unsafe impl CopyingHelper for AVVideoCompositionLayerInstruction {
1267 type Result = Self;
1268}
1269
1270extern_conformance!(
1271 unsafe impl NSMutableCopying for AVVideoCompositionLayerInstruction {}
1272);
1273
1274unsafe impl MutableCopyingHelper for AVVideoCompositionLayerInstruction {
1275 type Result = AVMutableVideoCompositionLayerInstruction;
1276}
1277
1278extern_conformance!(
1279 unsafe impl NSObjectProtocol for AVVideoCompositionLayerInstruction {}
1280);
1281
1282extern_conformance!(
1283 unsafe impl NSSecureCoding for AVVideoCompositionLayerInstruction {}
1284);
1285
1286impl AVVideoCompositionLayerInstruction {
1287 extern_methods!(
1288 /// Pass-through initializer, for internal use in AVFoundation only
1289 #[unsafe(method(videoCompositionLayerInstructionWithLayerInstruction:))]
1290 #[unsafe(method_family = none)]
1291 pub unsafe fn videoCompositionLayerInstructionWithLayerInstruction(
1292 instruction: &AVVideoCompositionLayerInstruction,
1293 ) -> Retained<AVVideoCompositionLayerInstruction>;
1294
1295 #[cfg(feature = "objc2-core-media")]
1296 /// Indicates the trackID of the source track to which the compositor will apply the instruction.
1297 ///
1298 /// This property is not atomic.
1299 ///
1300 /// # Safety
1301 ///
1302 /// This might not be thread-safe.
1303 #[unsafe(method(trackID))]
1304 #[unsafe(method_family = none)]
1305 pub unsafe fn trackID(&self) -> CMPersistentTrackID;
1306
1307 #[cfg(all(feature = "objc2-core-foundation", feature = "objc2-core-media"))]
1308 /// Obtains the transform ramp that includes the specified time.
1309 ///
1310 /// - Parameter time: If a ramp with a timeRange that contains the specified time has been set, information about the effective ramp for that time is supplied. Otherwise, information about the first ramp that starts after the specified time is supplied.
1311 /// - Parameter startTransform: A pointer to a CGAffineTransform to receive the starting transform value for the transform ramp. May be NULL.
1312 /// - Parameter endTransform: A pointer to a CGAffineTransform to receive the ending transform value for the transform ramp. May be NULL.
1313 /// - Parameter timeRange: A pointer to a CMTimeRange to receive the timeRange of the transform ramp. May be NULL.
1314 ///
1315 /// - Returns: An indication of success. NO will be returned if the specified time is beyond the duration of the last transform ramp that has been set.
1316 ///
1317 /// # Safety
1318 ///
1319 /// - `start_transform` must be a valid pointer or null.
1320 /// - `end_transform` must be a valid pointer or null.
1321 /// - `time_range` must be a valid pointer or null.
1322 #[unsafe(method(getTransformRampForTime:startTransform:endTransform:timeRange:))]
1323 #[unsafe(method_family = none)]
1324 pub unsafe fn getTransformRampForTime_startTransform_endTransform_timeRange(
1325 &self,
1326 time: CMTime,
1327 start_transform: *mut CGAffineTransform,
1328 end_transform: *mut CGAffineTransform,
1329 time_range: *mut CMTimeRange,
1330 ) -> bool;
1331
1332 #[cfg(feature = "objc2-core-media")]
1333 /// Obtains the opacity ramp that includes the specified time.
1334 ///
1335 /// - Parameter time: If a ramp with a timeRange that contains the specified time has been set, information about the effective ramp for that time is supplied. Otherwise, information about the first ramp that starts after the specified time is supplied.
1336 /// - Parameter startOpacity: A pointer to a float to receive the starting opacity value for the opacity ramp. May be NULL.
1337 /// - Parameter endOpacity: A pointer to a float to receive the ending opacity value for the opacity ramp. May be NULL.
1338 /// - Parameter timeRange: A pointer to a CMTimeRange to receive the timeRange of the opacity ramp. May be NULL.
1339 ///
1340 /// - Returns: An indication of success. NO will be returned if the specified time is beyond the duration of the last opacity ramp that has been set.
1341 ///
1342 /// # Safety
1343 ///
1344 /// - `start_opacity` must be a valid pointer or null.
1345 /// - `end_opacity` must be a valid pointer or null.
1346 /// - `time_range` must be a valid pointer or null.
1347 #[unsafe(method(getOpacityRampForTime:startOpacity:endOpacity:timeRange:))]
1348 #[unsafe(method_family = none)]
1349 pub unsafe fn getOpacityRampForTime_startOpacity_endOpacity_timeRange(
1350 &self,
1351 time: CMTime,
1352 start_opacity: *mut c_float,
1353 end_opacity: *mut c_float,
1354 time_range: *mut CMTimeRange,
1355 ) -> bool;
1356
1357 #[cfg(all(feature = "objc2-core-foundation", feature = "objc2-core-media"))]
1358 /// Obtains the crop rectangle ramp that includes the specified time.
1359 ///
1360 /// - Parameter time: If a ramp with a timeRange that contains the specified time has been set, information about the effective ramp for that time is supplied. Otherwise, information about the first ramp that starts after the specified time is supplied.
1361 /// - Parameter startCropRectangle: A pointer to a CGRect to receive the starting crop rectangle value for the crop rectangle ramp. May be NULL.
1362 /// - Parameter endCropRectangle: A pointer to a CGRect to receive the ending crop rectangle value for the crop rectangle ramp. May be NULL.
1363 /// - Parameter timeRange: A pointer to a CMTimeRange to receive the timeRange of the crop rectangle ramp. May be NULL.
1364 ///
1365 /// - Returns: An indication of success. NO will be returned if the specified time is beyond the duration of the last crop rectangle ramp that has been set.
1366 ///
1367 /// # Safety
1368 ///
1369 /// - `start_crop_rectangle` must be a valid pointer or null.
1370 /// - `end_crop_rectangle` must be a valid pointer or null.
1371 /// - `time_range` must be a valid pointer or null.
1372 #[unsafe(method(getCropRectangleRampForTime:startCropRectangle:endCropRectangle:timeRange:))]
1373 #[unsafe(method_family = none)]
1374 pub unsafe fn getCropRectangleRampForTime_startCropRectangle_endCropRectangle_timeRange(
1375 &self,
1376 time: CMTime,
1377 start_crop_rectangle: *mut CGRect,
1378 end_crop_rectangle: *mut CGRect,
1379 time_range: *mut CMTimeRange,
1380 ) -> bool;
1381 );
1382}
1383
1384/// Methods declared on superclass `NSObject`.
1385impl AVVideoCompositionLayerInstruction {
1386 extern_methods!(
1387 #[unsafe(method(init))]
1388 #[unsafe(method_family = init)]
1389 pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;
1390
1391 #[unsafe(method(new))]
1392 #[unsafe(method_family = new)]
1393 pub unsafe fn new() -> Retained<Self>;
1394 );
1395}
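
// Illustrative sketch (not part of the generated bindings): reading back an opacity ramp from an
// `AVVideoCompositionLayerInstruction` through the out-pointer style getter declared above. The
// function name is hypothetical. Assumes the `objc2-core-media` feature is enabled.
#[cfg(feature = "objc2-core-media")]
#[allow(dead_code)]
fn example_read_opacity_ramp(
    layer_instruction: &AVVideoCompositionLayerInstruction,
    time: CMTime,
) -> Option<(c_float, c_float, CMTimeRange)> {
    let mut start_opacity: c_float = 0.0;
    let mut end_opacity: c_float = 0.0;
    let mut range = core::mem::MaybeUninit::<CMTimeRange>::uninit();

    // Returns false if `time` lies beyond the duration of the last opacity ramp that was set.
    let found = unsafe {
        layer_instruction.getOpacityRampForTime_startOpacity_endOpacity_timeRange(
            time,
            &mut start_opacity,
            &mut end_opacity,
            range.as_mut_ptr(),
        )
    };

    if found {
        // The getter filled every non-null pointer on success, so the range is initialized.
        Some((start_opacity, end_opacity, unsafe { range.assume_init() }))
    } else {
        None
    }
}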
1396
1397extern_class!(
1398 /// [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avmutablevideocompositionlayerinstruction?language=objc)
1399 #[unsafe(super(AVVideoCompositionLayerInstruction, NSObject))]
1400 #[derive(Debug, PartialEq, Eq, Hash)]
1401 pub struct AVMutableVideoCompositionLayerInstruction;
1402);
1403
1404extern_conformance!(
1405 unsafe impl NSCoding for AVMutableVideoCompositionLayerInstruction {}
1406);
1407
1408extern_conformance!(
1409 unsafe impl NSCopying for AVMutableVideoCompositionLayerInstruction {}
1410);
1411
1412unsafe impl CopyingHelper for AVMutableVideoCompositionLayerInstruction {
1413 type Result = AVVideoCompositionLayerInstruction;
1414}
1415
1416extern_conformance!(
1417 unsafe impl NSMutableCopying for AVMutableVideoCompositionLayerInstruction {}
1418);
1419
1420unsafe impl MutableCopyingHelper for AVMutableVideoCompositionLayerInstruction {
1421 type Result = Self;
1422}
1423
1424extern_conformance!(
1425 unsafe impl NSObjectProtocol for AVMutableVideoCompositionLayerInstruction {}
1426);
1427
1428extern_conformance!(
1429 unsafe impl NSSecureCoding for AVMutableVideoCompositionLayerInstruction {}
1430);
1431
1432impl AVMutableVideoCompositionLayerInstruction {
1433 extern_methods!(
1434 #[cfg(feature = "AVAssetTrack")]
1435 /// Returns a new instance of AVMutableVideoCompositionLayerInstruction with no transform or opacity ramps and a trackID set to the specified track's trackID.
1436 ///
1437 /// - Parameter track: A reference to an AVAssetTrack.
1438 #[unsafe(method(videoCompositionLayerInstructionWithAssetTrack:))]
1439 #[unsafe(method_family = none)]
1440 pub unsafe fn videoCompositionLayerInstructionWithAssetTrack(
1441 track: &AVAssetTrack,
1442 ) -> Retained<Self>;
1443
1444 /// Returns a new instance of AVMutableVideoCompositionLayerInstruction with no transform or opacity ramps and a trackID initialized to kCMPersistentTrackID_Invalid.
1445 #[unsafe(method(videoCompositionLayerInstruction))]
1446 #[unsafe(method_family = none)]
1447 pub unsafe fn videoCompositionLayerInstruction() -> Retained<Self>;
1448
1449 #[cfg(feature = "objc2-core-media")]
1450 /// Indicates the trackID of the source track to which the compositor will apply the instruction.
1451 ///
1452 /// This property is not atomic.
1453 ///
1454 /// # Safety
1455 ///
1456 /// This might not be thread-safe.
1457 #[unsafe(method(trackID))]
1458 #[unsafe(method_family = none)]
1459 pub unsafe fn trackID(&self) -> CMPersistentTrackID;
1460
1461 #[cfg(feature = "objc2-core-media")]
1462 /// Setter for [`trackID`][Self::trackID].
1463 ///
1464 /// # Safety
1465 ///
1466 /// This might not be thread-safe.
1467 #[unsafe(method(setTrackID:))]
1468 #[unsafe(method_family = none)]
1469 pub unsafe fn setTrackID(&self, track_id: CMPersistentTrackID);
1470
1471 #[cfg(all(feature = "objc2-core-foundation", feature = "objc2-core-media"))]
1472 /// Sets a transform ramp to apply during the specified timerange.
1473 ///
1474 /// For purposes of spatial positioning of video frames, the origin is in the top-left corner, so
1475 /// (a) positive translation values in an affine transform move a video frame right and down; and
1476 /// (b) with an identity transform a video frame is positioned with its top-left corner in the top-left corner of the composited frame.
1477 /// Video frames shall be interpreted at their display sizes (as described by CVImageBufferGetDisplaySize,
1478 /// i.e., taking pixel aspect ratio attachments into account) before any affine transform is applied.
1479 ///
1480 /// During a transform ramp, the affine transform is interpolated between the values set at the ramp's start time and end time.
1481 /// Before the first specified time for which a transform is set, the affine transform is held constant at the value of CGAffineTransformIdentity;
1482 /// after the last time for which a transform is set, the affine transform is held constant at that last value.
1483 ///
1484 /// This method throws an exception if the time range overlaps the time range of an existing transform ramp, or if the time range does not have a numeric start time and duration.
1485 ///
1486 /// - Parameter startTransform: The transform to be applied at the starting time of the timeRange. See the discussion below of how transforms are applied to video frames.
1487 /// - Parameter endTransform: The transform to be applied at the end time of the timeRange.
1488 /// - Parameter timeRange: The timeRange over which the value of the transform will be interpolated between startTransform and endTransform.
1489 #[unsafe(method(setTransformRampFromStartTransform:toEndTransform:timeRange:))]
1490 #[unsafe(method_family = none)]
1491 pub unsafe fn setTransformRampFromStartTransform_toEndTransform_timeRange(
1492 &self,
1493 start_transform: CGAffineTransform,
1494 end_transform: CGAffineTransform,
1495 time_range: CMTimeRange,
1496 );
1497
1498 #[cfg(all(feature = "objc2-core-foundation", feature = "objc2-core-media"))]
1499 /// Sets a value of the transform at a time within the timeRange of the instruction.
1500 ///
1501 /// For purposes of spatial positioning of video frames, the origin is in the top-left corner, so
1502 /// (a) positive translation values in an affine transform move a video frame right and down; and
1503 /// (b) with an identity transform a video frame is positioned with its top-left corner in the top-left corner of the composited frame.
1504 /// Video frames shall be interpreted at their display sizes (as described by CVImageBufferGetDisplaySize,
1505 /// i.e., taking pixel aspect ratio attachments into account) before any affine transform is applied.
1506 ///
1507 /// Sets a fixed transform to apply from the specified time until the next time at which a transform is set; this is the same as setting a flat ramp for that time range.
1508 /// Before the first specified time for which a transform is set, the affine transform is held constant at the value of CGAffineTransformIdentity;
1509 /// after the last time for which a transform is set, the affine transform is held constant at that last value.
1510 ///
1511 /// This method throws an exception if time is not numeric.
1512 ///
1513 /// - Parameter transform: The transform to be applied at the specified time. See the discussion below of how transforms are applied to video frames.
1514 /// - Parameter time: A time value within the timeRange of the composition instruction.
1515 #[unsafe(method(setTransform:atTime:))]
1516 #[unsafe(method_family = none)]
1517 pub unsafe fn setTransform_atTime(&self, transform: CGAffineTransform, time: CMTime);
1518
1519 #[cfg(feature = "objc2-core-media")]
1520 /// Sets an opacity ramp to apply during the specified timerange.
1521 ///
1522 /// During an opacity ramp, opacity is computed using a linear interpolation.
1523 /// Before the first time for which an opacity is set, the opacity is held constant at 1.0; after the last specified time, the opacity is held constant at the last value.
1524 /// This method throws an exception if the time range does not have a numeric start time and duration.
1525 ///
1526 /// - Parameter startOpacity: The opacity to be applied at the starting time of the timeRange. The value must be between 0.0 and 1.0.
1527 /// - Parameter endOpacity: The opacity to be applied at the end time of the timeRange. The value must be between 0.0 and 1.0.
1528 /// - Parameter timeRange: The timeRange over which the value of the opacity will be interpolated between startOpacity and endOpacity.
1529 #[unsafe(method(setOpacityRampFromStartOpacity:toEndOpacity:timeRange:))]
1530 #[unsafe(method_family = none)]
1531 pub unsafe fn setOpacityRampFromStartOpacity_toEndOpacity_timeRange(
1532 &self,
1533 start_opacity: c_float,
1534 end_opacity: c_float,
1535 time_range: CMTimeRange,
1536 );
1537
1538 #[cfg(feature = "objc2-core-media")]
1539 /// Sets a value of the opacity at a time within the timeRange of the instruction.
1540 ///
1541 /// Sets a fixed opacity to apply from the specified time until the next time at which an opacity is set; this is the same as setting a flat ramp for that time range.
1542 /// Before the first time for which an opacity is set, the opacity is held constant at 1.0; after the last specified time, the opacity is held constant at the last value.
1543 /// This method throws an exception if time is not numeric.
1544 ///
1545 /// - Parameter opacity: The opacity to be applied at the specified time. The value must be between 0.0 and 1.0.
1546 /// - Parameter time: A time value within the timeRange of the composition instruction.
1547 #[unsafe(method(setOpacity:atTime:))]
1548 #[unsafe(method_family = none)]
1549 pub unsafe fn setOpacity_atTime(&self, opacity: c_float, time: CMTime);
1550
1551 #[cfg(all(feature = "objc2-core-foundation", feature = "objc2-core-media"))]
1552 /// Sets a crop rectangle ramp to apply during the specified timerange.
1553 ///
1554 /// The origin of the crop rectangle is the top-left corner of the buffer clean aperture rectangle. The crop rectangle is defined in
1555 /// square pixel space, i.e. without taking the pixel aspect ratio into account. Crop rectangles extending outside of the clean aperture
1556 /// are cropped to the clean aperture.
1557 ///
1558 /// During a crop rectangle ramp, the rectangle is interpolated between the values set at the ramp's start time and end time.
1559 /// When the starting or ending rectangle is empty, interpolations take into account the origin and size of the empty rectangle.
1560 /// Before the first specified time for which a crop rectangle is set, the crop rectangle is held constant at CGRectInfinite;
1561 /// after the last time for which a crop rectangle is set, the crop rectangle is held constant at that last value.
1562 ///
1563 /// This method throws an exception if the time range overlaps the time range of an existing crop rectangle ramp, or if the time range does not have a numeric start time and duration.
1564 ///
1565 /// - Parameter startCropRectangle: The crop rectangle to be applied at the starting time of the timeRange. See the discussion below of how crop rectangles are applied to video frames.
1566 /// - Parameter endCropRectangle: The crop rectangle to be applied at the end time of the timeRange.
1567 /// - Parameter timeRange: The timeRange over which the value of the crop rectangle will be interpolated between startCropRectangle and endCropRectangle.
1568 #[unsafe(method(setCropRectangleRampFromStartCropRectangle:toEndCropRectangle:timeRange:))]
1569 #[unsafe(method_family = none)]
1570 pub unsafe fn setCropRectangleRampFromStartCropRectangle_toEndCropRectangle_timeRange(
1571 &self,
1572 start_crop_rectangle: CGRect,
1573 end_crop_rectangle: CGRect,
1574 time_range: CMTimeRange,
1575 );
1576
1577 #[cfg(all(feature = "objc2-core-foundation", feature = "objc2-core-media"))]
1578 /// Sets a value of the crop rectangle at a time within the timeRange of the instruction.
1579 ///
1580 /// The origin of the crop rectangle is the top-left corner of the buffer clean aperture rectangle. The crop rectangle is defined in
1581 /// square pixel space, i.e. without taking the pixel aspect ratio into account. Crop rectangles extending outside of the clean aperture
1582 /// are cropped to the clean aperture.
1583 ///
1584 /// Sets a fixed crop rectangle to apply from the specified time until the next time at which a crop rectangle is set; this is the same as setting a flat ramp for that time range.
1585 /// Before the first specified time for which a crop rectangle is set, the crop rectangle is held constant at CGRectInfinite;
1586 /// after the last time for which a crop rectangle is set, the crop rectangle is held constant at that last value.
1587 ///
1588 /// This method throws an exception if time is not numeric.
1589 ///
1590 /// - Parameter cropRectangle: The crop rectangle to be applied at the specified time. See the discussion below of how crop rectangles are applied to video frames.
1591 /// - Parameter time: A time value within the timeRange of the composition instruction.
1592 #[unsafe(method(setCropRectangle:atTime:))]
1593 #[unsafe(method_family = none)]
1594 pub unsafe fn setCropRectangle_atTime(&self, crop_rectangle: CGRect, time: CMTime);
1595 );
1596}
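
// Illustrative sketch (not part of the generated bindings): building an
// `AVMutableVideoCompositionLayerInstruction` that holds a fixed transform and fades a source
// track out over a time range. The function name and its parameters are hypothetical; the
// transform and time range are supplied by the caller so that no constructors from other crates
// have to be assumed. Requires the `objc2-core-foundation` and `objc2-core-media` features.
#[cfg(all(feature = "objc2-core-foundation", feature = "objc2-core-media"))]
#[allow(dead_code)]
fn example_fade_out_layer_instruction(
    track_id: CMPersistentTrackID,
    transform: CGAffineTransform,
    fade_range: CMTimeRange,
) -> Retained<AVMutableVideoCompositionLayerInstruction> {
    unsafe {
        // Starts with no ramps and a trackID of kCMPersistentTrackID_Invalid.
        let layer = AVMutableVideoCompositionLayerInstruction::videoCompositionLayerInstruction();

        // The source track this instruction applies to.
        layer.setTrackID(track_id);

        // A fixed transform from `fade_range.start` onward; the same as setting a flat ramp.
        layer.setTransform_atTime(transform, fade_range.start);

        // Linearly interpolate opacity from fully opaque to fully transparent across the range.
        // Throws an Objective-C exception if the range does not have a numeric start and duration.
        layer.setOpacityRampFromStartOpacity_toEndOpacity_timeRange(1.0, 0.0, fade_range);

        layer
    }
}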
1597
1598/// Methods declared on superclass `NSObject`.
1599impl AVMutableVideoCompositionLayerInstruction {
1600 extern_methods!(
1601 #[unsafe(method(init))]
1602 #[unsafe(method_family = init)]
1603 pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;
1604
1605 #[unsafe(method(new))]
1606 #[unsafe(method_family = new)]
1607 pub unsafe fn new() -> Retained<Self>;
1608 );
1609}
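
// Illustrative sketch (not part of the generated bindings): animating a crop rectangle on an
// existing `AVMutableVideoCompositionLayerInstruction`. The function name and its parameters are
// hypothetical; both rectangles are supplied by the caller and are interpreted in square-pixel
// space relative to the buffer's clean aperture, as documented above. Requires the
// `objc2-core-foundation` and `objc2-core-media` features.
#[cfg(all(feature = "objc2-core-foundation", feature = "objc2-core-media"))]
#[allow(dead_code)]
fn example_crop_ramp(
    layer: &AVMutableVideoCompositionLayerInstruction,
    start_rect: CGRect,
    end_rect: CGRect,
    ramp_range: CMTimeRange,
) {
    // Interpolates the crop rectangle from `start_rect` to `end_rect` across `ramp_range`.
    // Before the ramp the crop rectangle is CGRectInfinite; after it, the last value is held.
    // Throws an Objective-C exception if `ramp_range` overlaps an existing crop rectangle ramp
    // or does not have a numeric start time and duration.
    unsafe {
        layer.setCropRectangleRampFromStartCropRectangle_toEndCropRectangle_timeRange(
            start_rect, end_rect, ramp_range,
        );
    }
}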
1610
1611extern_class!(
1612 /// A tool for using Core Animation in a video composition.
1613 ///
1614 /// Instances of AVVideoCompositionCoreAnimationTool are for use with offline rendering (AVAssetExportSession and AVAssetReader), not with AVPlayer.
1615 /// To synchronize real-time playback with other CoreAnimation layers, use AVSynchronizedLayer.
1616 ///
1617 /// Any animations will be interpreted on the video's timeline, not real-time, so
1618 /// (a) set animation beginTimes to a small positive value such as AVCoreAnimationBeginTimeAtZero rather than 0,
1619 /// because CoreAnimation will replace a value of 0 with CACurrentMediaTime();
1620 /// (b) set removedOnCompletion to NO on animations so they are not automatically removed;
1621 /// (c) do not use layers associated with UIViews.
1622 ///
1623 /// Subclasses of this type that are used from Swift must fulfill the requirements of a Sendable type.
1624 ///
1625 /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avvideocompositioncoreanimationtool?language=objc)
1626 #[unsafe(super(NSObject))]
1627 #[derive(Debug, PartialEq, Eq, Hash)]
1628 pub struct AVVideoCompositionCoreAnimationTool;
1629);
1630
1631unsafe impl Send for AVVideoCompositionCoreAnimationTool {}
1632
1633unsafe impl Sync for AVVideoCompositionCoreAnimationTool {}
1634
1635extern_conformance!(
1636 unsafe impl NSObjectProtocol for AVVideoCompositionCoreAnimationTool {}
1637);
1638
1639impl AVVideoCompositionCoreAnimationTool {
1640 extern_methods!(
1641 #[cfg(all(feature = "objc2-core-media", feature = "objc2-quartz-core"))]
1642 #[cfg(not(target_os = "watchos"))]
1643 /// Add a Core Animation layer to the video composition
1644 ///
1645 /// Include a Core Animation layer as an individual track input in video composition.
1646 /// This layer should not come from, or be added to, another layer tree.
1647 /// trackID should not match any real trackID in the source. Use -[AVAsset unusedTrackID]
1648 /// to obtain a trackID that's guaranteed not to coincide with the trackID of any track of the asset.
1649 /// AVVideoCompositionInstructions should reference trackID where the rendered animation should be included.
1650 /// For best performance, no transform should be set in the AVVideoCompositionLayerInstruction for this trackID.
1651 /// Be aware that on iOS, CALayers backing a UIView usually have their content flipped (as defined by the
1652 /// -contentsAreFlipped method). It may be required to insert a CALayer with its geometryFlipped property set
1653 /// to YES in the layer hierarchy to get the same result when attaching a CALayer to an AVVideoCompositionCoreAnimationTool
1654 /// as when using it to back a UIView.
1655 #[unsafe(method(videoCompositionCoreAnimationToolWithAdditionalLayer:asTrackID:))]
1656 #[unsafe(method_family = none)]
1657 pub unsafe fn videoCompositionCoreAnimationToolWithAdditionalLayer_asTrackID(
1658 layer: &CALayer,
1659 track_id: CMPersistentTrackID,
1660 ) -> Retained<Self>;
1661
1662 #[cfg(feature = "objc2-quartz-core")]
1663 #[cfg(not(target_os = "watchos"))]
1664 /// Compose the composited video frames with the Core Animation layer
1665 ///
1666 /// Place composited video frames in videoLayer and render animationLayer
1667 /// to produce the final frame. Normally videoLayer should be in animationLayer's sublayer tree.
1668 /// The animationLayer should not come from, or be added to, another layer tree.
1669 /// Be aware that on iOS, CALayers backing a UIView usually have their content flipped (as defined by the
1670 /// -contentsAreFlipped method). It may be required to insert a CALayer with its geometryFlipped property set
1671 /// to YES in the layer hierarchy to get the same result when attaching a CALayer to an AVVideoCompositionCoreAnimationTool
1672 /// as when using it to back a UIView.
1673 #[unsafe(method(videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:inLayer:))]
1674 #[unsafe(method_family = none)]
1675 pub unsafe fn videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer_inLayer(
1676 video_layer: &CALayer,
1677 animation_layer: &CALayer,
1678 ) -> Retained<Self>;
1679
1680 #[cfg(feature = "objc2-quartz-core")]
1681 #[cfg(not(target_os = "watchos"))]
1682 /// Compose the composited video frames with the Core Animation layer
1683 ///
1684 /// Duplicate the composited video frames in each videoLayer and render animationLayer
1685 /// to produce the final frame. Normally videoLayers should be in animationLayer's sublayer tree.
1686 /// The animationLayer should not come from, or be added to, another layer tree.
1687 /// Be aware that on iOS, CALayers backing a UIView usually have their content flipped (as defined by the
1688 /// -contentsAreFlipped method). It may be required to insert a CALayer with its geometryFlipped property set
1689 /// to YES in the layer hierarchy to get the same result when attaching a CALayer to an AVVideoCompositionCoreAnimationTool
1690 /// as when using it to back a UIView.
1691 #[unsafe(method(videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayers:inLayer:))]
1692 #[unsafe(method_family = none)]
1693 pub unsafe fn videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayers_inLayer(
1694 video_layers: &NSArray<CALayer>,
1695 animation_layer: &CALayer,
1696 ) -> Retained<Self>;
1697 );
1698}
1699
1700/// Methods declared on superclass `NSObject`.
1701impl AVVideoCompositionCoreAnimationTool {
1702 extern_methods!(
1703 #[unsafe(method(init))]
1704 #[unsafe(method_family = init)]
1705 pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;
1706
1707 #[unsafe(method(new))]
1708 #[unsafe(method_family = new)]
1709 pub unsafe fn new() -> Retained<Self>;
1710 );
1711}
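
// Illustrative sketch (not part of the generated bindings): wiring composited video frames into a
// Core Animation layer tree for offline rendering (AVAssetExportSession or AVAssetReader, not
// AVPlayer). The function name is hypothetical; the caller supplies both layers, with
// `video_layer` normally placed inside `animation_layer`'s sublayer tree. Requires the
// `objc2-quartz-core` feature on a non-watchOS target.
#[cfg(feature = "objc2-quartz-core")]
#[cfg(not(target_os = "watchos"))]
#[allow(dead_code)]
fn example_animation_tool(
    video_layer: &CALayer,
    animation_layer: &CALayer,
) -> Retained<AVVideoCompositionCoreAnimationTool> {
    // Composited frames are placed in `video_layer`, then `animation_layer` is rendered to
    // produce the final frame. Animations are interpreted on the video timeline, so use
    // AVCoreAnimationBeginTimeAtZero rather than 0 and set removedOnCompletion to NO.
    unsafe {
        AVVideoCompositionCoreAnimationTool::videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer_inLayer(
            video_layer,
            animation_layer,
        )
    }
}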
1712
1713/// AVAssetVideoCompositionUtility.
1714#[cfg(feature = "AVAsset")]
1715impl AVAsset {
1716 extern_methods!(
1717 #[cfg(feature = "objc2-core-media")]
1718 #[deprecated = "Use findUnusedTrackIDWithCompletionHandler: instead"]
1719 #[unsafe(method(unusedTrackID))]
1720 #[unsafe(method_family = none)]
1721 pub unsafe fn unusedTrackID(&self) -> CMPersistentTrackID;
1722
1723 #[cfg(all(feature = "block2", feature = "objc2-core-media"))]
1724 /// Loads a track ID that will not collide with any existing track
1725 ///
1726 /// - Parameter completionHandler: A block that is invoked when loading is complete, vending the track ID or an error.
1727 ///
1728 /// # Safety
1729 ///
1730 /// `completion_handler` block must be sendable.
1731 #[unsafe(method(findUnusedTrackIDWithCompletionHandler:))]
1732 #[unsafe(method_family = none)]
1733 pub unsafe fn findUnusedTrackIDWithCompletionHandler(
1734 &self,
1735 completion_handler: &block2::DynBlock<dyn Fn(CMPersistentTrackID, *mut NSError)>,
1736 );
1737 );
1738}
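
// Illustrative sketch (not part of the generated bindings): asynchronously obtaining a track ID
// that does not collide with any track of the asset, e.g. for use as an animation tool's trackID.
// The function name is hypothetical, and the example assumes that `block2::RcBlock::new` (from
// the block2 version these bindings target) yields a block that dereferences to the `DynBlock`
// type expected by the binding. Requires the `AVAsset`, `block2` and `objc2-core-media` features.
#[cfg(all(feature = "AVAsset", feature = "block2", feature = "objc2-core-media"))]
#[allow(dead_code)]
fn example_find_unused_track_id(asset: &AVAsset) {
    // The handler must be sendable; it is invoked once loading completes, with either a usable
    // track ID or a non-null NSError.
    let handler = block2::RcBlock::new(|track_id: CMPersistentTrackID, error: *mut NSError| {
        if error.is_null() {
            // `track_id` is guaranteed not to coincide with any existing track of the asset.
            let _ = track_id;
        }
    });
    unsafe { asset.findUnusedTrackIDWithCompletionHandler(&handler) };
}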
1739
1740/// AVVideoCompositionValidation.
1741impl AVVideoComposition {
1742 extern_methods!(
1743 #[cfg(all(feature = "AVAsset", feature = "objc2-core-media"))]
1744 /// Indicates whether the timeRanges of the receiver's instructions conform to the requirements described for them immediately above (in connection with the instructions property) and also whether all of the layer instructions have a value for trackID that corresponds either to a track of the specified asset or to the receiver's animationTool.
1745 ///
1746 /// In the course of validation, the receiver will invoke its validationDelegate with reference to any trouble spots in the video composition.
1747 /// An exception will be raised if the delegate modifies the receiver's array of instructions or the array of layerInstructions of any AVVideoCompositionInstruction contained therein during validation.
1748 ///
1749 /// - Parameter asset: Pass a reference to an AVAsset if you wish to validate the timeRanges of the instructions against the duration of the asset and the trackIDs of the layer instructions against the asset's tracks. Pass nil to skip that validation. Clients should ensure that the keys @"tracks" and @"duration" are already loaded on the AVAsset before validation is attempted.
1752 /// - Parameter timeRange: A CMTimeRange. Only those instructions with timeRanges that overlap with the specified timeRange will be validated. To validate all instructions that may be used for playback or other processing, regardless of timeRange, pass CMTimeRangeMake(kCMTimeZero, kCMTimePositiveInfinity).
1753 /// - Parameter validationDelegate: Indicates an object implementing the AVVideoCompositionValidationHandling protocol to receive information about troublesome portions of a video composition during processing of -isValidForAsset:. May be nil.
1754 #[deprecated = "Use isValidForTracks:assetDuration:timeRange:validationDelegate: instead"]
1755 #[unsafe(method(isValidForAsset:timeRange:validationDelegate:))]
1756 #[unsafe(method_family = none)]
1757 pub unsafe fn isValidForAsset_timeRange_validationDelegate(
1758 &self,
1759 asset: Option<&AVAsset>,
1760 time_range: CMTimeRange,
1761 validation_delegate: Option<&ProtocolObject<dyn AVVideoCompositionValidationHandling>>,
1762 ) -> bool;
1763
1764 #[cfg(all(feature = "AVAsset", feature = "block2", feature = "objc2-core-media"))]
1765 /// Determines whether the timeRanges of the receiver's instructions conform to the requirements described for them immediately above (in connection with the instructions property) and also whether all of the layer instructions have a value for trackID that corresponds either to a track of the specified asset or to the receiver's animationTool.
1766 ///
1767 /// In the course of validation, the receiver will invoke its validationDelegate with reference to any trouble spots in the video composition.
1768 /// An exception will be raised if the delegate modifies the receiver's array of instructions or the array of layerInstructions of any AVVideoCompositionInstruction contained therein during validation.
1769 ///
1770 /// - Parameter asset: Pass a reference to an AVAsset if you wish to validate the timeRanges of the instructions against the duration of the asset and the trackIDs of the layer instructions against the asset's tracks. Pass nil to skip that validation.
1771 /// - Parameter timeRange: A CMTimeRange. Only those instructions with timeRanges that overlap with the specified timeRange will be validated. To validate all instructions that may be used for playback or other processing, regardless of timeRange, pass CMTimeRangeMake(kCMTimeZero, kCMTimePositiveInfinity).
1772 /// - Parameter validationDelegate: Indicates an object implementing the AVVideoCompositionValidationHandling protocol to receive information about troublesome portions of a video composition during processing of -determineValidityForAsset:. May be nil.
1773 /// - Parameter completionHandler: A block that is invoked when a determination is made about whether the video composition is valid. If the `isValid` parameter is NO, either the video composition is not valid, in which case the `error` parameter will be nil, or the answer could not be determined, in which case the `error` parameter will be non-nil and describe the failure that occurred.
1774 ///
1775 /// # Safety
1776 ///
1777 /// `completion_handler` block must be sendable.
1778 #[deprecated]
1779 #[unsafe(method(determineValidityForAsset:timeRange:validationDelegate:completionHandler:))]
1780 #[unsafe(method_family = none)]
1781 pub unsafe fn determineValidityForAsset_timeRange_validationDelegate_completionHandler(
1782 &self,
1783 asset: Option<&AVAsset>,
1784 time_range: CMTimeRange,
1785 validation_delegate: Option<&ProtocolObject<dyn AVVideoCompositionValidationHandling>>,
1786 completion_handler: &block2::DynBlock<dyn Fn(Bool, *mut NSError)>,
1787 );
1788
1789 #[cfg(all(feature = "AVAssetTrack", feature = "objc2-core-media"))]
1790 /// Indicates whether the timeRanges of the receiver's instructions conform to the requirements described for them immediately above (in connection with the instructions property) and also whether all of the layer instructions have a value for trackID that corresponds either to a track of the specified asset or to the receiver's animationTool.
1791 ///
1792 /// In the course of validation, the receiver will invoke its validationDelegate with reference to any trouble spots in the video composition.
1793 /// An exception will be raised if the delegate modifies the receiver's array of instructions or the array of layerInstructions of any AVVideoCompositionInstruction contained therein during validation.
1794 ///
1795 /// - Parameter tracks: Pass a reference to an AVAsset's tracks if you wish to validate the trackIDs of the layer instructions against the asset's tracks. Pass nil to skip that validation. This method throws an exception if the tracks are not all from the same asset.
1796 /// - Parameter duration: Pass the duration of the asset if you wish to validate the timeRanges of the instructions against it. Pass kCMTimeInvalid to skip that validation.
1797 /// - Parameter timeRange: A CMTimeRange. Only those instructions with timeRanges that overlap with the specified timeRange will be validated. To validate all instructions that may be used for playback or other processing, regardless of timeRange, pass CMTimeRangeMake(kCMTimeZero, kCMTimePositiveInfinity).
1798 /// - Parameter validationDelegate: Indicates an object implementing the AVVideoCompositionValidationHandling protocol to receive information about troublesome portions of a video composition during processing of -isValidForAsset:. May be nil.
1799 #[unsafe(method(isValidForTracks:assetDuration:timeRange:validationDelegate:))]
1800 #[unsafe(method_family = none)]
1801 pub unsafe fn isValidForTracks_assetDuration_timeRange_validationDelegate(
1802 &self,
1803 tracks: &NSArray<AVAssetTrack>,
1804 duration: CMTime,
1805 time_range: CMTimeRange,
1806 validation_delegate: Option<&ProtocolObject<dyn AVVideoCompositionValidationHandling>>,
1807 ) -> bool;
1808 );
1809}
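
// Illustrative sketch (not part of the generated bindings): validating a video composition
// against a set of asset tracks before using it for playback or export. The function name is
// hypothetical; passing `None` as the delegate skips the per-problem callbacks and just yields
// the overall verdict. Requires the `AVAssetTrack` and `objc2-core-media` features.
#[cfg(all(feature = "AVAssetTrack", feature = "objc2-core-media"))]
#[allow(dead_code)]
fn example_validate_composition(
    composition: &AVVideoComposition,
    tracks: &NSArray<AVAssetTrack>,
    asset_duration: CMTime,
    time_range: CMTimeRange,
) -> bool {
    // Checks that instruction timeRanges are well-formed within `time_range` and that every layer
    // instruction's trackID matches one of `tracks` or the composition's animationTool.
    unsafe {
        composition.isValidForTracks_assetDuration_timeRange_validationDelegate(
            tracks,
            asset_duration,
            time_range,
            None,
        )
    }
}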
1810
1811extern_protocol!(
1812 /// [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avvideocompositionvalidationhandling?language=objc)
1813 pub unsafe trait AVVideoCompositionValidationHandling: NSObjectProtocol {
1814 /// Invoked by an instance of AVVideoComposition when validating an instance of AVVideoComposition, to report a key that has an invalid value.
1815 ///
1816 /// - Returns: An indication of whether the AVVideoComposition should continue validation in order to report additional problems that may exist.
1817 #[optional]
1818 #[unsafe(method(videoComposition:shouldContinueValidatingAfterFindingInvalidValueForKey:))]
1819 #[unsafe(method_family = none)]
1820 unsafe fn videoComposition_shouldContinueValidatingAfterFindingInvalidValueForKey(
1821 &self,
1822 video_composition: &AVVideoComposition,
1823 key: &NSString,
1824 ) -> bool;
1825
1826 #[cfg(feature = "objc2-core-media")]
1827 /// Invoked by an instance of AVVideoComposition when validating an instance of AVVideoComposition, to report a timeRange that has no corresponding video composition instruction.
1828 ///
1829 /// - Returns: An indication of whether the AVVideoComposition should continue validation in order to report additional problems that may exist.
1830 #[optional]
1831 #[unsafe(method(videoComposition:shouldContinueValidatingAfterFindingEmptyTimeRange:))]
1832 #[unsafe(method_family = none)]
1833 unsafe fn videoComposition_shouldContinueValidatingAfterFindingEmptyTimeRange(
1834 &self,
1835 video_composition: &AVVideoComposition,
1836 time_range: CMTimeRange,
1837 ) -> bool;
1838
1839 #[cfg(feature = "AVVideoCompositing")]
1840 /// Invoked by an instance of AVVideoComposition when validating an instance of AVVideoComposition, to report a video composition instruction with a timeRange that's invalid, that overlaps with the timeRange of a prior instruction, or that contains times earlier than the timeRange of a prior instruction.
1841 ///
1842 /// Use CMTIMERANGE_IS_INVALID, defined in CMTimeRange.h, to test whether the timeRange itself is invalid. Refer to headerdoc for AVVideoComposition.instructions for a discussion of how timeRanges for instructions must be formulated.
1843 ///
1844 /// - Returns: An indication of whether the AVVideoComposition should continue validation in order to report additional problems that may exist.
1845 #[optional]
1846 #[unsafe(method(videoComposition:shouldContinueValidatingAfterFindingInvalidTimeRangeInInstruction:))]
1847 #[unsafe(method_family = none)]
1848 unsafe fn videoComposition_shouldContinueValidatingAfterFindingInvalidTimeRangeInInstruction(
1849 &self,
1850 video_composition: &AVVideoComposition,
1851 video_composition_instruction: &ProtocolObject<
1852 dyn AVVideoCompositionInstructionProtocol,
1853 >,
1854 ) -> bool;
1855
1856 #[cfg(all(feature = "AVAsset", feature = "AVVideoCompositing"))]
1857 /// Invoked by an instance of AVVideoComposition when validating an instance of AVVideoComposition, to report a video composition layer instruction with a trackID that does not correspond either to the trackID used for the composition's animationTool or to a track of the asset specified in -[AVVideoComposition isValidForAsset:timeRange:validationDelegate:].
1858 ///
1859 /// - Returns: An indication of whether the AVVideoComposition should continue validation in order to report additional problems that may exist.
1860 #[optional]
1861 #[unsafe(method(videoComposition:shouldContinueValidatingAfterFindingInvalidTrackIDInInstruction:layerInstruction:asset:))]
1862 #[unsafe(method_family = none)]
1863 unsafe fn videoComposition_shouldContinueValidatingAfterFindingInvalidTrackIDInInstruction_layerInstruction_asset(
1864 &self,
1865 video_composition: &AVVideoComposition,
1866 video_composition_instruction: &ProtocolObject<
1867 dyn AVVideoCompositionInstructionProtocol,
1868 >,
1869 layer_instruction: &AVVideoCompositionLayerInstruction,
1870 asset: &AVAsset,
1871 ) -> bool;
1872 }
1873);