objc2_av_foundation/generated/AVCaptureFileOutput.rs
1//! This file has been automatically generated by `objc2`'s `header-translator`.
2//! DO NOT EDIT
3use core::ffi::*;
4use core::ptr::NonNull;
5use objc2::__framework_prelude::*;
6#[cfg(feature = "objc2-core-media")]
7use objc2_core_media::*;
8use objc2_foundation::*;
9
10use crate::*;
11
12extern_class!(
13 /// AVCaptureFileOutput is an abstract subclass of AVCaptureOutput that provides an interface for writing captured media to files.
14 ///
15 ///
16 /// This abstract superclass defines the interface for outputs that record media samples to files. File outputs can start recording to a new file using the startRecordingToOutputFileURL:recordingDelegate: method. On successive invocations of this method on macOS, the output file can be changed dynamically without losing media samples. A file output can stop recording using the stopRecording method. Because files are recorded in the background, applications will need to specify a delegate for each new file so that they can be notified when recorded files are finished.
17 ///
18 /// On macOS, clients can also set a delegate on the file output itself that can be used to control recording along exact media sample boundaries using the captureOutput:didOutputSampleBuffer:fromConnection: method.
19 ///
20 /// The concrete subclasses of AVCaptureFileOutput are AVCaptureMovieFileOutput, which records media to a QuickTime movie file, and AVCaptureAudioFileOutput, which writes audio media to a variety of audio file formats.
21 ///
22 /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturefileoutput?language=objc)
23 #[unsafe(super(AVCaptureOutput, NSObject))]
24 #[derive(Debug, PartialEq, Eq, Hash)]
25 #[cfg(feature = "AVCaptureOutputBase")]
26 pub struct AVCaptureFileOutput;
27);
28
29#[cfg(feature = "AVCaptureOutputBase")]
30extern_conformance!(
31 unsafe impl NSObjectProtocol for AVCaptureFileOutput {}
32);
33
34#[cfg(feature = "AVCaptureOutputBase")]
35impl AVCaptureFileOutput {
36 extern_methods!(
37 /// The receiver's delegate.
38 ///
39 ///
40 /// The value of this property is an object conforming to the AVCaptureFileOutputDelegate protocol that will be able to monitor and control recording along exact sample boundaries.
41 ///
42 /// # Safety
43 ///
44 /// This is not retained internally, you must ensure the object is still alive.
45 #[unsafe(method(delegate))]
46 #[unsafe(method_family = none)]
47 pub unsafe fn delegate(
48 &self,
49 ) -> Option<Retained<ProtocolObject<dyn AVCaptureFileOutputDelegate>>>;
50
51 /// Setter for [`delegate`][Self::delegate].
52 ///
53 /// # Safety
54 ///
55 /// This is unretained, you must ensure the object is kept alive while in use.
56 #[unsafe(method(setDelegate:))]
57 #[unsafe(method_family = none)]
58 pub unsafe fn setDelegate(
59 &self,
60 delegate: Option<&ProtocolObject<dyn AVCaptureFileOutputDelegate>>,
61 );
62
63 /// The file URL of the file to which the receiver is currently recording incoming buffers.
64 ///
65 ///
66 /// The value of this property is an NSURL object containing the file URL of the file currently being written by the receiver. Returns nil if the receiver is not recording to any file.
67 #[unsafe(method(outputFileURL))]
68 #[unsafe(method_family = none)]
69 pub unsafe fn outputFileURL(&self) -> Option<Retained<NSURL>>;
70
71 /// Tells the receiver to start recording to a new file, and specifies a delegate that will be notified when recording is finished.
72 ///
73 ///
74 /// Parameter `outputFileURL`: An NSURL object containing the URL of the output file. This method throws an NSInvalidArgumentException if the URL is not a valid file URL.
75 ///
76 /// Parameter `delegate`: An object conforming to the AVCaptureFileOutputRecordingDelegate protocol. Clients must specify a delegate so that they can be notified when recording to the given URL is finished.
77 ///
78 ///
79 /// The method sets the file URL to which the receiver is currently writing output media. If a file at the given URL already exists when capturing starts, recording to the new file will fail.
80 ///
81 /// Clients need not call stopRecording before calling this method while another recording is in progress. On macOS, if this method is invoked while an existing output file was already being recorded, no media samples will be discarded between the old file and the new file.
82 ///
83 /// When recording is stopped either by calling stopRecording, by changing files using this method, or because of an error, the remaining data that needs to be included in the file will be written in the background. Therefore, clients must specify a delegate that will be notified when all data has been written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method. The recording delegate can also optionally implement methods that inform it when data starts being written, when recording is paused and resumed, and when recording is about to be finished.
84 ///
85 /// On macOS, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the new file are guaranteed to be those contained in the sample buffer passed to that method.
86 ///
87 /// Note: AVCaptureAudioFileOutput does not support -startRecordingToOutputFileURL:recordingDelegate:. Use -startRecordingToOutputFileURL:outputFileType:recordingDelegate: instead.
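        ///
        /// # Example
        ///
        /// A minimal sketch of starting and then finishing a recording. It assumes an
        /// `AVCaptureMovieFileOutput` that is already attached to a running `AVCaptureSession`,
        /// a `delegate` object implementing `AVCaptureFileOutputRecordingDelegate` (for example
        /// one built with objc2's `define_class!`), and an illustrative output path.
        ///
        /// ```ignore
        /// use objc2_foundation::{NSString, NSURL};
        ///
        /// unsafe {
        ///     // Must be a file URL; recording fails if a file already exists at this path.
        ///     let url = NSURL::fileURLWithPath(&NSString::from_str("/tmp/recording.mov"));
        ///     output.startRecordingToOutputFileURL_recordingDelegate(&url, delegate);
        ///
        ///     // ... later: finish the file. The delegate's
        ///     // captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:
        ///     // callback fires once all remaining data has been written in the background.
        ///     output.stopRecording();
        /// }
        /// ```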
88 #[unsafe(method(startRecordingToOutputFileURL:recordingDelegate:))]
89 #[unsafe(method_family = none)]
90 pub unsafe fn startRecordingToOutputFileURL_recordingDelegate(
91 &self,
92 output_file_url: &NSURL,
93 delegate: &ProtocolObject<dyn AVCaptureFileOutputRecordingDelegate>,
94 );
95
96 /// Tells the receiver to stop recording to the current file.
97 ///
98 ///
99 /// Clients can call this method when they want to stop recording new samples to the current file, and do not want to continue recording to another file. Clients that want to switch from one file to another should not call this method. Instead they should simply call startRecordingToOutputFileURL:recordingDelegate: with the new file URL.
100 ///
101 /// When recording is stopped either by calling this method, by changing files using startRecordingToOutputFileURL:recordingDelegate:, or because of an error, the remaining data that needs to be included in the file will be written in the background. Therefore, before using the file, clients must wait until the delegate that was specified in startRecordingToOutputFileURL:recordingDelegate: is notified when all data has been written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method.
102 ///
103 /// On macOS, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the last samples written to the current file are guaranteed to be those that were output immediately before those in the sample buffer passed to that method.
104 #[unsafe(method(stopRecording))]
105 #[unsafe(method_family = none)]
106 pub unsafe fn stopRecording(&self);
107
108 /// Indicates whether the receiver is currently recording.
109 ///
110 ///
111 /// The value of this property is YES when the receiver currently has a file to which it is writing new samples, NO otherwise.
112 #[unsafe(method(isRecording))]
113 #[unsafe(method_family = none)]
114 pub unsafe fn isRecording(&self) -> bool;
115
116 /// Indicates whether recording to the current output file is paused.
117 ///
118 ///
119 /// This property indicates whether recording to the file returned by outputFileURL has been paused using the pauseRecording method. When a recording is paused, captured samples are not written to the output file, but new samples can be written to the same file in the future by calling resumeRecording.
120 #[unsafe(method(isRecordingPaused))]
121 #[unsafe(method_family = none)]
122 pub unsafe fn isRecordingPaused(&self) -> bool;
123
124 /// Pauses recording to the current output file.
125 ///
126 ///
127 /// This method causes the receiver to stop writing captured samples to the current output file returned by outputFileURL, but leaves the file open so that samples can be written to it in the future, when resumeRecording is called. This allows clients to record multiple media segments that are not contiguous in time to a single file.
128 ///
129 /// On macOS, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the last samples written to the current file are guaranteed to be those that were output immediately before those in the sample buffer passed to that method.
130 ///
131 /// A recording can be stopped as normal, even when it's paused.
132 ///
133 /// A format or device change will result in the recording being stopped, even when it's paused.
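        ///
        /// # Example
        ///
        /// A short sketch of recording two non-contiguous segments into a single file; it
        /// assumes `output` is an AVCaptureFileOutput subclass that is currently recording.
        ///
        /// ```ignore
        /// unsafe {
        ///     output.pauseRecording();  // samples stop being written, but the file stays open
        ///     // ... some time later ...
        ///     output.resumeRecording(); // subsequent samples go into the same output file
        ///     output.stopRecording();   // finish the file as usual
        /// }
        /// ```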
134 #[unsafe(method(pauseRecording))]
135 #[unsafe(method_family = none)]
136 pub unsafe fn pauseRecording(&self);
137
138 /// Resumes recording to the current output file after it was previously paused using pauseRecording.
139 ///
140 ///
141 /// This method causes the receiver to resume writing captured samples to the current output file returned by outputFileURL, after recording was previously paused using pauseRecording. This allows clients to record multiple media segments that are not contiguous in time to a single file.
142 ///
143 /// On macOS, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the current file are guaranteed to be those contained in the sample buffer passed to that method.
144 #[unsafe(method(resumeRecording))]
145 #[unsafe(method_family = none)]
146 pub unsafe fn resumeRecording(&self);
147
148 #[cfg(feature = "objc2-core-media")]
149 /// Indicates the duration of the media recorded to the current output file.
150 ///
151 ///
152 /// If recording is in progress, this property returns the total time recorded so far.
153 #[unsafe(method(recordedDuration))]
154 #[unsafe(method_family = none)]
155 pub unsafe fn recordedDuration(&self) -> CMTime;
156
157 /// Indicates the size, in bytes, of the data recorded to the current output file.
158 ///
159 ///
160 /// If a recording is in progress, this property returns the size in bytes of the data recorded so far.
161 #[unsafe(method(recordedFileSize))]
162 #[unsafe(method_family = none)]
163 pub unsafe fn recordedFileSize(&self) -> i64;
164
165 #[cfg(feature = "objc2-core-media")]
166 /// Specifies the maximum duration of the media that should be recorded by the receiver.
167 ///
168 ///
169 /// This property specifies a hard limit on the duration of recorded files. Recording is stopped when the limit is reached and the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: delegate method is invoked with an appropriate error. The default value of this property is kCMTimeInvalid, which indicates no limit.
170 #[unsafe(method(maxRecordedDuration))]
171 #[unsafe(method_family = none)]
172 pub unsafe fn maxRecordedDuration(&self) -> CMTime;
173
174 #[cfg(feature = "objc2-core-media")]
175 /// Setter for [`maxRecordedDuration`][Self::maxRecordedDuration].
176 #[unsafe(method(setMaxRecordedDuration:))]
177 #[unsafe(method_family = none)]
178 pub unsafe fn setMaxRecordedDuration(&self, max_recorded_duration: CMTime);
179
180 /// Specifies the maximum size, in bytes, of the data that should be recorded by the receiver.
181 ///
182 ///
183 /// This property specifies a hard limit on the data size of recorded files. Recording is stopped when the limit is reached and the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: delegate method is invoked with an appropriate error. The default value of this property is 0, which indicates no limit.
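        ///
        /// # Example
        ///
        /// A sketch of bounding a recording by file size and by remaining disk space; the
        /// byte values are illustrative and `output` is assumed to be an AVCaptureFileOutput
        /// subclass instance.
        ///
        /// ```ignore
        /// unsafe {
        ///     // Stop automatically once roughly 500 MiB have been written ...
        ///     output.setMaxRecordedFileSize(500 * 1024 * 1024);
        ///     // ... or once less than 1 GiB of free space remains on the target volume.
        ///     output.setMinFreeDiskSpaceLimit(1024 * 1024 * 1024);
        /// }
        /// ```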
184 #[unsafe(method(maxRecordedFileSize))]
185 #[unsafe(method_family = none)]
186 pub unsafe fn maxRecordedFileSize(&self) -> i64;
187
188 /// Setter for [`maxRecordedFileSize`][Self::maxRecordedFileSize].
189 #[unsafe(method(setMaxRecordedFileSize:))]
190 #[unsafe(method_family = none)]
191 pub unsafe fn setMaxRecordedFileSize(&self, max_recorded_file_size: i64);
192
193 /// Specifies the minimum amount of free space, in bytes, required for recording to continue on a given volume.
194 ///
195 ///
196 /// This property specifies a hard lower limit on the amount of free space that must remain on a target volume for recording to continue. Recording is stopped when the limit is reached and the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: delegate method is invoked with an appropriate error.
197 #[unsafe(method(minFreeDiskSpaceLimit))]
198 #[unsafe(method_family = none)]
199 pub unsafe fn minFreeDiskSpaceLimit(&self) -> i64;
200
201 /// Setter for [`minFreeDiskSpaceLimit`][Self::minFreeDiskSpaceLimit].
202 #[unsafe(method(setMinFreeDiskSpaceLimit:))]
203 #[unsafe(method_family = none)]
204 pub unsafe fn setMinFreeDiskSpaceLimit(&self, min_free_disk_space_limit: i64);
205 );
206}
207
208/// Methods declared on superclass `AVCaptureOutput`.
209#[cfg(feature = "AVCaptureOutputBase")]
210impl AVCaptureFileOutput {
211 extern_methods!(
212 #[unsafe(method(init))]
213 #[unsafe(method_family = init)]
214 pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;
215
216 #[unsafe(method(new))]
217 #[unsafe(method_family = new)]
218 pub unsafe fn new() -> Retained<Self>;
219 );
220}
221
222extern_protocol!(
223 /// Defines an interface for delegates of AVCaptureFileOutput to respond to events that occur in the process of recording a single file.
224 ///
225 /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturefileoutputrecordingdelegate?language=objc)
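    ///
    /// # Example
    ///
    /// A rough sketch of a delegate class implementing the one required method,
    /// captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:. The exact
    /// `define_class!` invocation below is an assumption about objc2's class-definition macro;
    /// the optional callbacks can be added in the same way.
    ///
    /// ```ignore
    /// use objc2::define_class;
    /// use objc2::runtime::ProtocolObject;
    /// use objc2_foundation::{NSArray, NSError, NSObject, NSObjectProtocol, NSURL};
    ///
    /// define_class!(
    ///     // SAFETY: NSObject has no special subclassing requirements.
    ///     #[unsafe(super(NSObject))]
    ///     #[name = "RecordingDelegate"]
    ///     struct RecordingDelegate;
    ///
    ///     unsafe impl NSObjectProtocol for RecordingDelegate {}
    ///
    ///     unsafe impl AVCaptureFileOutputRecordingDelegate for RecordingDelegate {
    ///         // Required: called once all data has been written to the finished file.
    ///         #[unsafe(method(captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:))]
    ///         fn did_finish_recording(
    ///             &self,
    ///             _output: &AVCaptureFileOutput,
    ///             file_url: &NSURL,
    ///             _connections: &NSArray<AVCaptureConnection>,
    ///             error: Option<&NSError>,
    ///         ) {
    ///             match error {
    ///                 Some(error) => eprintln!("recording to {file_url:?} failed: {error:?}"),
    ///                 None => println!("finished writing {file_url:?}"),
    ///             }
    ///         }
    ///     }
    /// );
    ///
    /// // An instance is then passed to the file output as
    /// // `ProtocolObject::from_ref(&*delegate)`.
    /// ```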
226 pub unsafe trait AVCaptureFileOutputRecordingDelegate: NSObjectProtocol {
227 #[cfg(all(feature = "AVCaptureOutputBase", feature = "AVCaptureSession"))]
228 /// Informs the delegate when the output has started writing to a file.
229 ///
230 ///
231 /// Parameter `output`: The capture file output that started writing the file.
232 ///
233 /// Parameter `fileURL`: The file URL of the file that is being written.
234 ///
235 /// Parameter `connections`: An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
236 ///
237 ///
238 /// This method is called when the file output has started writing data to a file. If an error condition prevents any data from being written, this method may not be called. captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error: and captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: will always be called, even if no data is written.
239 ///
240 /// Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
241 #[optional]
242 #[unsafe(method(captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:))]
243 #[unsafe(method_family = none)]
244 unsafe fn captureOutput_didStartRecordingToOutputFileAtURL_fromConnections(
245 &self,
246 output: &AVCaptureFileOutput,
247 file_url: &NSURL,
248 connections: &NSArray<AVCaptureConnection>,
249 );
250
251 #[cfg(all(
252 feature = "AVCaptureOutputBase",
253 feature = "AVCaptureSession",
254 feature = "objc2-core-media"
255 ))]
256 /// Informs the delegate when the output has started writing to a file.
257 ///
258 ///
259 /// Parameter `output`: The capture file output that started writing the file.
260 ///
261 /// Parameter `fileURL`: The file URL of the file that is being written.
262 ///
263 /// Parameter `startPTS`: The timestamp of the first buffer written to the file, synced with AVCaptureSession.synchronizationClock
264 ///
265 /// Parameter `connections`: An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
266 ///
267 ///
268 /// This method is called when the file output has started writing data to a file. If an error condition prevents any data from being written, this method may not be called. captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error: and captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: will always be called, even if no data is written.
269 ///
270 /// If this method is implemented, the alternative delegate callback -captureOutput:didStartRecordingToOutputFileAtURL:fromConnections: will not be called.
271 ///
272 /// Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
273 #[optional]
274 #[unsafe(method(captureOutput:didStartRecordingToOutputFileAtURL:startPTS:fromConnections:))]
275 #[unsafe(method_family = none)]
276 unsafe fn captureOutput_didStartRecordingToOutputFileAtURL_startPTS_fromConnections(
277 &self,
278 output: &AVCaptureFileOutput,
279 file_url: &NSURL,
280 start_pts: CMTime,
281 connections: &NSArray<AVCaptureConnection>,
282 );
283
284 #[cfg(all(feature = "AVCaptureOutputBase", feature = "AVCaptureSession"))]
285 /// Called whenever the output is recording to a file and successfully pauses the recording at the request of the client.
286 ///
287 ///
288 /// Parameter `output`: The capture file output that has paused its file recording.
289 ///
290 /// Parameter `fileURL`: The file URL of the file that is being written.
291 ///
292 /// Parameter `connections`: An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
293 ///
294 ///
295 /// Delegates can use this method to be informed when a request to pause recording is actually respected. It is safe for delegates to change what the file output is currently doing (starting a new file, for example) from within this method. If recording to a file is stopped, either manually or due to an error, this method is not guaranteed to be called, even if a previous call to pauseRecording was made.
296 ///
297 /// Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
298 #[optional]
299 #[unsafe(method(captureOutput:didPauseRecordingToOutputFileAtURL:fromConnections:))]
300 #[unsafe(method_family = none)]
301 unsafe fn captureOutput_didPauseRecordingToOutputFileAtURL_fromConnections(
302 &self,
303 output: &AVCaptureFileOutput,
304 file_url: &NSURL,
305 connections: &NSArray<AVCaptureConnection>,
306 );
307
308 #[cfg(all(feature = "AVCaptureOutputBase", feature = "AVCaptureSession"))]
309 /// Called whenever the output, at the request of the client, successfully resumes a file recording that was paused.
310 ///
311 ///
312 /// Parameter `output`: The capture file output that has resumed its paused file recording.
313 ///
314 /// Parameter `fileURL`: The file URL of the file that is being written.
315 ///
316 /// Parameter `connections`: An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
317 ///
318 ///
319 /// Delegates can use this method to be informed when a request to resume recording is actually respected. It is safe for delegates to change what the file output is currently doing (starting a new file, for example) from within this method. If recording to a file is stopped, either manually or due to an error, this method is not guaranteed to be called, even if a previous call to resumeRecording was made.
320 ///
321 /// Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
322 #[optional]
323 #[unsafe(method(captureOutput:didResumeRecordingToOutputFileAtURL:fromConnections:))]
324 #[unsafe(method_family = none)]
325 unsafe fn captureOutput_didResumeRecordingToOutputFileAtURL_fromConnections(
326 &self,
327 output: &AVCaptureFileOutput,
328 file_url: &NSURL,
329 connections: &NSArray<AVCaptureConnection>,
330 );
331
332 #[cfg(all(feature = "AVCaptureOutputBase", feature = "AVCaptureSession"))]
333 /// Informs the delegate when the output will stop writing new samples to a file.
334 ///
335 ///
336 /// Parameter `output`: The capture file output that will finish writing the file.
337 ///
338 /// Parameter `fileURL`: The file URL of the file that is being written.
339 ///
340 /// Parameter `connections`: An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
341 ///
342 /// Parameter `error`: An error describing what caused the file to stop recording, or nil if there was no error.
343 ///
344 ///
345 /// This method is called when the file output will stop recording new samples to the file at outputFileURL, either because startRecordingToOutputFileURL:recordingDelegate: or stopRecording were called, or because an error, described by the error parameter, occurred (if no error occurred, the error parameter will be nil). This method will always be called for each recording request, even if no data is successfully written to the file.
346 ///
347 /// Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
348 #[optional]
349 #[unsafe(method(captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error:))]
350 #[unsafe(method_family = none)]
351 unsafe fn captureOutput_willFinishRecordingToOutputFileAtURL_fromConnections_error(
352 &self,
353 output: &AVCaptureFileOutput,
354 file_url: &NSURL,
355 connections: &NSArray<AVCaptureConnection>,
356 error: Option<&NSError>,
357 );
358
359 #[cfg(all(feature = "AVCaptureOutputBase", feature = "AVCaptureSession"))]
360 /// Informs the delegate when all pending data has been written to an output file.
361 ///
362 ///
363 /// Parameter `output`: The capture file output that has finished writing the file.
364 ///
365 /// Parameter `outputFileURL`: The file URL of the file that has been written.
366 ///
367 /// Parameter `connections`: An array of AVCaptureConnection objects attached to the file output that provided the data that was written to the file.
368 ///
369 /// Parameter `error`: An error describing what caused the file to stop recording, or nil if there was no error.
370 ///
371 ///
372 /// This method is called when the file output has finished writing all data to a file whose recording was stopped, either because startRecordingToOutputFileURL:recordingDelegate: or stopRecording were called, or because an error, described by the error parameter, occurred (if no error occurred, the error parameter will be nil). This method will always be called for each recording request, even if no data is successfully written to the file.
373 ///
374 /// Clients should not assume that this method will be called on a specific thread.
375 ///
376 /// Delegates are required to implement this method.
377 #[unsafe(method(captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:))]
378 #[unsafe(method_family = none)]
379 unsafe fn captureOutput_didFinishRecordingToOutputFileAtURL_fromConnections_error(
380 &self,
381 output: &AVCaptureFileOutput,
382 output_file_url: &NSURL,
383 connections: &NSArray<AVCaptureConnection>,
384 error: Option<&NSError>,
385 );
386 }
387);
388
389extern_protocol!(
390 /// Defines an interface for delegates of AVCaptureFileOutput to monitor and control recordings along exact sample boundaries.
391 ///
392 /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturefileoutputdelegate?language=objc)
393 pub unsafe trait AVCaptureFileOutputDelegate: NSObjectProtocol {
394 #[cfg(feature = "AVCaptureOutputBase")]
395 /// Allows a client to opt in to frame accurate record-start in captureOutput:didOutputSampleBuffer:fromConnection:
396 ///
397 ///
398 /// Parameter `output`: The AVCaptureFileOutput instance with which the delegate is associated.
399 ///
400 ///
401 /// In apps linked before macOS 10.8, delegates that implement the captureOutput:didOutputSampleBuffer:fromConnection: method can ensure frame accurate start / stop of a recording by calling startRecordingToOutputFileURL:recordingDelegate: from within the callback. Frame accurate start requires the capture output to apply outputSettings when the session starts running, so it is ready to record on any given frame boundary. Compressing all the time while the session is running has power, thermal, and CPU implications. In apps linked on or after macOS 10.8, delegates must implement captureOutputShouldProvideSampleAccurateRecordingStart: to indicate whether frame accurate start/stop recording is required (returning YES) or not (returning NO). The output calls this method as soon as the delegate is added, and never again. If your delegate returns NO, the capture output applies compression settings when startRecordingToOutputFileURL:recordingDelegate: is called, and disables compression settings after the recording is stopped.
402 #[unsafe(method(captureOutputShouldProvideSampleAccurateRecordingStart:))]
403 #[unsafe(method_family = none)]
404 unsafe fn captureOutputShouldProvideSampleAccurateRecordingStart(
405 &self,
406 output: &AVCaptureFileOutput,
407 ) -> bool;
408
409 #[cfg(all(
410 feature = "AVCaptureOutputBase",
411 feature = "AVCaptureSession",
412 feature = "objc2-core-media"
413 ))]
414 /// Gives the delegate the opportunity to inspect samples as they are received by the output and optionally start and stop recording at exact times.
415 ///
416 ///
417 /// Parameter `output`: The capture file output that is receiving the media data.
418 ///
419 /// Parameter `sampleBuffer`: A CMSampleBuffer object containing the sample data and additional information about the sample, such as its format and presentation time.
420 ///
421 /// Parameter `connection`: The AVCaptureConnection object attached to the file output from which the sample data was received.
422 ///
423 ///
424 /// This method is called whenever the file output receives a single sample buffer (a single video frame or audio buffer, for example) from the given connection. This gives delegates an opportunity to start and stop recording or change output files at an exact sample boundary if -captureOutputShouldProvideSampleAccurateRecordingStart: returns YES. If called from within this method, the file output's startRecordingToOutputFileURL:recordingDelegate: and resumeRecording methods are guaranteed to include the received sample buffer in the new file, whereas calls to stopRecording and pauseRecording are guaranteed to include all samples leading up to those in the current sample buffer in the existing file.
425 ///
426 /// Delegates can gather information particular to the samples by inspecting the CMSampleBuffer object. Sample buffers always contain a single frame of video if called from this method but may also contain multiple samples of audio. For B-frame video formats, samples are always delivered in presentation order.
427 ///
428 /// Clients that need to reference the CMSampleBuffer object outside of the scope of this method must CFRetain it and then CFRelease it when they are finished with it.
429 ///
430 /// Note that to maintain optimal performance, some sample buffers directly reference pools of memory that may need to be reused by the device system and other capture inputs. This is frequently the case for uncompressed device native capture where memory blocks are copied as little as possible. If multiple sample buffers reference such pools of memory for too long, inputs will no longer be able to copy new samples into memory and those samples will be dropped. If your application is causing samples to be dropped by retaining the provided CMSampleBuffer objects for too long, but it needs access to the sample data for a long period of time, consider copying the data into a new buffer and then calling CFRelease on the sample buffer if it was previously retained so that the memory it references can be reused.
431 ///
432 /// Clients should not assume that this method will be called on a specific thread. In addition, this method is called periodically, so it must be efficient to prevent capture performance problems.
433 #[optional]
434 #[unsafe(method(captureOutput:didOutputSampleBuffer:fromConnection:))]
435 #[unsafe(method_family = none)]
436 unsafe fn captureOutput_didOutputSampleBuffer_fromConnection(
437 &self,
438 output: &AVCaptureFileOutput,
439 sample_buffer: &CMSampleBuffer,
440 connection: &AVCaptureConnection,
441 );
442 }
443);
444
445extern_class!(
446 /// AVCaptureMovieFileOutput is a concrete subclass of AVCaptureFileOutput that writes captured media to QuickTime movie files.
447 ///
448 ///
449 /// AVCaptureMovieFileOutput implements the complete file recording interface declared by AVCaptureFileOutput for writing media data to QuickTime movie files. In addition, instances of AVCaptureMovieFileOutput allow clients to configure options specific to the QuickTime file format, including allowing them to write metadata collections to each file, specify media encoding options for each track (macOS), and specify an interval at which movie fragments should be written.
450 ///
451 /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturemoviefileoutput?language=objc)
452 #[unsafe(super(AVCaptureFileOutput, AVCaptureOutput, NSObject))]
453 #[derive(Debug, PartialEq, Eq, Hash)]
454 #[cfg(feature = "AVCaptureOutputBase")]
455 pub struct AVCaptureMovieFileOutput;
456);
457
458#[cfg(feature = "AVCaptureOutputBase")]
459extern_conformance!(
460 unsafe impl NSObjectProtocol for AVCaptureMovieFileOutput {}
461);
462
463#[cfg(feature = "AVCaptureOutputBase")]
464impl AVCaptureMovieFileOutput {
465 extern_methods!(
466 #[unsafe(method(init))]
467 #[unsafe(method_family = init)]
468 pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;
469
470 #[unsafe(method(new))]
471 #[unsafe(method_family = new)]
472 pub unsafe fn new() -> Retained<Self>;
473
474 #[cfg(feature = "objc2-core-media")]
475 /// Specifies the frequency with which movie fragments should be written.
476 ///
477 ///
478 /// When movie fragments are used, a partially written QuickTime movie file whose writing is unexpectedly interrupted can be successfully opened and played up to multiples of the specified time interval. A value of kCMTimeInvalid indicates that movie fragments should not be used, but that only a movie atom describing all of the media in the file should be written. The default value of this property is ten seconds.
479 ///
480 /// Changing the value of this property will not affect the movie fragment interval of the file currently being written, if there is one.
481 ///
482 /// For best writing performance on external storage devices, set the movieFragmentInterval to 10 seconds or greater. If the size of a movie fragment is greater than or equal to 2GB, an interval is added at the 2GB mark.
483 #[unsafe(method(movieFragmentInterval))]
484 #[unsafe(method_family = none)]
485 pub unsafe fn movieFragmentInterval(&self) -> CMTime;
486
487 #[cfg(feature = "objc2-core-media")]
488 /// Setter for [`movieFragmentInterval`][Self::movieFragmentInterval].
489 #[unsafe(method(setMovieFragmentInterval:))]
490 #[unsafe(method_family = none)]
491 pub unsafe fn setMovieFragmentInterval(&self, movie_fragment_interval: CMTime);
492
493 #[cfg(feature = "AVMetadataItem")]
494 /// A collection of metadata to be written to the receiver's output files.
495 ///
496 ///
497 /// The value of this property is an array of AVMetadataItem objects representing the collection of top-level metadata to be written in each output file.
498 #[unsafe(method(metadata))]
499 #[unsafe(method_family = none)]
500 pub unsafe fn metadata(&self) -> Option<Retained<NSArray<AVMetadataItem>>>;
501
502 #[cfg(feature = "AVMetadataItem")]
503 /// Setter for [`metadata`][Self::metadata].
504 ///
505 /// This is [copied][objc2_foundation::NSCopying::copy] when set.
506 #[unsafe(method(setMetadata:))]
507 #[unsafe(method_family = none)]
508 pub unsafe fn setMetadata(&self, metadata: Option<&NSArray<AVMetadataItem>>);
509
510 #[cfg(feature = "AVVideoSettings")]
511 /// Indicates the supported video codec formats that can be specified in setOutputSettings:forConnection:.
512 ///
513 ///
514 /// The value of this property is an NSArray of AVVideoCodecTypes that can be used as values for the AVVideoCodecKey in the receiver's setOutputSettings:forConnection: dictionary. The array of available video codecs may change depending on the current session preset. The first codec in the array is used by default when recording a file.
515 #[unsafe(method(availableVideoCodecTypes))]
516 #[unsafe(method_family = none)]
517 pub unsafe fn availableVideoCodecTypes(&self) -> Retained<NSArray<AVVideoCodecType>>;
518
519 #[cfg(feature = "AVCaptureSession")]
520 /// Indicates the supported keys that can be specified in setOutputSettings:forConnection:.
521 ///
522 ///
523 /// Parameter `connection`: The connection delivering the media to be encoded.
524 ///
525 ///
526 /// Returns an NSArray of NSStrings listing the allowable keys in the receiver's setOutputSettings:forConnection: dictionary.
527 #[unsafe(method(supportedOutputSettingsKeysForConnection:))]
528 #[unsafe(method_family = none)]
529 pub unsafe fn supportedOutputSettingsKeysForConnection(
530 &self,
531 connection: &AVCaptureConnection,
532 ) -> Retained<NSArray<NSString>>;
533
534 #[cfg(feature = "AVCaptureSession")]
535 /// Returns the options the receiver uses to encode media from the given connection as it is being recorded.
536 ///
537 ///
538 /// Parameter `connection`: The connection delivering the media to be encoded.
539 ///
540 /// Returns: An NSDictionary of output settings.
541 ///
542 ///
543 /// See AVAudioSettings.h for audio connections or AVVideoSettings.h for video connections for more information on the structure of an output settings dictionary. If the returned value is an empty dictionary (i.e. [NSDictionary dictionary]), the format of the media from the connection will not be changed before being written to the file. If -setOutputSettings:forConnection: was called with a nil dictionary, this method returns a non-nil dictionary reflecting the settings used by the AVCaptureSession's current sessionPreset.
544 #[unsafe(method(outputSettingsForConnection:))]
545 #[unsafe(method_family = none)]
546 pub unsafe fn outputSettingsForConnection(
547 &self,
548 connection: &AVCaptureConnection,
549 ) -> Retained<NSDictionary<NSString, AnyObject>>;
550
551 #[cfg(feature = "AVCaptureSession")]
552 /// Sets the options the receiver uses to encode media from the given connection as it is being recorded.
553 ///
554 ///
555 /// Parameter `outputSettings`: An NSDictionary of output settings.
556 ///
557 /// Parameter `connection`: The connection delivering the media to be encoded.
558 ///
559 ///
560 /// See AVAudioSettings.h for audio connections or AVVideoSettings.h for video connections for more information on how to construct an output settings dictionary. A value of an empty dictionary (i.e. +[NSDictionary dictionary]), means that the format of the media from the connection should not be changed before being written to the file. A value of nil means that the output format will be determined by the session preset. In this case, -outputSettingsForConnection: will return a non-nil dictionary reflecting the settings used by the AVCaptureSession's current sessionPreset.
561 ///
562 /// On iOS, your outputSettings dictionary may only contain keys listed in - supportedOutputSettingsKeysForConnection:. If you specify any other key, an NSInvalidArgumentException will be thrown. Further restrictions may be imposed on the AVVideoCodecTypeKey. Its value should be present in the -availableVideoCodecTypes array. If AVVideoCompressionPropertiesKey is specified, you must also specify a valid value for AVVideoCodecKey. On iOS versions prior to 12.0, the only settable key for video connections is AVVideoCodecTypeKey. On iOS 12.0 and later, video connections gain support for AVVideoCompressionPropertiesKey.
563 ///
564 /// On iOS, -outputSettingsForConnection: always provides a fully populated dictionary. If you call -outputSettingsForConnection: with the intent of overriding a few of the values, you must take care to exclude keys that are not supported before calling -setOutputSettings:forConnection:. When providing an AVVideoCompressionPropertiesKey sub dictionary, you may specify a sparse dictionary. AVCaptureMovieFileOutput will always fill in missing keys with default values for the current AVCaptureSession configuration.
565 ///
566 /// # Safety
567 ///
568 /// `output_settings` generic should be of the correct type.
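        ///
        /// # Example
        ///
        /// A sketch of the two special-cased values described above; `output` is assumed to be
        /// an AVCaptureMovieFileOutput and `connection` one of its AVCaptureConnections.
        ///
        /// ```ignore
        /// use objc2_foundation::NSDictionary;
        ///
        /// unsafe {
        ///     // An empty dictionary: write the media through without re-encoding.
        ///     output.setOutputSettings_forConnection(Some(&NSDictionary::new()), &connection);
        ///
        ///     // nil/None: fall back to the session preset; reading the settings back then
        ///     // yields a fully populated dictionary reflecting that preset.
        ///     output.setOutputSettings_forConnection(None, &connection);
        ///     let _settings = output.outputSettingsForConnection(&connection);
        /// }
        /// ```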
569 #[unsafe(method(setOutputSettings:forConnection:))]
570 #[unsafe(method_family = none)]
571 pub unsafe fn setOutputSettings_forConnection(
572 &self,
573 output_settings: Option<&NSDictionary<NSString, AnyObject>>,
574 connection: &AVCaptureConnection,
575 );
576
577 #[cfg(feature = "AVCaptureSession")]
578 /// Returns YES if the movie file output will create a timed metadata track that records samples which reflect changes made to the given connection's videoOrientation and videoMirrored properties during recording.
579 ///
580 ///
581 /// Parameter `connection`: A connection delivering video media to the movie file output. This method throws an NSInvalidArgumentException if the connection does not have a mediaType of AVMediaTypeVideo or if the connection does not terminate at the movie file output.
582 ///
583 ///
584 /// See setRecordsVideoOrientationAndMirroringChanges:asMetadataTrackForConnection: for details on the behavior controlled by this value. The default value returned is NO.
585 #[unsafe(method(recordsVideoOrientationAndMirroringChangesAsMetadataTrackForConnection:))]
586 #[unsafe(method_family = none)]
587 pub unsafe fn recordsVideoOrientationAndMirroringChangesAsMetadataTrackForConnection(
588 &self,
589 connection: &AVCaptureConnection,
590 ) -> bool;
591
592 #[cfg(feature = "AVCaptureSession")]
593 /// Controls whether or not the movie file output will create a timed metadata track that records samples which reflect changes made to the given connection's videoOrientation and videoMirrored properties during recording.
594 ///
595 ///
596 /// Parameter `doRecordChanges`: If YES, the movie file output will create a timed metadata track that records samples which reflect changes made to the given connection's videoOrientation and videoMirrored properties during recording.
597 ///
598 /// Parameter `connection`: A connection delivering video media to the movie file output. This method throws an NSInvalidArgumentException if the connection does not have a mediaType of AVMediaTypeVideo or if the connection does not terminate at the movie file output.
599 ///
600 ///
601 /// When a recording is started the current state of a video capture connection's videoOrientation and videoMirrored properties are used to build the display matrix for the created video track. The movie file format allows only one display matrix per track, which means that any changes made during a recording to the videoOrientation and videoMirrored properties are not captured. For example, a user starts a recording with their device in the portrait orientation, and then partway through the recording changes the device to a landscape orientation. The landscape orientation requires a different display matrix, but only the initial display matrix (the portrait display matrix) is recorded for the video track.
602 ///
603 /// By invoking this method the client application directs the movie file output to create an additional track in the captured movie. This track is a timed metadata track that is associated with the video track, and contains one or more samples that contain a Video Orientation value (as defined by the EXIF and TIFF specifications, and enumerated by CGImagePropertyOrientation in `<ImageIO/CGImageProperties.h>`). The value represents the display matrix corresponding to the AVCaptureConnection's videoOrientation and videoMirrored properties when applied to the input source.
604 /// The initial sample written to the timed metadata track represents the video track's display matrix. During recording, additional samples will be written to the timed metadata track whenever the client application changes the video connection's videoOrientation or videoMirrored properties. Using the above example, when the client application detects the user changing the device from portrait to landscape orientation, it updates the video connection's videoOrientation property, thus causing the movie file output to add a new sample to the timed metadata track.
606 ///
607 /// After capture, playback and editing applications can use the timed metadata track to enhance their user's experience. For example, when playing back the captured movie, a playback engine can use the samples to adjust the display of the video samples to keep the video properly oriented. Another example is an editing application that uses the sample times to suggest cut points for breaking the captured movie into separate clips, where each clip is properly oriented.
608 ///
609 /// The default behavior is to not create the timed metadata track.
610 ///
611 /// The doRecordChanges value is only observed at the start of recording. Changes to the value will not have any effect until the next recording is started.
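        ///
        /// # Example
        ///
        /// A sketch of opting into the orientation/mirroring metadata track before a recording
        /// starts; `output` is assumed to be an AVCaptureMovieFileOutput and `video_connection`
        /// its video AVCaptureConnection.
        ///
        /// ```ignore
        /// unsafe {
        ///     output.setRecordsVideoOrientationAndMirroringChanges_asMetadataTrackForConnection(
        ///         true,
        ///         &video_connection,
        ///     );
        ///     // Start the recording afterwards; the flag is only read when recording begins.
        /// }
        /// ```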
612 #[unsafe(method(setRecordsVideoOrientationAndMirroringChanges:asMetadataTrackForConnection:))]
613 #[unsafe(method_family = none)]
614 pub unsafe fn setRecordsVideoOrientationAndMirroringChanges_asMetadataTrackForConnection(
615 &self,
616 do_record_changes: bool,
617 connection: &AVCaptureConnection,
618 );
619
620 /// Enable or disable a constituent device selection behavior when recording.
621 ///
622 ///
623 /// This property enables a camera selection behavior to be applied when recording a movie. Once recording starts, the specified behavior and conditions take effect. Once recording stops the camera selection will change back to the primaryConstituentDeviceSwitchingBehavior specified by the AVCaptureDevice. By default, this property is set to YES when connected to an AVCaptureDevice that supports constituent device switching.
624 #[unsafe(method(isPrimaryConstituentDeviceSwitchingBehaviorForRecordingEnabled))]
625 #[unsafe(method_family = none)]
626 pub unsafe fn isPrimaryConstituentDeviceSwitchingBehaviorForRecordingEnabled(&self)
627 -> bool;
628
629 /// Setter for [`isPrimaryConstituentDeviceSwitchingBehaviorForRecordingEnabled`][Self::isPrimaryConstituentDeviceSwitchingBehaviorForRecordingEnabled].
630 #[unsafe(method(setPrimaryConstituentDeviceSwitchingBehaviorForRecordingEnabled:))]
631 #[unsafe(method_family = none)]
632 pub unsafe fn setPrimaryConstituentDeviceSwitchingBehaviorForRecordingEnabled(
633 &self,
634 primary_constituent_device_switching_behavior_for_recording_enabled: bool,
635 );
636
637 #[cfg(feature = "AVCaptureDevice")]
638 /// When primaryConstituentDeviceSwitchingBehaviorForRecordingEnabled is set to YES, this method controls the switching behavior and conditions, while a movie file is being recorded.
639 ///
640 ///
641 /// This controls the camera selection behavior used while recording a movie, when enabled through primaryConstituentDeviceSwitchingBehaviorForRecordingEnabled. Setting the switching behavior to anything other than AVCapturePrimaryConstituentDeviceSwitchingBehaviorUnsupported when connected to an AVCaptureDevice that does not support constituent device selection throws an NSInvalidArgumentException. Setting restrictedSwitchingBehaviorConditions to something other than AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditionNone while setting switchingBehavior to something other than AVCapturePrimaryConstituentDeviceSwitchingBehaviorRestricted throws an NSInvalidArgumentException exception.
642 #[unsafe(method(setPrimaryConstituentDeviceSwitchingBehaviorForRecording:restrictedSwitchingBehaviorConditions:))]
643 #[unsafe(method_family = none)]
644 pub unsafe fn setPrimaryConstituentDeviceSwitchingBehaviorForRecording_restrictedSwitchingBehaviorConditions(
645 &self,
646 switching_behavior: AVCapturePrimaryConstituentDeviceSwitchingBehavior,
647 restricted_switching_behavior_conditions: AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditions,
648 );
649
650 #[cfg(feature = "AVCaptureDevice")]
651 /// The primaryConstituentDeviceSwitchingBehavior as set by -[AVCaptureMovieFileOutput setPrimaryConstituentDeviceSwitchingBehaviorForRecording:restrictedSwitchingBehaviorConditions:].
652 ///
653 ///
654 /// By default, this property is set to AVCapturePrimaryConstituentDeviceSwitchingBehaviorRestricted. This property is key-value observable.
655 #[unsafe(method(primaryConstituentDeviceSwitchingBehaviorForRecording))]
656 #[unsafe(method_family = none)]
657 pub unsafe fn primaryConstituentDeviceSwitchingBehaviorForRecording(
658 &self,
659 ) -> AVCapturePrimaryConstituentDeviceSwitchingBehavior;
660
661 #[cfg(feature = "AVCaptureDevice")]
662 /// The primaryConstituentDeviceRestrictedSwitchingBehaviorConditions as set by -[AVCaptureMovieFileOutput setPrimaryConstituentDeviceSwitchingBehaviorForRecording:restrictedSwitchingBehaviorConditions:].
663 ///
664 ///
665 /// By default, this property is set to AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorCondition{VideoZoomChanged | FocusModeChanged | ExposureModeChanged}. This property is key-value observable.
666 #[unsafe(method(primaryConstituentDeviceRestrictedSwitchingBehaviorConditionsForRecording))]
667 #[unsafe(method_family = none)]
668 pub unsafe fn primaryConstituentDeviceRestrictedSwitchingBehaviorConditionsForRecording(
669 &self,
670 ) -> AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditions;
671
672 /// Returns whether or not capturing spatial video to a file is supported. Note that in order to be supported, two conditions must be met. (1) The source AVCaptureDevice's activeFormat.spatialVideoCaptureSupported property must return YES. (2) The video AVCaptureConnection's activeVideoStabilizationMode property must return AVCaptureVideoStabilizationModeCinematic, AVCaptureVideoStabilizationModeCinematicExtended, or AVCaptureVideoStabilizationModeCinematicExtendedEnhanced.
673 #[unsafe(method(isSpatialVideoCaptureSupported))]
674 #[unsafe(method_family = none)]
675 pub unsafe fn isSpatialVideoCaptureSupported(&self) -> bool;
676
677 /// Enable or disable capturing spatial video to a file.
678 ///
679 ///
680 /// This property enables capturing spatial video to a file. By default, this property is set to NO. Check spatialVideoCaptureSupported before setting this property, as setting to YES will throw an exception if the feature is not supported.
681 ///
682 /// On iOS, enabling spatial video will overwrite the connected AVCaptureDevice's `videoZoomFactor`, `minAvailableVideoZoomFactor`, and `maxAvailableVideoZoomFactor` to the field of view of the narrower camera in the pair.
683 ///
684 /// When spatialVideoCaptureEnabled is true, setting -[AVCaptureDeviceInput activeVideoMinFrameDuration] or -[AVCaptureDeviceInput activeVideoMaxFrameDuration] throws an NSInvalidArgumentException.
685 ///
686 /// Enabling this property throws an NSInvalidArgumentException if -[AVCaptureDevice isVideoFrameDurationLocked] or -[AVCaptureDevice isFollowingExternalSyncDevice] is true.
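        ///
        /// # Example
        ///
        /// A sketch of enabling spatial video capture only when the current device format and
        /// stabilization mode support it; `output` is assumed to be an AVCaptureMovieFileOutput.
        ///
        /// ```ignore
        /// unsafe {
        ///     if output.isSpatialVideoCaptureSupported() {
        ///         output.setSpatialVideoCaptureEnabled(true);
        ///     }
        /// }
        /// ```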
687 #[unsafe(method(isSpatialVideoCaptureEnabled))]
688 #[unsafe(method_family = none)]
689 pub unsafe fn isSpatialVideoCaptureEnabled(&self) -> bool;
690
691 /// Setter for [`isSpatialVideoCaptureEnabled`][Self::isSpatialVideoCaptureEnabled].
692 #[unsafe(method(setSpatialVideoCaptureEnabled:))]
693 #[unsafe(method_family = none)]
694 pub unsafe fn setSpatialVideoCaptureEnabled(&self, spatial_video_capture_enabled: bool);
695 );
696}
697
698extern_class!(
699 /// AVCaptureAudioFileOutput is a concrete subclass of AVCaptureFileOutput that writes captured audio to any audio file type supported by CoreAudio.
700 ///
701 ///
702 /// AVCaptureAudioFileOutput implements the complete file recording interface declared by AVCaptureFileOutput for writing media data to audio files. In addition, instances of AVCaptureAudioFileOutput allow clients to configure options specific to the audio file formats, including allowing them to write metadata collections to each file and specify audio encoding options.
703 ///
704 /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureaudiofileoutput?language=objc)
705 #[unsafe(super(AVCaptureFileOutput, AVCaptureOutput, NSObject))]
706 #[derive(Debug, PartialEq, Eq, Hash)]
707 #[cfg(feature = "AVCaptureOutputBase")]
708 pub struct AVCaptureAudioFileOutput;
709);
710
711#[cfg(feature = "AVCaptureOutputBase")]
712extern_conformance!(
713 unsafe impl NSObjectProtocol for AVCaptureAudioFileOutput {}
714);
715
716#[cfg(feature = "AVCaptureOutputBase")]
717impl AVCaptureAudioFileOutput {
718 extern_methods!(
719 #[unsafe(method(init))]
720 #[unsafe(method_family = init)]
721 pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;
722
723 #[unsafe(method(new))]
724 #[unsafe(method_family = new)]
725 pub unsafe fn new() -> Retained<Self>;
726
727 #[cfg(feature = "AVMediaFormat")]
728 /// Provides the file types AVCaptureAudioFileOutput can write.
729 ///
730 ///
731 /// Returns: An NSArray of UTIs identifying the file types the AVCaptureAudioFileOutput class can write.
732 #[unsafe(method(availableOutputFileTypes))]
733 #[unsafe(method_family = none)]
734 pub unsafe fn availableOutputFileTypes() -> Retained<NSArray<AVFileType>>;
735
736 #[cfg(feature = "AVMediaFormat")]
737 /// Tells the receiver to start recording to a new file of the specified format, and specifies a delegate that will be notified when recording is finished.
738 ///
739 ///
740 /// Parameter `outputFileURL`: An NSURL object containing the URL of the output file. This method throws an NSInvalidArgumentException if the URL is not a valid file URL.
741 ///
742 /// Parameter `fileType`: A UTI indicating the format of the file to be written.
743 ///
744 /// Parameter `delegate`: An object conforming to the AVCaptureFileOutputRecordingDelegate protocol. Clients must specify a delegate so that they can be notified when recording to the given URL is finished.
745 ///
746 ///
747 /// The method sets the file URL to which the receiver is currently writing output media. If a file at the given URL already exists when capturing starts, recording to the new file will fail.
748 ///
749 /// The fileType argument is a UTI corresponding to the audio file format that should be written. UTIs for common audio file types are declared in AVMediaFormat.h.
750 ///
751 /// Clients need not call stopRecording before calling this method while another recording is in progress. If this method is invoked while an existing output file was already being recorded, no media samples will be discarded between the old file and the new file.
752 ///
753 /// When recording is stopped either by calling stopRecording, by changing files using this method, or because of an error, the remaining data that needs to be included in the file will be written in the background. Therefore, clients must specify a delegate that will be notified when all data has been written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method. The recording delegate can also optionally implement methods that inform it when data starts being written, when recording is paused and resumed, and when recording is about to be finished.
754 ///
755 /// On macOS, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the new file are guaranteed to be those contained in the sample buffer passed to that method.
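        ///
        /// # Example
        ///
        /// A sketch of starting an audio-only recording; it assumes `output` is an
        /// AVCaptureAudioFileOutput attached to a running session, `delegate` implements
        /// AVCaptureFileOutputRecordingDelegate, and the AVFileTypeAppleM4A constant from the
        /// AVMediaFormat bindings is available (any UTI from availableOutputFileTypes() works).
        ///
        /// ```ignore
        /// use objc2_foundation::{NSString, NSURL};
        ///
        /// unsafe {
        ///     let url = NSURL::fileURLWithPath(&NSString::from_str("/tmp/capture.m4a"));
        ///     output.startRecordingToOutputFileURL_outputFileType_recordingDelegate(
        ///         &url,
        ///         AVFileTypeAppleM4A,
        ///         delegate,
        ///     );
        /// }
        /// ```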
756 #[unsafe(method(startRecordingToOutputFileURL:outputFileType:recordingDelegate:))]
757 #[unsafe(method_family = none)]
758 pub unsafe fn startRecordingToOutputFileURL_outputFileType_recordingDelegate(
759 &self,
760 output_file_url: &NSURL,
761 file_type: &AVFileType,
762 delegate: &ProtocolObject<dyn AVCaptureFileOutputRecordingDelegate>,
763 );
764
765 #[cfg(feature = "AVMetadataItem")]
766 /// A collection of metadata to be written to the receiver's output files.
767 ///
768 ///
769 /// The value of this property is an array of AVMetadataItem objects representing the collection of top-level metadata to be written in each output file. Only ID3 v2.2, v2.3, or v2.4 style metadata items are supported.
770 #[unsafe(method(metadata))]
771 #[unsafe(method_family = none)]
772 pub unsafe fn metadata(&self) -> Retained<NSArray<AVMetadataItem>>;
773
774 #[cfg(feature = "AVMetadataItem")]
775 /// Setter for [`metadata`][Self::metadata].
776 ///
777 /// This is [copied][objc2_foundation::NSCopying::copy] when set.
778 #[unsafe(method(setMetadata:))]
779 #[unsafe(method_family = none)]
780 pub unsafe fn setMetadata(&self, metadata: &NSArray<AVMetadataItem>);
781
782 /// Specifies the options the receiver uses to re-encode audio as it is being recorded.
783 ///
784 ///
785 /// The output settings dictionary can contain values for keys from AVAudioSettings.h. A value of nil indicates that the format of the audio should not be changed before being written to the file.
786 #[unsafe(method(audioSettings))]
787 #[unsafe(method_family = none)]
788 pub unsafe fn audioSettings(&self) -> Option<Retained<NSDictionary<NSString, AnyObject>>>;
789
790 /// Setter for [`audioSettings`][Self::audioSettings].
791 ///
792 /// This is [copied][objc2_foundation::NSCopying::copy] when set.
793 ///
794 /// # Safety
795 ///
796 /// `audio_settings` generic should be of the correct type.
797 #[unsafe(method(setAudioSettings:))]
798 #[unsafe(method_family = none)]
799 pub unsafe fn setAudioSettings(
800 &self,
801 audio_settings: Option<&NSDictionary<NSString, AnyObject>>,
802 );
803 );
804}