objc2_av_foundation/generated/AVCaptureFileOutput.rs
//! This file has been automatically generated by `objc2`'s `header-translator`.
//! DO NOT EDIT
use core::ffi::*;
use core::ptr::NonNull;
use objc2::__framework_prelude::*;
#[cfg(feature = "objc2-core-media")]
use objc2_core_media::*;
use objc2_foundation::*;

use crate::*;

extern_class!(
    /// AVCaptureFileOutput is an abstract subclass of AVCaptureOutput that provides an interface for writing captured media to files.
    ///
    ///
    /// This abstract superclass defines the interface for outputs that record media samples to files. File outputs can start recording to a new file using the startRecordingToOutputFileURL:recordingDelegate: method. On successive invocations of this method on macOS, the output file can be changed dynamically without losing media samples. A file output can stop recording using the stopRecording method. Because files are recorded in the background, applications will need to specify a delegate for each new file so that they can be notified when recorded files are finished.
    ///
    /// On macOS, clients can also set a delegate on the file output itself that can be used to control recording along exact media sample boundaries using the captureOutput:didOutputSampleBuffer:fromConnection: method.
    ///
    /// The concrete subclasses of AVCaptureFileOutput are AVCaptureMovieFileOutput, which records media to a QuickTime movie file, and AVCaptureAudioFileOutput, which writes audio media to a variety of audio file formats.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturefileoutput?language=objc)
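    ///
    /// A minimal usage sketch (not compiled here): it assumes an `output` of the concrete subclass AVCaptureMovieFileOutput that has already been added to a running AVCaptureSession, and a `delegate` of type `&ProtocolObject<dyn AVCaptureFileOutputRecordingDelegate>` defined elsewhere (for example with `objc2::define_class!`).
    ///
    /// ```ignore
    /// use objc2_foundation::{NSString, NSURL};
    ///
    /// // Start writing to a new movie file; the delegate is told when the file is
    /// // finished via captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:.
    /// let url = unsafe { NSURL::fileURLWithPath(&NSString::from_str("/tmp/capture.mov")) };
    /// unsafe { output.startRecordingToOutputFileURL_recordingDelegate(&url, delegate) };
    ///
    /// // ... later: stop recording. Remaining data is flushed in the background,
    /// // so wait for the delegate callback above before using the file.
    /// unsafe { output.stopRecording() };
    /// ```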
    #[unsafe(super(AVCaptureOutput, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "AVCaptureOutputBase")]
    pub struct AVCaptureFileOutput;
);

#[cfg(feature = "AVCaptureOutputBase")]
unsafe impl NSObjectProtocol for AVCaptureFileOutput {}

#[cfg(feature = "AVCaptureOutputBase")]
impl AVCaptureFileOutput {
    extern_methods!(
        /// The receiver's delegate.
        ///
        ///
        /// The value of this property is an object conforming to the AVCaptureFileOutputDelegate protocol that will be able to monitor and control recording along exact sample boundaries.
        #[unsafe(method(delegate))]
        #[unsafe(method_family = none)]
        pub unsafe fn delegate(
            &self,
        ) -> Option<Retained<ProtocolObject<dyn AVCaptureFileOutputDelegate>>>;

        /// Setter for [`delegate`][Self::delegate].
        #[unsafe(method(setDelegate:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDelegate(
            &self,
            delegate: Option<&ProtocolObject<dyn AVCaptureFileOutputDelegate>>,
        );

        /// The file URL of the file to which the receiver is currently recording incoming buffers.
        ///
        ///
        /// The value of this property is an NSURL object containing the file URL of the file currently being written by the receiver. Returns nil if the receiver is not recording to any file.
        #[unsafe(method(outputFileURL))]
        #[unsafe(method_family = none)]
        pub unsafe fn outputFileURL(&self) -> Option<Retained<NSURL>>;

        /// Tells the receiver to start recording to a new file, and specifies a delegate that will be notified when recording is finished.
        ///
        ///
        /// Parameter `outputFileURL`: An NSURL object containing the URL of the output file. This method throws an NSInvalidArgumentException if the URL is not a valid file URL.
        ///
        /// Parameter `delegate`: An object conforming to the AVCaptureFileOutputRecordingDelegate protocol. Clients must specify a delegate so that they can be notified when recording to the given URL is finished.
        ///
        ///
        /// The method sets the file URL to which the receiver is currently writing output media. If a file at the given URL already exists when capturing starts, recording to the new file will fail.
        ///
        /// Clients need not call stopRecording before calling this method while another recording is in progress. On macOS, if this method is invoked while an existing output file was already being recorded, no media samples will be discarded between the old file and the new file.
        ///
        /// When recording is stopped either by calling stopRecording, by changing files using this method, or because of an error, the remaining data that needs to be included in the file will be written in the background. Therefore, clients must specify a delegate that will be notified when all data has been written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method. The recording delegate can also optionally implement methods that inform it when data starts being written, when recording is paused and resumed, and when recording is about to be finished.
        ///
        /// On macOS, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the new file are guaranteed to be those contained in the sample buffer passed to that method.
        ///
        /// Note: AVCaptureAudioFileOutput does not support -startRecordingToOutputFileURL:recordingDelegate:. Use -startRecordingToOutputFileURL:outputFileType:recordingDelegate: instead.
        #[unsafe(method(startRecordingToOutputFileURL:recordingDelegate:))]
        #[unsafe(method_family = none)]
        pub unsafe fn startRecordingToOutputFileURL_recordingDelegate(
            &self,
            output_file_url: &NSURL,
            delegate: &ProtocolObject<dyn AVCaptureFileOutputRecordingDelegate>,
        );

        /// Tells the receiver to stop recording to the current file.
        ///
        ///
        /// Clients can call this method when they want to stop recording new samples to the current file, and do not want to continue recording to another file. Clients that want to switch from one file to another should not call this method. Instead they should simply call startRecordingToOutputFileURL:recordingDelegate: with the new file URL.
        ///
        /// When recording is stopped either by calling this method, by changing files using startRecordingToOutputFileURL:recordingDelegate:, or because of an error, the remaining data that needs to be included in the file will be written in the background. Therefore, before using the file, clients must wait until the delegate that was specified in startRecordingToOutputFileURL:recordingDelegate: is notified when all data has been written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method.
        ///
        /// On macOS, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the last samples written to the current file are guaranteed to be those that were output immediately before those in the sample buffer passed to that method.
        #[unsafe(method(stopRecording))]
        #[unsafe(method_family = none)]
        pub unsafe fn stopRecording(&self);

        /// Indicates whether the receiver is currently recording.
        ///
        ///
        /// The value of this property is YES when the receiver currently has a file to which it is writing new samples, NO otherwise.
        #[unsafe(method(isRecording))]
        #[unsafe(method_family = none)]
        pub unsafe fn isRecording(&self) -> bool;

        /// Indicates whether recording to the current output file is paused.
        ///
        ///
        /// This property indicates whether recording to the file returned by outputFileURL has been previously paused using the pauseRecording method. When a recording is paused, captured samples are not written to the output file, but new samples can be written to the same file in the future by calling resumeRecording.
        #[unsafe(method(isRecordingPaused))]
        #[unsafe(method_family = none)]
        pub unsafe fn isRecordingPaused(&self) -> bool;

        /// Pauses recording to the current output file.
        ///
        ///
        /// This method causes the receiver to stop writing captured samples to the current output file returned by outputFileURL, but leaves the file open so that samples can be written to it in the future, when resumeRecording is called. This allows clients to record multiple media segments that are not contiguous in time to a single file.
        ///
        /// On macOS, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the last samples written to the current file are guaranteed to be those that were output immediately before those in the sample buffer passed to that method.
        ///
        /// A recording can be stopped as normal, even when it's paused.
        ///
        /// A format or device change will result in the recording being stopped, even when it's paused.
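        ///
        /// A hedged sketch of the pause/resume pairing, assuming `output` is an AVCaptureFileOutput subclass that is currently recording:
        ///
        /// ```ignore
        /// // Stop writing samples but keep the current file open ...
        /// unsafe { output.pauseRecording() };
        /// // ... and later append a new, non-contiguous segment to the same file.
        /// unsafe { output.resumeRecording() };
        /// ```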
        #[unsafe(method(pauseRecording))]
        #[unsafe(method_family = none)]
        pub unsafe fn pauseRecording(&self);

        /// Resumes recording to the current output file after it was previously paused using pauseRecording.
        ///
        ///
        /// This method causes the receiver to resume writing captured samples to the current output file returned by outputFileURL, after recording was previously paused using pauseRecording. This allows clients to record multiple media segments that are not contiguous in time to a single file.
        ///
        /// On macOS, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the current file are guaranteed to be those contained in the sample buffer passed to that method.
        #[unsafe(method(resumeRecording))]
        #[unsafe(method_family = none)]
        pub unsafe fn resumeRecording(&self);

        #[cfg(feature = "objc2-core-media")]
        /// Indicates the duration of the media recorded to the current output file.
        ///
        ///
        /// If recording is in progress, this property returns the total time recorded so far.
        #[unsafe(method(recordedDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn recordedDuration(&self) -> CMTime;

        /// Indicates the size, in bytes, of the data recorded to the current output file.
        ///
        ///
        /// If a recording is in progress, this property returns the size in bytes of the data recorded so far.
        #[unsafe(method(recordedFileSize))]
        #[unsafe(method_family = none)]
        pub unsafe fn recordedFileSize(&self) -> i64;

        #[cfg(feature = "objc2-core-media")]
        /// Specifies the maximum duration of the media that should be recorded by the receiver.
        ///
        ///
        /// This property specifies a hard limit on the duration of recorded files. Recording is stopped when the limit is reached and the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: delegate method is invoked with an appropriate error. The default value of this property is kCMTimeInvalid, which indicates no limit.
        #[unsafe(method(maxRecordedDuration))]
        #[unsafe(method_family = none)]
        pub unsafe fn maxRecordedDuration(&self) -> CMTime;

        #[cfg(feature = "objc2-core-media")]
        /// Setter for [`maxRecordedDuration`][Self::maxRecordedDuration].
        #[unsafe(method(setMaxRecordedDuration:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMaxRecordedDuration(&self, max_recorded_duration: CMTime);

        /// Specifies the maximum size, in bytes, of the data that should be recorded by the receiver.
        ///
        ///
        /// This property specifies a hard limit on the data size of recorded files. Recording is stopped when the limit is reached and the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: delegate method is invoked with an appropriate error. The default value of this property is 0, which indicates no limit.
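        ///
        /// A hedged sketch of capping the recorded size (the CMTime-based maxRecordedDuration limit works the same way), assuming `output` is an AVCaptureFileOutput subclass:
        ///
        /// ```ignore
        /// // Stop recording automatically once roughly 512 MiB have been written;
        /// // the recording delegate is then notified with an appropriate error.
        /// unsafe { output.setMaxRecordedFileSize(512 * 1024 * 1024) };
        /// ```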
        #[unsafe(method(maxRecordedFileSize))]
        #[unsafe(method_family = none)]
        pub unsafe fn maxRecordedFileSize(&self) -> i64;

        /// Setter for [`maxRecordedFileSize`][Self::maxRecordedFileSize].
        #[unsafe(method(setMaxRecordedFileSize:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMaxRecordedFileSize(&self, max_recorded_file_size: i64);

        /// Specifies the minimum amount of free space, in bytes, required for recording to continue on a given volume.
        ///
        ///
        /// This property specifies a hard lower limit on the amount of free space that must remain on a target volume for recording to continue. Recording is stopped when the limit is reached and the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: delegate method is invoked with an appropriate error.
        #[unsafe(method(minFreeDiskSpaceLimit))]
        #[unsafe(method_family = none)]
        pub unsafe fn minFreeDiskSpaceLimit(&self) -> i64;

        /// Setter for [`minFreeDiskSpaceLimit`][Self::minFreeDiskSpaceLimit].
        #[unsafe(method(setMinFreeDiskSpaceLimit:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMinFreeDiskSpaceLimit(&self, min_free_disk_space_limit: i64);
    );
}

/// Methods declared on superclass `AVCaptureOutput`.
#[cfg(feature = "AVCaptureOutputBase")]
impl AVCaptureFileOutput {
    extern_methods!(
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}

extern_protocol!(
    /// Defines an interface for delegates of AVCaptureFileOutput to respond to events that occur in the process of recording a single file.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturefileoutputrecordingdelegate?language=objc)
    pub unsafe trait AVCaptureFileOutputRecordingDelegate: NSObjectProtocol {
        #[cfg(all(feature = "AVCaptureOutputBase", feature = "AVCaptureSession"))]
        /// Informs the delegate when the output has started writing to a file.
        ///
        ///
        /// Parameter `output`: The capture file output that started writing the file.
        ///
        /// Parameter `fileURL`: The file URL of the file that is being written.
        ///
        /// Parameter `connections`: An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
        ///
        ///
        /// This method is called when the file output has started writing data to a file. If an error condition prevents any data from being written, this method may not be called. captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error: and captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: will always be called, even if no data is written.
        ///
        /// Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
        #[optional]
        #[unsafe(method(captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_didStartRecordingToOutputFileAtURL_fromConnections(
            &self,
            output: &AVCaptureFileOutput,
            file_url: &NSURL,
            connections: &NSArray<AVCaptureConnection>,
        );

        #[cfg(all(
            feature = "AVCaptureOutputBase",
            feature = "AVCaptureSession",
            feature = "objc2-core-media"
        ))]
        /// Informs the delegate when the output has started writing to a file.
        ///
        ///
        /// Parameter `output`: The capture file output that started writing the file.
        ///
        /// Parameter `fileURL`: The file URL of the file that is being written.
        ///
        /// Parameter `startPTS`: The timestamp of the first buffer written to the file, synced with AVCaptureSession.synchronizationClock.
        ///
        /// Parameter `connections`: An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
        ///
        ///
        /// This method is called when the file output has started writing data to a file. If an error condition prevents any data from being written, this method may not be called. captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error: and captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: will always be called, even if no data is written.
        ///
        /// If this method is implemented, the alternative delegate callback -captureOutput:didStartRecordingToOutputFileAtURL:fromConnections: will not be called.
        ///
        /// Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
        #[optional]
        #[unsafe(method(captureOutput:didStartRecordingToOutputFileAtURL:startPTS:fromConnections:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_didStartRecordingToOutputFileAtURL_startPTS_fromConnections(
            &self,
            output: &AVCaptureFileOutput,
            file_url: &NSURL,
            start_pts: CMTime,
            connections: &NSArray<AVCaptureConnection>,
        );

        #[cfg(all(feature = "AVCaptureOutputBase", feature = "AVCaptureSession"))]
        /// Called whenever the output is recording to a file and successfully pauses the recording at the request of the client.
        ///
        ///
        /// Parameter `output`: The capture file output that has paused its file recording.
        ///
        /// Parameter `fileURL`: The file URL of the file that is being written.
        ///
        /// Parameter `connections`: An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
        ///
        ///
        /// Delegates can use this method to be informed when a request to pause recording is actually respected. It is safe for delegates to change what the file output is currently doing (starting a new file, for example) from within this method. If recording to a file is stopped, either manually or due to an error, this method is not guaranteed to be called, even if a previous call to pauseRecording was made.
        ///
        /// Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
        #[optional]
        #[unsafe(method(captureOutput:didPauseRecordingToOutputFileAtURL:fromConnections:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_didPauseRecordingToOutputFileAtURL_fromConnections(
            &self,
            output: &AVCaptureFileOutput,
            file_url: &NSURL,
            connections: &NSArray<AVCaptureConnection>,
        );

        #[cfg(all(feature = "AVCaptureOutputBase", feature = "AVCaptureSession"))]
        /// Called whenever the output, at the request of the client, successfully resumes a file recording that was paused.
        ///
        ///
        /// Parameter `output`: The capture file output that has resumed its paused file recording.
        ///
        /// Parameter `fileURL`: The file URL of the file that is being written.
        ///
        /// Parameter `connections`: An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
        ///
        ///
        /// Delegates can use this method to be informed when a request to resume recording is actually respected. It is safe for delegates to change what the file output is currently doing (starting a new file, for example) from within this method. If recording to a file is stopped, either manually or due to an error, this method is not guaranteed to be called, even if a previous call to resumeRecording was made.
        ///
        /// Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
        #[optional]
        #[unsafe(method(captureOutput:didResumeRecordingToOutputFileAtURL:fromConnections:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_didResumeRecordingToOutputFileAtURL_fromConnections(
            &self,
            output: &AVCaptureFileOutput,
            file_url: &NSURL,
            connections: &NSArray<AVCaptureConnection>,
        );

        #[cfg(all(feature = "AVCaptureOutputBase", feature = "AVCaptureSession"))]
        /// Informs the delegate when the output will stop writing new samples to a file.
        ///
        ///
        /// Parameter `output`: The capture file output that will finish writing the file.
        ///
        /// Parameter `fileURL`: The file URL of the file that is being written.
        ///
        /// Parameter `connections`: An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
        ///
        /// Parameter `error`: An error describing what caused the file to stop recording, or nil if there was no error.
        ///
        ///
        /// This method is called when the file output will stop recording new samples to the file at outputFileURL, either because startRecordingToOutputFileURL:recordingDelegate: or stopRecording were called, or because an error, described by the error parameter, occurred (if no error occurred, the error parameter will be nil). This method will always be called for each recording request, even if no data is successfully written to the file.
        ///
        /// Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
        #[optional]
        #[unsafe(method(captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_willFinishRecordingToOutputFileAtURL_fromConnections_error(
            &self,
            output: &AVCaptureFileOutput,
            file_url: &NSURL,
            connections: &NSArray<AVCaptureConnection>,
            error: Option<&NSError>,
        );

        #[cfg(all(feature = "AVCaptureOutputBase", feature = "AVCaptureSession"))]
        /// Informs the delegate when all pending data has been written to an output file.
        ///
        ///
        /// Parameter `output`: The capture file output that has finished writing the file.
        ///
        /// Parameter `outputFileURL`: The file URL of the file that has been written.
        ///
        /// Parameter `connections`: An array of AVCaptureConnection objects attached to the file output that provided the data that was written to the file.
        ///
        /// Parameter `error`: An error describing what caused the file to stop recording, or nil if there was no error.
        ///
        ///
        /// This method is called when the file output has finished writing all data to a file whose recording was stopped, either because startRecordingToOutputFileURL:recordingDelegate: or stopRecording were called, or because an error, described by the error parameter, occurred (if no error occurred, the error parameter will be nil). This method will always be called for each recording request, even if no data is successfully written to the file.
        ///
        /// Clients should not assume that this method will be called on a specific thread.
        ///
        /// Delegates are required to implement this method.
        #[unsafe(method(captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_didFinishRecordingToOutputFileAtURL_fromConnections_error(
            &self,
            output: &AVCaptureFileOutput,
            output_file_url: &NSURL,
            connections: &NSArray<AVCaptureConnection>,
            error: Option<&NSError>,
        );
    }
);

extern_protocol!(
    /// Defines an interface for delegates of AVCaptureFileOutput to monitor and control recordings along exact sample boundaries.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturefileoutputdelegate?language=objc)
    pub unsafe trait AVCaptureFileOutputDelegate: NSObjectProtocol {
        #[cfg(feature = "AVCaptureOutputBase")]
        /// Allows a client to opt in to frame accurate record-start in captureOutput:didOutputSampleBuffer:fromConnection:
        ///
        ///
        /// Parameter `output`: The AVCaptureFileOutput instance with which the delegate is associated.
        ///
        ///
        /// In apps linked before macOS 10.8, delegates that implement the captureOutput:didOutputSampleBuffer:fromConnection: method can ensure frame accurate start / stop of a recording by calling startRecordingToOutputFileURL:recordingDelegate: from within the callback. Frame accurate start requires the capture output to apply outputSettings when the session starts running, so it is ready to record on any given frame boundary. Compressing all the time while the session is running has power, thermal, and CPU implications. In apps linked on or after macOS 10.8, delegates must implement captureOutputShouldProvideSampleAccurateRecordingStart: to indicate whether frame accurate start/stop recording is required (returning YES) or not (returning NO). The output calls this method as soon as the delegate is added, and never again. If your delegate returns NO, the capture output applies compression settings when startRecordingToOutputFileURL:recordingDelegate: is called, and disables compression settings after the recording is stopped.
        #[unsafe(method(captureOutputShouldProvideSampleAccurateRecordingStart:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutputShouldProvideSampleAccurateRecordingStart(
            &self,
            output: &AVCaptureFileOutput,
        ) -> bool;

        #[cfg(all(
            feature = "AVCaptureOutputBase",
            feature = "AVCaptureSession",
            feature = "objc2-core-media"
        ))]
        /// Gives the delegate the opportunity to inspect samples as they are received by the output and optionally start and stop recording at exact times.
        ///
        ///
        /// Parameter `output`: The capture file output that is receiving the media data.
        ///
        /// Parameter `sampleBuffer`: A CMSampleBuffer object containing the sample data and additional information about the sample, such as its format and presentation time.
        ///
        /// Parameter `connection`: The AVCaptureConnection object attached to the file output from which the sample data was received.
        ///
        ///
        /// This method is called whenever the file output receives a single sample buffer (a single video frame or audio buffer, for example) from the given connection. This gives delegates an opportunity to start and stop recording or change output files at an exact sample boundary if -captureOutputShouldProvideSampleAccurateRecordingStart: returns YES. If called from within this method, the file output's startRecordingToOutputFileURL:recordingDelegate: and resumeRecording methods are guaranteed to include the received sample buffer in the new file, whereas calls to stopRecording and pauseRecording are guaranteed to include all samples leading up to those in the current sample buffer in the existing file.
        ///
        /// Delegates can gather information particular to the samples by inspecting the CMSampleBuffer object. Sample buffers always contain a single frame of video if called from this method but may also contain multiple samples of audio. For B-frame video formats, samples are always delivered in presentation order.
        ///
        /// Clients that need to reference the CMSampleBuffer object outside of the scope of this method must CFRetain it and then CFRelease it when they are finished with it.
        ///
        /// Note that to maintain optimal performance, some sample buffers directly reference pools of memory that may need to be reused by the device system and other capture inputs. This is frequently the case for uncompressed device native capture where memory blocks are copied as little as possible. If multiple sample buffers reference such pools of memory for too long, inputs will no longer be able to copy new samples into memory and those samples will be dropped. If your application is causing samples to be dropped by retaining the provided CMSampleBuffer objects for too long, but it needs access to the sample data for a long period of time, consider copying the data into a new buffer and then calling CFRelease on the sample buffer if it was previously retained so that the memory it references can be reused.
        ///
        /// Clients should not assume that this method will be called on a specific thread. In addition, this method is called periodically, so it must be efficient to prevent capture performance problems.
        #[optional]
        #[unsafe(method(captureOutput:didOutputSampleBuffer:fromConnection:))]
        #[unsafe(method_family = none)]
        unsafe fn captureOutput_didOutputSampleBuffer_fromConnection(
            &self,
            output: &AVCaptureFileOutput,
            sample_buffer: &CMSampleBuffer,
            connection: &AVCaptureConnection,
        );
    }
);

extern_class!(
    /// AVCaptureMovieFileOutput is a concrete subclass of AVCaptureFileOutput that writes captured media to QuickTime movie files.
    ///
    ///
    /// AVCaptureMovieFileOutput implements the complete file recording interface declared by AVCaptureFileOutput for writing media data to QuickTime movie files. In addition, instances of AVCaptureMovieFileOutput allow clients to configure options specific to the QuickTime file format, including allowing them to write metadata collections to each file, specify media encoding options for each track (macOS), and specify an interval at which movie fragments should be written.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcapturemoviefileoutput?language=objc)
    #[unsafe(super(AVCaptureFileOutput, AVCaptureOutput, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "AVCaptureOutputBase")]
    pub struct AVCaptureMovieFileOutput;
);

#[cfg(feature = "AVCaptureOutputBase")]
unsafe impl NSObjectProtocol for AVCaptureMovieFileOutput {}

#[cfg(feature = "AVCaptureOutputBase")]
impl AVCaptureMovieFileOutput {
    extern_methods!(
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        #[cfg(feature = "objc2-core-media")]
        /// Specifies the frequency with which movie fragments should be written.
        ///
        ///
        /// When movie fragments are used, a partially written QuickTime movie file whose writing is unexpectedly interrupted can be successfully opened and played up to multiples of the specified time interval. A value of kCMTimeInvalid indicates that movie fragments should not be used, but that only a movie atom describing all of the media in the file should be written. The default value of this property is ten seconds.
        ///
        /// Changing the value of this property will not affect the movie fragment interval of the file currently being written, if there is one.
        ///
        /// For best writing performance on external storage devices, set the movieFragmentInterval to 10 seconds or greater. If the size of a movie fragment is greater than or equal to 2GB, an interval is added at the 2GB mark.
        #[unsafe(method(movieFragmentInterval))]
        #[unsafe(method_family = none)]
        pub unsafe fn movieFragmentInterval(&self) -> CMTime;

        #[cfg(feature = "objc2-core-media")]
        /// Setter for [`movieFragmentInterval`][Self::movieFragmentInterval].
        #[unsafe(method(setMovieFragmentInterval:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMovieFragmentInterval(&self, movie_fragment_interval: CMTime);

        #[cfg(feature = "AVMetadataItem")]
        /// A collection of metadata to be written to the receiver's output files.
        ///
        ///
        /// The value of this property is an array of AVMetadataItem objects representing the collection of top-level metadata to be written in each output file.
        #[unsafe(method(metadata))]
        #[unsafe(method_family = none)]
        pub unsafe fn metadata(&self) -> Option<Retained<NSArray<AVMetadataItem>>>;

        #[cfg(feature = "AVMetadataItem")]
        /// Setter for [`metadata`][Self::metadata].
        #[unsafe(method(setMetadata:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMetadata(&self, metadata: Option<&NSArray<AVMetadataItem>>);

        #[cfg(feature = "AVVideoSettings")]
        /// Indicates the supported video codec formats that can be specified in setOutputSettingsForConnection:.
        ///
        ///
        /// The value of this property is an NSArray of AVVideoCodecTypes that can be used as values for the AVVideoCodecKey in the receiver's setOutputSettingsForConnection: dictionary. The array of available video codecs may change depending on the current session preset. The first codec in the array is used by default when recording a file.
        #[unsafe(method(availableVideoCodecTypes))]
        #[unsafe(method_family = none)]
        pub unsafe fn availableVideoCodecTypes(&self) -> Retained<NSArray<AVVideoCodecType>>;

        #[cfg(feature = "AVCaptureSession")]
        /// Indicates the supported keys that can be specified in setOutputSettings:forConnection:.
        ///
        ///
        /// Parameter `connection`: The connection delivering the media to be encoded.
        ///
        ///
        /// Returns an NSArray of NSStrings listing the allowable keys in the receiver's setOutputSettings:forConnection: dictionary.
        #[unsafe(method(supportedOutputSettingsKeysForConnection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportedOutputSettingsKeysForConnection(
            &self,
            connection: &AVCaptureConnection,
        ) -> Retained<NSArray<NSString>>;

        #[cfg(feature = "AVCaptureSession")]
        /// Returns the options the receiver uses to encode media from the given connection as it is being recorded.
        ///
        ///
        /// Parameter `connection`: The connection delivering the media to be encoded.
        ///
        /// Returns: An NSDictionary of output settings.
        ///
        ///
        /// See AVAudioSettings.h for audio connections or AVVideoSettings.h for video connections for more information on the structure of an output settings dictionary. If the returned value is an empty dictionary (i.e. [NSDictionary dictionary]), the format of the media from the connection will not be changed before being written to the file. If -setOutputSettings:forConnection: was called with a nil dictionary, this method returns a non-nil dictionary reflecting the settings used by the AVCaptureSession's current sessionPreset.
        #[unsafe(method(outputSettingsForConnection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn outputSettingsForConnection(
            &self,
            connection: &AVCaptureConnection,
        ) -> Retained<NSDictionary<NSString, AnyObject>>;

        #[cfg(feature = "AVCaptureSession")]
        /// Sets the options the receiver uses to encode media from the given connection as it is being recorded.
        ///
        ///
        /// Parameter `outputSettings`: An NSDictionary of output settings.
        ///
        /// Parameter `connection`: The connection delivering the media to be encoded.
        ///
        ///
        /// See AVAudioSettings.h for audio connections or AVVideoSettings.h for video connections for more information on how to construct an output settings dictionary. A value of an empty dictionary (i.e. +[NSDictionary dictionary]) means that the format of the media from the connection should not be changed before being written to the file. A value of nil means that the output format will be determined by the session preset. In this case, -outputSettingsForConnection: will return a non-nil dictionary reflecting the settings used by the AVCaptureSession's current sessionPreset.
        ///
        /// On iOS, your outputSettings dictionary may only contain keys listed in -supportedOutputSettingsKeysForConnection:. If you specify any other key, an NSInvalidArgumentException will be thrown. Further restrictions may be imposed on the AVVideoCodecTypeKey. Its value should be present in the -availableVideoCodecTypes array. If AVVideoCompressionPropertiesKey is specified, you must also specify a valid value for AVVideoCodecKey. On iOS versions prior to 12.0, the only settable key for video connections is AVVideoCodecTypeKey. On iOS 12.0 and later, video connections gain support for AVVideoCompressionPropertiesKey.
        ///
        /// On iOS, -outputSettingsForConnection: always provides a fully populated dictionary. If you call -outputSettingsForConnection: with the intent of overriding a few of the values, you must take care to exclude keys that are not supported before calling -setOutputSettings:forConnection:. When providing an AVVideoCompressionPropertiesKey sub-dictionary, you may specify a sparse dictionary. AVCaptureMovieFileOutput will always fill in missing keys with default values for the current AVCaptureSession configuration.
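        ///
        /// A hedged sketch of the nil-reset behavior described above, assuming `output` is an AVCaptureMovieFileOutput and `connection` is a `&AVCaptureConnection` attached to it:
        ///
        /// ```ignore
        /// // Passing None (nil) lets the session preset determine the encoding;
        /// // outputSettingsForConnection: then reports the preset-derived settings.
        /// unsafe { output.setOutputSettings_forConnection(None, connection) };
        /// let effective = unsafe { output.outputSettingsForConnection(connection) };
        /// ```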
        #[unsafe(method(setOutputSettings:forConnection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setOutputSettings_forConnection(
            &self,
            output_settings: Option<&NSDictionary<NSString, AnyObject>>,
            connection: &AVCaptureConnection,
        );

        #[cfg(feature = "AVCaptureSession")]
        /// Returns YES if the movie file output will create a timed metadata track that records samples which reflect changes made to the given connection's videoOrientation and videoMirrored properties during recording.
        ///
        ///
        /// Parameter `connection`: A connection delivering video media to the movie file output. This method throws an NSInvalidArgumentException if the connection does not have a mediaType of AVMediaTypeVideo or if the connection does not terminate at the movie file output.
        ///
        ///
        /// See setRecordsVideoOrientationAndMirroringChanges:asMetadataTrackForConnection: for details on the behavior controlled by this value. The default value returned is NO.
        #[unsafe(method(recordsVideoOrientationAndMirroringChangesAsMetadataTrackForConnection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn recordsVideoOrientationAndMirroringChangesAsMetadataTrackForConnection(
            &self,
            connection: &AVCaptureConnection,
        ) -> bool;

        #[cfg(feature = "AVCaptureSession")]
        /// Controls whether or not the movie file output will create a timed metadata track that records samples which reflect changes made to the given connection's videoOrientation and videoMirrored properties during recording.
        ///
        ///
        /// Parameter `doRecordChanges`: If YES, the movie file output will create a timed metadata track that records samples which reflect changes made to the given connection's videoOrientation and videoMirrored properties during recording.
        ///
        /// Parameter `connection`: A connection delivering video media to the movie file output. This method throws an NSInvalidArgumentException if the connection does not have a mediaType of AVMediaTypeVideo or if the connection does not terminate at the movie file output.
        ///
        ///
        /// When a recording is started, the current state of a video capture connection's videoOrientation and videoMirrored properties is used to build the display matrix for the created video track. The movie file format allows only one display matrix per track, which means that any changes made during a recording to the videoOrientation and videoMirrored properties are not captured. For example, a user starts a recording with their device in the portrait orientation, and then partway through the recording changes the device to a landscape orientation. The landscape orientation requires a different display matrix, but only the initial display matrix (the portrait display matrix) is recorded for the video track.
        ///
        /// By invoking this method the client application directs the movie file output to create an additional track in the captured movie. This track is a timed metadata track that is associated with the video track, and contains one or more samples that contain a Video Orientation value (as defined by the EXIF and TIFF specifications, and enumerated by CGImagePropertyOrientation in ImageIO/CGImageProperties.h). The value represents the display matrix corresponding to the AVCaptureConnection's videoOrientation and videoMirrored properties when applied to the input source. The initial sample written to the timed metadata track represents the video track's display matrix. During recording, additional samples will be written to the timed metadata track whenever the client application changes the video connection's videoOrientation or videoMirrored properties. Using the above example, when the client application detects the user changing the device from portrait to landscape orientation, it updates the video connection's videoOrientation property, thus causing the movie file output to add a new sample to the timed metadata track.
        ///
        /// After capture, playback and editing applications can use the timed metadata track to enhance their user's experience. For example, when playing back the captured movie, a playback engine can use the samples to adjust the display of the video samples to keep the video properly oriented. Another example is an editing application that uses the sample times to suggest cut points for breaking the captured movie into separate clips, where each clip is properly oriented.
        ///
        /// The default behavior is to not create the timed metadata track.
        ///
        /// The doRecordChanges value is only observed at the start of recording. Changes to the value will not have any effect until the next recording is started.
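        ///
        /// A hedged sketch, assuming `output` is an AVCaptureMovieFileOutput and `connection` is its video `&AVCaptureConnection`:
        ///
        /// ```ignore
        /// // Request the timed metadata track before the next recording starts, so that
        /// // orientation/mirroring changes made while recording are preserved.
        /// unsafe {
        ///     output.setRecordsVideoOrientationAndMirroringChanges_asMetadataTrackForConnection(
        ///         true, connection,
        ///     )
        /// };
        /// ```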
        #[unsafe(method(setRecordsVideoOrientationAndMirroringChanges:asMetadataTrackForConnection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setRecordsVideoOrientationAndMirroringChanges_asMetadataTrackForConnection(
            &self,
            do_record_changes: bool,
            connection: &AVCaptureConnection,
        );

        /// Enable or disable a constituent device selection behavior when recording.
        ///
        ///
        /// This property enables a camera selection behavior to be applied when recording a movie. Once recording starts, the specified behavior and conditions take effect. Once recording stops, the camera selection will change back to the primaryConstituentDeviceSwitchingBehavior specified by the AVCaptureDevice. By default, this property is set to YES when connected to an AVCaptureDevice that supports constituent device switching.
        #[unsafe(method(isPrimaryConstituentDeviceSwitchingBehaviorForRecordingEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isPrimaryConstituentDeviceSwitchingBehaviorForRecordingEnabled(&self)
            -> bool;

        /// Setter for [`isPrimaryConstituentDeviceSwitchingBehaviorForRecordingEnabled`][Self::isPrimaryConstituentDeviceSwitchingBehaviorForRecordingEnabled].
        #[unsafe(method(setPrimaryConstituentDeviceSwitchingBehaviorForRecordingEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPrimaryConstituentDeviceSwitchingBehaviorForRecordingEnabled(
            &self,
            primary_constituent_device_switching_behavior_for_recording_enabled: bool,
        );

        #[cfg(feature = "AVCaptureDevice")]
        /// When primaryConstituentDeviceSwitchingBehaviorForRecordingEnabled is set to YES, this method controls the switching behavior and conditions, while a movie file is being recorded.
        ///
        ///
        /// This controls the camera selection behavior used while recording a movie, when enabled through primaryConstituentDeviceSwitchingBehaviorForRecordingEnabled. Setting the switching behavior to anything other than AVCapturePrimaryConstituentDeviceSwitchingBehaviorUnsupported when connected to an AVCaptureDevice that does not support constituent device selection throws an NSInvalidArgumentException. Setting restrictedSwitchingBehaviorConditions to something other than AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditionNone while setting switchingBehavior to something other than AVCapturePrimaryConstituentDeviceSwitchingBehaviorRestricted throws an NSInvalidArgumentException exception.
        #[unsafe(method(setPrimaryConstituentDeviceSwitchingBehaviorForRecording:restrictedSwitchingBehaviorConditions:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPrimaryConstituentDeviceSwitchingBehaviorForRecording_restrictedSwitchingBehaviorConditions(
            &self,
            switching_behavior: AVCapturePrimaryConstituentDeviceSwitchingBehavior,
            restricted_switching_behavior_conditions: AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditions,
        );

        #[cfg(feature = "AVCaptureDevice")]
        /// The primaryConstituentDeviceSwitchingBehavior as set by -[AVCaptureMovieFileOutput setPrimaryConstituentDeviceSwitchingBehaviorForRecording:restrictedSwitchingBehaviorConditions:].
        ///
        ///
        /// By default, this property is set to AVCapturePrimaryConstituentDeviceSwitchingBehaviorRestricted. This property is key-value observable.
        #[unsafe(method(primaryConstituentDeviceSwitchingBehaviorForRecording))]
        #[unsafe(method_family = none)]
        pub unsafe fn primaryConstituentDeviceSwitchingBehaviorForRecording(
            &self,
        ) -> AVCapturePrimaryConstituentDeviceSwitchingBehavior;

        #[cfg(feature = "AVCaptureDevice")]
        /// The primaryConstituentDeviceRestrictedSwitchingBehaviorConditions as set by -[AVCaptureMovieFileOutput setPrimaryConstituentDeviceSwitchingBehaviorForRecording:restrictedSwitchingBehaviorConditions:].
        ///
        ///
        /// By default, this property is set to AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorCondition{VideoZoomChanged | FocusModeChanged | ExposureModeChanged}. This property is key-value observable.
        #[unsafe(method(primaryConstituentDeviceRestrictedSwitchingBehaviorConditionsForRecording))]
        #[unsafe(method_family = none)]
        pub unsafe fn primaryConstituentDeviceRestrictedSwitchingBehaviorConditionsForRecording(
            &self,
        ) -> AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditions;

        /// Returns whether or not capturing spatial video to a file is supported. Note that in order to be supported, two conditions must be met. (1) The source AVCaptureDevice's activeFormat.spatialVideoCaptureSupported property must return YES. (2) The video AVCaptureConnection's activeVideoStabilizationMode property must return AVCaptureVideoStabilizationModeCinematic, AVCaptureVideoStabilizationModeCinematicExtended, or AVCaptureVideoStabilizationModeCinematicExtendedEnhanced.
        #[unsafe(method(isSpatialVideoCaptureSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isSpatialVideoCaptureSupported(&self) -> bool;

        /// Enable or disable capturing spatial video to a file.
        ///
        ///
        /// This property enables capturing spatial video to a file. By default, this property is set to NO. Check spatialVideoCaptureSupported before setting this property, as setting it to YES will throw an exception if the feature is not supported.
        ///
        /// On iOS, enabling spatial video will overwrite the connected AVCaptureDevice's `videoZoomFactor`, `minAvailableVideoZoomFactor`, and `maxAvailableVideoZoomFactor` to the field of view of the narrower camera in the pair.
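        ///
        /// A hedged sketch of the check-then-enable pattern described above, assuming `output` is an AVCaptureMovieFileOutput in a configured session:
        ///
        /// ```ignore
        /// // Only opt in when the device format and stabilization mode allow it;
        /// // enabling it unconditionally may raise an Objective-C exception.
        /// if unsafe { output.isSpatialVideoCaptureSupported() } {
        ///     unsafe { output.setSpatialVideoCaptureEnabled(true) };
        /// }
        /// ```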
        #[unsafe(method(isSpatialVideoCaptureEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isSpatialVideoCaptureEnabled(&self) -> bool;

        /// Setter for [`isSpatialVideoCaptureEnabled`][Self::isSpatialVideoCaptureEnabled].
        #[unsafe(method(setSpatialVideoCaptureEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setSpatialVideoCaptureEnabled(&self, spatial_video_capture_enabled: bool);
    );
}

extern_class!(
    /// AVCaptureAudioFileOutput is a concrete subclass of AVCaptureFileOutput that writes captured audio to any audio file type supported by CoreAudio.
    ///
    ///
    /// AVCaptureAudioFileOutput implements the complete file recording interface declared by AVCaptureFileOutput for writing media data to audio files. In addition, instances of AVCaptureAudioFileOutput allow clients to configure options specific to the audio file formats, including allowing them to write metadata collections to each file and specify audio encoding options.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfoundation/avcaptureaudiofileoutput?language=objc)
    #[unsafe(super(AVCaptureFileOutput, AVCaptureOutput, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "AVCaptureOutputBase")]
    pub struct AVCaptureAudioFileOutput;
);

#[cfg(feature = "AVCaptureOutputBase")]
unsafe impl NSObjectProtocol for AVCaptureAudioFileOutput {}

#[cfg(feature = "AVCaptureOutputBase")]
impl AVCaptureAudioFileOutput {
    extern_methods!(
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;

        #[cfg(feature = "AVMediaFormat")]
        /// Provides the file types AVCaptureAudioFileOutput can write.
        ///
        ///
        /// Returns: An NSArray of UTIs identifying the file types the AVCaptureAudioFileOutput class can write.
        #[unsafe(method(availableOutputFileTypes))]
        #[unsafe(method_family = none)]
        pub unsafe fn availableOutputFileTypes() -> Retained<NSArray<AVFileType>>;

        #[cfg(feature = "AVMediaFormat")]
        /// Tells the receiver to start recording to a new file of the specified format, and specifies a delegate that will be notified when recording is finished.
        ///
        ///
        /// Parameter `outputFileURL`: An NSURL object containing the URL of the output file. This method throws an NSInvalidArgumentException if the URL is not a valid file URL.
        ///
        /// Parameter `fileType`: A UTI indicating the format of the file to be written.
        ///
        /// Parameter `delegate`: An object conforming to the AVCaptureFileOutputRecordingDelegate protocol. Clients must specify a delegate so that they can be notified when recording to the given URL is finished.
        ///
        ///
        /// The method sets the file URL to which the receiver is currently writing output media. If a file at the given URL already exists when capturing starts, recording to the new file will fail.
        ///
        /// The fileType argument is a UTI corresponding to the audio file format that should be written. UTIs for common audio file types are declared in AVMediaFormat.h.
        ///
        /// Clients need not call stopRecording before calling this method while another recording is in progress. If this method is invoked while an existing output file was already being recorded, no media samples will be discarded between the old file and the new file.
        ///
        /// When recording is stopped either by calling stopRecording, by changing files using this method, or because of an error, the remaining data that needs to be included in the file will be written in the background. Therefore, clients must specify a delegate that will be notified when all data has been written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method. The recording delegate can also optionally implement methods that inform it when data starts being written, when recording is paused and resumed, and when recording is about to be finished.
        ///
        /// On macOS, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the new file are guaranteed to be those contained in the sample buffer passed to that method.
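        ///
        /// A hedged sketch, assuming `audio_output` is an AVCaptureAudioFileOutput in a running session, `url` is a file NSURL, and `delegate` is a `&ProtocolObject<dyn AVCaptureFileOutputRecordingDelegate>`:
        ///
        /// ```ignore
        /// // Pick one of the UTIs the class reports it can write, then start recording.
        /// let types = unsafe { AVCaptureAudioFileOutput::availableOutputFileTypes() };
        /// let file_type = types.firstObject().expect("no supported audio file types");
        /// unsafe {
        ///     audio_output.startRecordingToOutputFileURL_outputFileType_recordingDelegate(
        ///         &url, &file_type, delegate,
        ///     )
        /// };
        /// ```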
        #[unsafe(method(startRecordingToOutputFileURL:outputFileType:recordingDelegate:))]
        #[unsafe(method_family = none)]
        pub unsafe fn startRecordingToOutputFileURL_outputFileType_recordingDelegate(
            &self,
            output_file_url: &NSURL,
            file_type: &AVFileType,
            delegate: &ProtocolObject<dyn AVCaptureFileOutputRecordingDelegate>,
        );

        #[cfg(feature = "AVMetadataItem")]
        /// A collection of metadata to be written to the receiver's output files.
        ///
        ///
        /// The value of this property is an array of AVMetadataItem objects representing the collection of top-level metadata to be written in each output file. Only ID3 v2.2, v2.3, or v2.4 style metadata items are supported.
        #[unsafe(method(metadata))]
        #[unsafe(method_family = none)]
        pub unsafe fn metadata(&self) -> Retained<NSArray<AVMetadataItem>>;

        #[cfg(feature = "AVMetadataItem")]
        /// Setter for [`metadata`][Self::metadata].
        #[unsafe(method(setMetadata:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMetadata(&self, metadata: &NSArray<AVMetadataItem>);

        /// Specifies the options the receiver uses to re-encode audio as it is being recorded.
        ///
        ///
        /// The output settings dictionary can contain values for keys from AVAudioSettings.h. A value of nil indicates that the format of the audio should not be changed before being written to the file.
        #[unsafe(method(audioSettings))]
        #[unsafe(method_family = none)]
        pub unsafe fn audioSettings(&self) -> Option<Retained<NSDictionary<NSString, AnyObject>>>;

        /// Setter for [`audioSettings`][Self::audioSettings].
        #[unsafe(method(setAudioSettings:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAudioSettings(
            &self,
            audio_settings: Option<&NSDictionary<NSString, AnyObject>>,
        );
    );
}