objc2_avf_audio/generated/AVAudioSessionTypes.rs
1//! This file has been automatically generated by `objc2`'s `header-translator`.
2//! DO NOT EDIT
3use objc2::__framework_prelude::*;
4use objc2_foundation::*;
5
6use crate::*;
7
/// A port describes a specific type of audio input or output device or connector.
///
/// This is a "typed enum" string: the valid values are the
/// `AVAudioSessionPort…` extern statics declared in this file.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionport?language=objc)
// NS_TYPED_ENUM
pub type AVAudioSessionPort = NSString;
13
// NOTE(review): these statics are declared as `Option` presumably because the
// symbols are weakly linked and may be absent on older OS versions (yielding
// `None` at runtime) — confirm against objc2's generated-binding conventions.
extern "C" {
    /// Continuity microphone for Apple TV.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportcontinuitymicrophone?language=objc)
    pub static AVAudioSessionPortContinuityMicrophone: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Line level input on a dock connector
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportlinein?language=objc)
    pub static AVAudioSessionPortLineIn: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Built-in microphone on an iOS device
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportbuiltinmic?language=objc)
    pub static AVAudioSessionPortBuiltInMic: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Microphone on a wired headset. Headset refers to an accessory that has headphone outputs paired with a
    /// microphone.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportheadsetmic?language=objc)
    pub static AVAudioSessionPortHeadsetMic: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Line level output on a dock connector
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportlineout?language=objc)
    pub static AVAudioSessionPortLineOut: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Headphone or headset output
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportheadphones?language=objc)
    pub static AVAudioSessionPortHeadphones: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Output on a Bluetooth A2DP device
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportbluetootha2dp?language=objc)
    pub static AVAudioSessionPortBluetoothA2DP: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// The speaker you hold to your ear when on a phone call
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportbuiltinreceiver?language=objc)
    pub static AVAudioSessionPortBuiltInReceiver: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Built-in speaker on an iOS device
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportbuiltinspeaker?language=objc)
    pub static AVAudioSessionPortBuiltInSpeaker: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Output via High-Definition Multimedia Interface
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionporthdmi?language=objc)
    pub static AVAudioSessionPortHDMI: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Output on a remote AirPlay device
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportairplay?language=objc)
    pub static AVAudioSessionPortAirPlay: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Output on a Bluetooth Low Energy device
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportbluetoothle?language=objc)
    pub static AVAudioSessionPortBluetoothLE: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output on a Bluetooth Hands-Free Profile device
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportbluetoothhfp?language=objc)
    pub static AVAudioSessionPortBluetoothHFP: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output on a Universal Serial Bus device
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportusbaudio?language=objc)
    pub static AVAudioSessionPortUSBAudio: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output via Car Audio
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportcaraudio?language=objc)
    pub static AVAudioSessionPortCarAudio: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output that does not correspond to real audio hardware
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportvirtual?language=objc)
    pub static AVAudioSessionPortVirtual: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output connected via the PCI (Peripheral Component Interconnect) bus
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportpci?language=objc)
    pub static AVAudioSessionPortPCI: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output connected via FireWire
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportfirewire?language=objc)
    pub static AVAudioSessionPortFireWire: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output connected via DisplayPort
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportdisplayport?language=objc)
    pub static AVAudioSessionPortDisplayPort: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output connected via AVB (Audio Video Bridging)
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportavb?language=objc)
    pub static AVAudioSessionPortAVB: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output connected via Thunderbolt
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportthunderbolt?language=objc)
    pub static AVAudioSessionPortThunderbolt: Option<&'static AVAudioSessionPort>;
}
161
/// A category defines a broad set of behaviors for a session.
///
/// This is a "typed enum" string: the valid values are the
/// `AVAudioSessionCategory…` extern statics declared in this file.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategory?language=objc)
// NS_TYPED_ENUM
pub type AVAudioSessionCategory = NSString;
167
extern "C" {
    /// Use this category for background sounds such as rain, car engine noise, etc.
    /// Mixes with other music.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategoryambient?language=objc)
    pub static AVAudioSessionCategoryAmbient: Option<&'static AVAudioSessionCategory>;
}

extern "C" {
    /// Use this category for background sounds. Other music will stop playing.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategorysoloambient?language=objc)
    pub static AVAudioSessionCategorySoloAmbient: Option<&'static AVAudioSessionCategory>;
}

extern "C" {
    /// Use this category for music tracks.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategoryplayback?language=objc)
    pub static AVAudioSessionCategoryPlayback: Option<&'static AVAudioSessionCategory>;
}

extern "C" {
    /// Use this category when recording audio.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategoryrecord?language=objc)
    pub static AVAudioSessionCategoryRecord: Option<&'static AVAudioSessionCategory>;
}

extern "C" {
    /// Use this category when recording and playing back audio.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategoryplayandrecord?language=objc)
    pub static AVAudioSessionCategoryPlayAndRecord: Option<&'static AVAudioSessionCategory>;
}

extern "C" {
    /// Use this category when using a hardware codec or signal processor while
    /// not playing or recording audio.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategoryaudioprocessing?language=objc)
    #[deprecated = "No longer supported"]
    pub static AVAudioSessionCategoryAudioProcessing: Option<&'static AVAudioSessionCategory>;
}

extern "C" {
    /// Use this category to customize the usage of available audio accessories and built-in audio hardware.
    /// For example, this category provides an application with the ability to use an available USB output
    /// and headphone output simultaneously for separate, distinct streams of audio data. Use of
    /// this category by an application requires a more detailed knowledge of, and interaction with,
    /// the capabilities of the available audio routes. May be used for input, output, or both.
    /// Note that not all output types and output combinations are eligible for multi-route. Input is limited
    /// to the last-in input port. Eligible inputs consist of the following:
    /// AVAudioSessionPortUSBAudio, AVAudioSessionPortHeadsetMic, and AVAudioSessionPortBuiltInMic.
    /// Eligible outputs consist of the following:
    /// AVAudioSessionPortUSBAudio, AVAudioSessionPortLineOut, AVAudioSessionPortHeadphones, AVAudioSessionPortHDMI,
    /// and AVAudioSessionPortBuiltInSpeaker.
    /// Note that AVAudioSessionPortBuiltInSpeaker is only allowed to be used when there are no other eligible
    /// outputs connected.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategorymultiroute?language=objc)
    pub static AVAudioSessionCategoryMultiRoute: Option<&'static AVAudioSessionCategory>;
}
231
/// Modes modify the audio category in order to introduce behavior that is tailored to the specific
/// use of audio within an application. Available in iOS 5.0 and greater.
///
/// This is a "typed enum" string: the valid values are the
/// `AVAudioSessionMode…` extern statics declared in this file.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmode?language=objc)
// NS_TYPED_ENUM
pub type AVAudioSessionMode = NSString;
238
extern "C" {
    /// The default mode
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodedefault?language=objc)
    pub static AVAudioSessionModeDefault: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Only valid with AVAudioSessionCategoryPlayAndRecord. Appropriate for Voice over IP
    /// (VoIP) applications. Reduces the number of allowable audio routes to be only those
    /// that are appropriate for VoIP applications and may engage appropriate system-supplied
    /// signal processing. Has the side effect of setting AVAudioSessionCategoryOptionAllowBluetoothHFP.
    /// Using this mode without the VoiceProcessing IO unit or AVAudioEngine with voice processing enabled will result in the following:
    /// - Chat-specific signal processing such as echo cancellation or automatic gain correction will not be loaded
    /// - Dynamic processing on input and output will be disabled resulting in a lower output playback level.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodevoicechat?language=objc)
    pub static AVAudioSessionModeVoiceChat: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Set by Game Kit on behalf of an application that uses a GKVoiceChat object; valid
    /// only with the AVAudioSessionCategoryPlayAndRecord category.
    /// Do not set this mode directly. If you need similar behavior and are not using
    /// a GKVoiceChat object, use AVAudioSessionModeVoiceChat instead.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodegamechat?language=objc)
    pub static AVAudioSessionModeGameChat: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Only valid with AVAudioSessionCategoryPlayAndRecord or AVAudioSessionCategoryRecord.
    /// Modifies the audio routing options and may engage appropriate system-supplied signal processing.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodevideorecording?language=objc)
    pub static AVAudioSessionModeVideoRecording: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Appropriate for applications that wish to minimize the effect of system-supplied signal
    /// processing for input and/or output audio signals.
    /// This mode disables some dynamics processing on input and output resulting in a lower output playback level.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodemeasurement?language=objc)
    pub static AVAudioSessionModeMeasurement: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Appropriate for applications playing movie content. Only valid with AVAudioSessionCategoryPlayback.
    /// Setting this mode engages appropriate output signal processing for movie playback scenarios.
    /// Content using this mode is eligible for Enhance Dialogue processing on supported routes with capable hardware
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodemovieplayback?language=objc)
    pub static AVAudioSessionModeMoviePlayback: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Only valid with AVAudioSessionCategoryPlayAndRecord. Reduces the number of allowable audio
    /// routes to be only those that are appropriate for video chat applications. May engage appropriate
    /// system-supplied signal processing. Has the side effect of setting
    /// AVAudioSessionCategoryOptionAllowBluetoothHFP and AVAudioSessionCategoryOptionDefaultToSpeaker.
    /// Using this mode without the VoiceProcessing IO unit or AVAudioEngine with voice processing enabled will result in the following:
    /// - Chat-specific signal processing such as echo cancellation or automatic gain correction will not be loaded
    /// - Dynamic processing on input and output will be disabled resulting in a lower output playback level.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodevideochat?language=objc)
    pub static AVAudioSessionModeVideoChat: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Appropriate for applications which play spoken audio and wish to be paused (via audio session interruption) rather than ducked
    /// if another app (such as a navigation app) plays a spoken audio prompt. Examples of apps that would use this are podcast players and
    /// audio books. For more information, see the related category option AVAudioSessionCategoryOptionInterruptSpokenAudioAndMixWithOthers.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodespokenaudio?language=objc)
    pub static AVAudioSessionModeSpokenAudio: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Appropriate for applications which play audio using text to speech. Setting this mode allows for different routing behaviors when
    /// connected to certain audio devices such as CarPlay. An example of an app that would use this mode is a turn by turn navigation app that
    /// plays short prompts to the user. Typically, these same types of applications would also configure their session to use
    /// AVAudioSessionCategoryOptionDuckOthers and AVAudioSessionCategoryOptionInterruptSpokenAudioAndMixWithOthers
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodevoiceprompt?language=objc)
    pub static AVAudioSessionModeVoicePrompt: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Appropriate for applications playing short-form video content.
    ///
    /// Only valid with ``AVAudioSessionCategoryPlayback``.
    /// Not applicable with ``AVAudioSessionRouteSharingPolicy/AVAudioSessionRouteSharingPolicyLongFormAudio``,
    /// or ``AVAudioSessionRouteSharingPolicy/AVAudioSessionRouteSharingPolicyLongFormVideo``.
    ///
    /// When this mode is set:
    /// - system will make informed decisions to automatically unmute the output of the media if the user shows intention of unmuting.
    /// - When auto-unmuted, ``AVAudioSessionUserIntentToUnmuteOutputNotification`` and ``AVAudioSessionOutputMuteStateChangeNotification`` will be sent.
    /// - if the session is output muted, system may prevent interrupting other active audio apps.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodeshortformvideo?language=objc)
    pub static AVAudioSessionModeShortFormVideo: Option<&'static AVAudioSessionMode>;
}
342
extern "C" {
    /// Notification sent to registered listeners when the system has interrupted the audio
    /// session and when the interruption has ended.
    ///
    /// Check the notification's userInfo dictionary for the interruption type, which is either
    /// Begin or End. In the case of an end interruption notification, check the userInfo dictionary
    /// for AVAudioSessionInterruptionOptions that indicate whether audio playback should resume.
    /// In the case of a begin interruption notification, the reason for the interruption can be found
    /// within the info dictionary under the key AVAudioSessionInterruptionReasonKey.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioninterruptionnotification?language=objc)
    pub static AVAudioSessionInterruptionNotification: Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners when an audio route change has occurred.
    ///
    /// Check the notification's userInfo dictionary for the route change reason and for a description
    /// of the previous audio route.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionroutechangenotification?language=objc)
    pub static AVAudioSessionRouteChangeNotification: Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners if the media server is killed.
    ///
    /// In the event that the server is killed, take appropriate steps to handle requests that come in
    /// before the server resets. See Technical Q&A QA1749.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmediaserviceswerelostnotification?language=objc)
    pub static AVAudioSessionMediaServicesWereLostNotification: Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners when the media server restarts.
    ///
    /// In the event that the server restarts, take appropriate steps to re-initialize any audio objects
    /// used by your application. See Technical Q&A QA1749.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmediaserviceswereresetnotification?language=objc)
    pub static AVAudioSessionMediaServicesWereResetNotification:
        Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners when they are in the foreground with an active
    /// audio session and primary audio from other applications starts and stops.
    ///
    /// Check the notification's userInfo dictionary for the notification type, which is either Begin or
    /// End. Foreground applications may use this notification as a hint to enable or disable audio that
    /// is secondary to the functionality of the application. For more information, see the related
    /// property secondaryAudioShouldBeSilencedHint.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionsilencesecondaryaudiohintnotification?language=objc)
    pub static AVAudioSessionSilenceSecondaryAudioHintNotification:
        Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners when spatial playback capabilities are changed due to a
    /// change in user preference.
    ///
    /// Check the notification's userInfo dictionary for AVAudioSessionSpatialAudioEnabledKey to check if spatial
    /// audio is enabled.
    ///
    /// Observers of this notification should also observe AVAudioSessionRouteChangeNotification since a route change
    /// may also result in a change in the ability for the system to play spatial audio. Use
    /// AVAudioSessionPortDescription's isSpatialAudioEnabled property to check if the current route supports
    /// spatialized playback.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionspatialplaybackcapabilitieschangednotification?language=objc)
    pub static AVAudioSessionSpatialPlaybackCapabilitiesChangedNotification:
        Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners when the resolved rendering mode changes.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionrenderingmodechangenotification?language=objc)
    pub static AVAudioSessionRenderingModeChangeNotification: Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners when the rendering capabilities change.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionrenderingcapabilitieschangenotification?language=objc)
    pub static AVAudioSessionRenderingCapabilitiesChangeNotification:
        Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners when the system's capability to inject audio into input stream is changed
    ///
    /// Check the notification's userInfo dictionary for AVAudioSessionMicrophoneInjectionIsAvailableKey to check if microphone
    /// injection is available. Use AVAudioSession's isMicrophoneInjectionAvailable property to check if microphone injection is available
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmicrophoneinjectioncapabilitieschangenotification?language=objc)
    pub static AVAudioSessionMicrophoneInjectionCapabilitiesChangeNotification:
        Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners when session's output mute state changes.
    ///
    /// The userInfo dictionary will contain the updated output mute value as accessed by ``AVAudioSessionMuteStateKey``
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionoutputmutestatechangenotification?language=objc)
    pub static AVAudioSessionOutputMuteStateChangeNotification: Option<&'static NSNotificationName>;
}

extern "C" {
    /// Key for ``AVAudioSessionOutputMuteStateChangeNotification``.
    /// Value is `NSNumber` type with boolean value 0 for unmuted or value 1 for muted (samples zeroed out)
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmutestatekey?language=objc)
    pub static AVAudioSessionMuteStateKey: Option<&'static NSString>;
}

extern "C" {
    /// Notification sent to registered listeners when the application's output is muted and user hints to unmute.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionuserintenttounmuteoutputnotification?language=objc)
    pub static AVAudioSessionUserIntentToUnmuteOutputNotification:
        Option<&'static NSNotificationName>;
}
473
extern "C" {
    /// Key for AVAudioSessionSpatialPlaybackCapabilitiesChangedNotification.
    /// Value is an NSNumber whose boolean value indicates if spatial audio is enabled.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionspatialaudioenabledkey?language=objc)
    pub static AVAudioSessionSpatialAudioEnabledKey: Option<&'static NSString>;
}

extern "C" {
    /// Key for AVAudioSessionInterruptionNotification.
    /// Value is an NSNumber representing an AVAudioSessionInterruptionType
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioninterruptiontypekey?language=objc)
    pub static AVAudioSessionInterruptionTypeKey: Option<&'static NSString>;
}

extern "C" {
    /// Only present for end interruption events. Value is of type AVAudioSessionInterruptionOptions.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioninterruptionoptionkey?language=objc)
    pub static AVAudioSessionInterruptionOptionKey: Option<&'static NSString>;
}

extern "C" {
    /// Only present in begin interruption events. Value is of type AVAudioSessionInterruptionReason.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioninterruptionreasonkey?language=objc)
    pub static AVAudioSessionInterruptionReasonKey: Option<&'static NSString>;
}

extern "C" {
    /// Only present in begin interruption events, where the interruption is a direct result of the
    /// application being suspended by the operating system. Value is a boolean NSNumber, where a true
    /// value indicates that the interruption is the result of the application being suspended, rather
    /// than being interrupted by another audio session.
    ///
    /// Starting in iOS 10, the system will deactivate the audio session of most apps in response to the
    /// app process being suspended. When the app starts running again, it will receive the notification
    /// that its session has been deactivated by the system. Note that the notification is necessarily
    /// delayed in time, due to the fact that the application was suspended at the time the session was
    /// deactivated by the system and the notification can only be delivered once the app is running
    /// again.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioninterruptionwassuspendedkey?language=objc)
    #[deprecated = "No longer supported - see AVAudioSessionInterruptionReasonKey"]
    pub static AVAudioSessionInterruptionWasSuspendedKey: Option<&'static NSString>;
}

extern "C" {
    /// Key for AVAudioSessionRouteChangeNotification.
    /// Value is an NSNumber representing an AVAudioSessionRouteChangeReason
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionroutechangereasonkey?language=objc)
    pub static AVAudioSessionRouteChangeReasonKey: Option<&'static NSString>;
}

extern "C" {
    /// Value is AVAudioSessionRouteDescription *
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionroutechangepreviousroutekey?language=objc)
    pub static AVAudioSessionRouteChangePreviousRouteKey: Option<&'static NSString>;
}

extern "C" {
    /// Key for AVAudioSessionSilenceSecondaryAudioHintNotification.
    /// Value is an NSNumber representing an AVAudioSessionSilenceSecondaryAudioHintType
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionsilencesecondaryaudiohinttypekey?language=objc)
    pub static AVAudioSessionSilenceSecondaryAudioHintTypeKey: Option<&'static NSString>;
}

extern "C" {
    /// Key for AVAudioSessionRenderingModeChangeNotification.
    /// Contains a payload of NSInteger representing the new resolved rendering mode
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionrenderingmodenewrenderingmodekey?language=objc)
    pub static AVAudioSessionRenderingModeNewRenderingModeKey: Option<&'static NSString>;
}

extern "C" {
    /// Key for AVAudioSessionMicrophoneInjectionCapabilitiesChangeNotification.
    ///
    /// Indicates if microphone injection is available.
    /// Value is an NSNumber whose boolean value indicates if microphone injection is available.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmicrophoneinjectionisavailablekey?language=objc)
    pub static AVAudioSessionMicrophoneInjectionIsAvailableKey: Option<&'static NSString>;
}

extern "C" {
    /// Notification sent to registered listeners when there are changes in ``availableInputs``.
    ///
    /// There is no payload (userInfo dictionary) associated with the ``AVAudioSessionAvailableInputsChangeNotification`` notification.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionavailableinputschangenotification?language=objc)
    pub static AVAudioSessionAvailableInputsChangeNotification: Option<&'static NSNotificationName>;
}
571
/// For use with activateWithOptions:completionHandler:
///
/// Reserved for future use. Added in watchOS 5.0.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionactivationoptions?language=objc)
// NS_OPTIONS
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionActivationOptions(pub NSUInteger);
bitflags::bitflags! {
    impl AVAudioSessionActivationOptions: NSUInteger {
        /// No options — the only value currently defined for this option set.
        #[doc(alias = "AVAudioSessionActivationOptionNone")]
        const None = 0;
    }
}

// SAFETY: the type is #[repr(transparent)] over NSUInteger, so it shares
// NSUInteger's Objective-C type encoding.
unsafe impl Encode for AVAudioSessionActivationOptions {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// SAFETY: a reference/pointer to this type is encoded as a pointer to the
// inner encoding, matching the Encode impl above.
unsafe impl RefEncode for AVAudioSessionActivationOptions {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
595
/// For use with overrideOutputAudioPort:error:
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportoverride?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionPortOverride(pub NSUInteger);
impl AVAudioSessionPortOverride {
    /// No override. Return audio routing to the default state for the current audio category.
    #[doc(alias = "AVAudioSessionPortOverrideNone")]
    pub const None: Self = Self(0);
    /// Route audio output to speaker. Use this override with AVAudioSessionCategoryPlayAndRecord,
    /// which by default routes the output to the receiver.
    // 0x73706b72 is the four-character code 'spkr' (bytes 's' 'p' 'k' 'r').
    #[doc(alias = "AVAudioSessionPortOverrideSpeaker")]
    pub const Speaker: Self = Self(0x73706b72);
}

// SAFETY: the type is #[repr(transparent)] over NSUInteger, so it shares
// NSUInteger's Objective-C type encoding.
unsafe impl Encode for AVAudioSessionPortOverride {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// SAFETY: a reference/pointer to this type is encoded as a pointer to the
// inner encoding, matching the Encode impl above.
unsafe impl RefEncode for AVAudioSessionPortOverride {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
620
/// Values for AVAudioSessionRouteChangeReasonKey in AVAudioSessionRouteChangeNotification's
/// userInfo dictionary
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionroutechangereason?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionRouteChangeReason(pub NSUInteger);
impl AVAudioSessionRouteChangeReason {
    /// The reason is unknown.
    #[doc(alias = "AVAudioSessionRouteChangeReasonUnknown")]
    pub const Unknown: Self = Self(0);
    /// A new device became available (e.g. headphones have been plugged in).
    #[doc(alias = "AVAudioSessionRouteChangeReasonNewDeviceAvailable")]
    pub const NewDeviceAvailable: Self = Self(1);
    /// The old device became unavailable (e.g. headphones have been unplugged).
    #[doc(alias = "AVAudioSessionRouteChangeReasonOldDeviceUnavailable")]
    pub const OldDeviceUnavailable: Self = Self(2);
    /// The audio category has changed (e.g. AVAudioSessionCategoryPlayback has been changed to
    /// AVAudioSessionCategoryPlayAndRecord).
    #[doc(alias = "AVAudioSessionRouteChangeReasonCategoryChange")]
    pub const CategoryChange: Self = Self(3);
    /// The route has been overridden (e.g. category is AVAudioSessionCategoryPlayAndRecord and
    /// the output has been changed from the receiver, which is the default, to the speaker).
    #[doc(alias = "AVAudioSessionRouteChangeReasonOverride")]
    pub const Override: Self = Self(4);
    /// The device woke from sleep.
    ///
    /// NOTE: the raw values jump from 4 to 6 here; no constant with value 5 is defined
    /// in this enum.
    #[doc(alias = "AVAudioSessionRouteChangeReasonWakeFromSleep")]
    pub const WakeFromSleep: Self = Self(6);
    /// Returned when there is no route for the current category (for instance, the category is
    /// AVAudioSessionCategoryRecord but no input device is available).
    #[doc(alias = "AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory")]
    pub const NoSuitableRouteForCategory: Self = Self(7);
    /// Indicates that the set of input and/or output ports has not changed, but some aspect of
    /// their configuration has changed. For example, a port's selected data source has changed.
    /// (Introduced in iOS 7.0, watchOS 2.0, tvOS 9.0).
    #[doc(alias = "AVAudioSessionRouteChangeReasonRouteConfigurationChange")]
    pub const RouteConfigurationChange: Self = Self(8);
}

// The Objective-C type encoding is that of the wrapped NSUInteger.
unsafe impl Encode for AVAudioSessionRouteChangeReason {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// Pointer-to-value encoding, derived from the Encode impl above.
unsafe impl RefEncode for AVAudioSessionRouteChangeReason {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
668
/// Customization of various aspects of a category's behavior.
/// Use with ``AVAudioSession/setCategory:mode:options:error:``.
///
/// Applications must be prepared for changing category options to fail as behavior may change
/// in future releases. If an application changes its category, it should reassert the options,
/// since they are not sticky across category changes. Introduced in iOS 6.0 / watchOS 2.0 /
/// tvOS 9.0.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategoryoptions?language=objc)
// NS_OPTIONS
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionCategoryOptions(pub NSUInteger);
bitflags::bitflags! {
    impl AVAudioSessionCategoryOptions: NSUInteger {
/// Controls whether other active audio apps will be interrupted or mixed with when your app's
/// audio session goes active. Details depend on the category.
///
/// - ``AVAudioSessionCategoryPlayAndRecord`` or ``AVAudioSessionCategoryMultiRoute``:
/// MixWithOthers defaults to false, but can be set to true, allowing other applications to
/// play in the background while your app has both audio input and output enabled.
///
/// - ``AVAudioSessionCategoryPlayback``:
/// MixWithOthers defaults to false, but can be set to true, allowing other applications to
/// play in the background. Your app will still be able to play regardless of the setting
/// of the ringer switch.
///
/// - Other categories:
/// MixWithOthers defaults to false and cannot be changed.
///
/// MixWithOthers is only valid with ``AVAudioSessionCategoryPlayAndRecord``,
/// ``AVAudioSessionCategoryPlayback``, and ``AVAudioSessionCategoryMultiRoute``.
        #[doc(alias = "AVAudioSessionCategoryOptionMixWithOthers")]
        const MixWithOthers = 0x1;
/// Controls whether or not other active audio apps will be ducked when your app's audio
/// session goes active. An example of this is a workout app, which provides periodic updates to
/// the user. It reduces the volume of any music currently being played while it provides its
/// status.
///
/// Defaults to off. Note that the other audio will be ducked for as long as the current session
/// is active. You will need to deactivate your audio session when you want to restore full
/// volume playback (un-duck) other sessions.
///
/// Setting this option will also make your session mixable with others
/// (``AVAudioSessionCategoryOptionMixWithOthers`` will be set).
///
/// DuckOthers is only valid with ``AVAudioSessionCategoryAmbient``,
/// ``AVAudioSessionCategoryPlayAndRecord``, ``AVAudioSessionCategoryPlayback``, and
/// ``AVAudioSessionCategoryMultiRoute``.
        #[doc(alias = "AVAudioSessionCategoryOptionDuckOthers")]
        const DuckOthers = 0x2;
/// Deprecated - please see ``AVAudioSessionCategoryOptionAllowBluetoothHFP``
        #[doc(alias = "AVAudioSessionCategoryOptionAllowBluetooth")]
#[deprecated]
        const AllowBluetooth = 0x4;
/// Allows an application to change the default behavior of some audio session categories with
/// regard to whether Bluetooth Hands-Free Profile (HFP) devices are available for routing. The
/// exact behavior depends on the category.
///
/// - ``AVAudioSessionCategoryPlayAndRecord``:
/// AllowBluetoothHFP defaults to false, but can be set to true, allowing a paired bluetooth
/// HFP device to appear as an available route for input, while playing through the
/// category-appropriate output.
///
/// - ``AVAudioSessionCategoryRecord``:
/// AllowBluetoothHFP defaults to false, but can be set to true, allowing a paired Bluetooth
/// HFP device to appear as an available route for input.
///
/// - Other categories:
/// AllowBluetoothHFP defaults to false and cannot be changed. Enabling Bluetooth for input in
/// these categories is not allowed.
///
/// Note: this is the renamed replacement for the deprecated
/// ``AVAudioSessionCategoryOptionAllowBluetooth`` and shares its raw value (0x4).
        #[doc(alias = "AVAudioSessionCategoryOptionAllowBluetoothHFP")]
        const AllowBluetoothHFP = 0x4;
/// Allows an application to change the default behavior of some audio session categories with
/// regard to the audio route. The exact behavior depends on the category.
///
/// - ``AVAudioSessionCategoryPlayAndRecord``:
/// DefaultToSpeaker will default to false, but can be set to true, routing to Speaker
/// (instead of Receiver) when no other audio route is connected.
///
/// - Other categories:
/// DefaultToSpeaker is always false and cannot be changed.
        #[doc(alias = "AVAudioSessionCategoryOptionDefaultToSpeaker")]
        const DefaultToSpeaker = 0x8;
/// When a session with InterruptSpokenAudioAndMixWithOthers set goes active, then if there is
/// another playing app whose session mode is ``AVAudioSessionModeSpokenAudio`` (for podcast
/// playback in the background, for example), then the spoken-audio session will be
/// interrupted. A good use of this is for a navigation app that provides prompts to its user:
/// it pauses any spoken audio currently being played while it plays the prompt.
///
/// InterruptSpokenAudioAndMixWithOthers defaults to off. Note that the other app's audio will
/// be paused for as long as the current session is active. You will need to deactivate your
/// audio session to allow the other session to resume playback. Setting this option will also
/// make your category mixable with others (``AVAudioSessionCategoryOptionMixWithOthers``
/// will be set). If you want other non-spoken audio apps to duck their audio when your app's session
/// goes active, also set ``AVAudioSessionCategoryOptionDuckOthers``.
///
/// Only valid with ``AVAudioSessionCategoryPlayAndRecord``,
/// ``AVAudioSessionCategoryPlayback``, and ``AVAudioSessionCategoryMultiRoute``.
///
/// (The raw value 0x11 includes the ``MixWithOthers`` bit 0x1, reflecting that MixWithOthers
/// is implied by this option.)
        #[doc(alias = "AVAudioSessionCategoryOptionInterruptSpokenAudioAndMixWithOthers")]
        const InterruptSpokenAudioAndMixWithOthers = 0x11;
/// Allows an application to change the default behavior of some audio session categories with
/// regard to whether Bluetooth Advanced Audio Distribution Profile (A2DP) devices are
/// available for routing. The exact behavior depends on the category.
///
/// - ``AVAudioSessionCategoryPlayAndRecord``:
/// AllowBluetoothA2DP defaults to false, but can be set to true, allowing a paired
/// Bluetooth A2DP device to appear as an available route for output, while recording
/// through the category-appropriate input.
///
/// - ``AVAudioSessionCategoryMultiRoute`` and ``AVAudioSessionCategoryRecord``:
/// AllowBluetoothA2DP is false, and cannot be set to true.
///
/// - Other categories:
/// AllowBluetoothA2DP is always implicitly true and cannot be changed; Bluetooth A2DP ports
/// are always supported in output-only categories.
///
/// Setting both ``AVAudioSessionCategoryOptionAllowBluetoothHFP``
/// and ``AVAudioSessionCategoryOptionAllowBluetoothA2DP`` is
/// allowed. In cases where a single Bluetooth device supports both HFP and A2DP, the HFP
/// ports will be given a higher priority for routing. For HFP and A2DP ports on separate
/// hardware devices, the last-in wins rule applies.
        #[doc(alias = "AVAudioSessionCategoryOptionAllowBluetoothA2DP")]
        const AllowBluetoothA2DP = 0x20;
/// Allows an application to change the default behavior of some audio session categories
/// with regard to showing AirPlay devices as available routes. This option applies to
/// various categories in the same way as ``AVAudioSessionCategoryOptionAllowBluetoothA2DP``;
/// see above for details. Only valid with ``AVAudioSessionCategoryPlayAndRecord``.
        #[doc(alias = "AVAudioSessionCategoryOptionAllowAirPlay")]
        const AllowAirPlay = 0x40;
/// Some devices include a privacy feature that mutes the built-in microphone at a hardware level
/// under certain conditions e.g. when the Smart Folio of an iPad is closed. The default behavior is
/// to interrupt the session using the built-in microphone when that microphone is muted in hardware.
/// This option allows an application to opt out of the default behavior if it is using a category that
/// supports both input and output, such as ``AVAudioSessionCategoryPlayAndRecord``, and wants to
/// allow its session to stay activated even when the microphone has been muted. The result would be
/// that playback continues as normal, and microphone sample buffers would continue to be produced
/// but all microphone samples would have a value of zero.
///
/// This may be useful if an application knows that it wants to allow playback to continue and
/// recording/monitoring a muted microphone will not lead to a poor user experience. Attempting to use
/// this option with a session category that doesn't support the use of audio input will result in an error.
///
/// - Note Under the default policy, a session will be interrupted if it is running input at the time when
/// the microphone is muted in hardware. Similarly, attempting to start input when the microphone is
/// muted will fail.
/// - Note This option has no relation to the recordPermission property, which indicates whether or
/// not the user has granted permission to use microphone input.
        #[doc(alias = "AVAudioSessionCategoryOptionOverrideMutedMicrophoneInterruption")]
        const OverrideMutedMicrophoneInterruption = 0x80;
/// When this option is specified with a category that supports both input and output, the session
/// will enable full-bandwidth audio in both input and output directions, if the Bluetooth route
/// supports it (e.g. certain AirPods models). It is currently compatible only with mode
/// ``AVAudioSessionModeDefault``.
///
/// - Support for this can be queried on input ports via the BluetoothMicrophone interface on a port,
/// via its member `highQualityRecording.isSupported`.
///
/// - Active sessions can see if full-bandwidth Bluetooth audio was successfully enabled by querying
/// the BluetoothMicrophone interface of the input port of the current route for:
/// `highQualityRecording.isEnabled`.
///
/// - When this option is provided alone, it will be enabled if the route supports it, otherwise the option
/// will be ignored. This option may be combined with ``AVAudioSessionCategoryOptionAllowBluetoothHFP``,
/// in which case HFP will be used as a fallback if the route does not support this
/// ``AVAudioSessionCategoryOptionBluetoothHighQualityRecording`` option.
///
/// - Note This option may increase input latency when enabled and is therefore not recommended for
/// real-time communication usage.
/// - Note Apps using ``AVAudioSessionCategoryOptionBluetoothHighQualityRecording``
/// may want to consider setting ``AVAudioSession/setPrefersNoInterruptionsFromSystemAlerts:error:``
/// while recording, to avoid the recording session being interrupted by an incoming call ringtone.
        #[doc(alias = "AVAudioSessionCategoryOptionBluetoothHighQualityRecording")]
        const BluetoothHighQualityRecording = 1<<19;
    }
}

// The Objective-C type encoding is that of the wrapped NSUInteger.
unsafe impl Encode for AVAudioSessionCategoryOptions {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// Pointer-to-value encoding, derived from the Encode impl above.
unsafe impl RefEncode for AVAudioSessionCategoryOptions {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
854
/// Values for AVAudioSessionInterruptionTypeKey in AVAudioSessionInterruptionNotification's
/// userInfo dictionary.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioninterruptiontype?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionInterruptionType(pub NSUInteger);
impl AVAudioSessionInterruptionType {
    /// The system has interrupted your audio session.
    ///
    /// NOTE: Began has raw value 1 and Ended has raw value 0 — declaration order here does
    /// not follow the numeric order.
    #[doc(alias = "AVAudioSessionInterruptionTypeBegan")]
    pub const Began: Self = Self(1);
    /// The interruption has ended.
    #[doc(alias = "AVAudioSessionInterruptionTypeEnded")]
    pub const Ended: Self = Self(0);
}

// The Objective-C type encoding is that of the wrapped NSUInteger.
unsafe impl Encode for AVAudioSessionInterruptionType {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// Pointer-to-value encoding, derived from the Encode impl above.
unsafe impl RefEncode for AVAudioSessionInterruptionType {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
879
/// Values for AVAudioSessionInterruptionOptionKey in AVAudioSessionInterruptionNotification's
/// userInfo dictionary.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioninterruptionoptions?language=objc)
// NS_OPTIONS
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionInterruptionOptions(pub NSUInteger);
bitflags::bitflags! {
    impl AVAudioSessionInterruptionOptions: NSUInteger {
/// Indicates that you should resume playback now that the interruption has ended.
/// Currently the only flag defined for this option set.
        #[doc(alias = "AVAudioSessionInterruptionOptionShouldResume")]
        const ShouldResume = 1;
    }
}

// The Objective-C type encoding is that of the wrapped NSUInteger.
unsafe impl Encode for AVAudioSessionInterruptionOptions {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// Pointer-to-value encoding, derived from the Encode impl above.
unsafe impl RefEncode for AVAudioSessionInterruptionOptions {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
903
/// Values for AVAudioSessionInterruptionReasonKey in AVAudioSessionInterruptionNotification's userInfo dictionary.
///
///
/// The audio session was interrupted because another session was activated.
///
///
/// The audio session was interrupted due to the app being suspended by the operating system.
/// Deprecated. Interruption notifications with reason 'wasSuspended' not present from iOS 16 onwards.
///
///
/// The audio session was interrupted due to the built-in mic being muted e.g. due to an iPad's Smart Folio being closed.
///
///
/// The audio session was interrupted due to route getting disconnected.
///
///
/// The audio session was interrupted due to device being doffed or locked.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioninterruptionreason?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionInterruptionReason(pub NSUInteger);
impl AVAudioSessionInterruptionReason {
    /// The audio session was interrupted because another session was activated.
    #[doc(alias = "AVAudioSessionInterruptionReasonDefault")]
    pub const Default: Self = Self(0);
    /// The audio session was interrupted due to the app being suspended by the operating system.
    #[doc(alias = "AVAudioSessionInterruptionReasonAppWasSuspended")]
    #[deprecated = "wasSuspended reason no longer present"]
    pub const AppWasSuspended: Self = Self(1);
    /// The audio session was interrupted due to the built-in mic being muted, e.g. due to an
    /// iPad's Smart Folio being closed.
    #[doc(alias = "AVAudioSessionInterruptionReasonBuiltInMicMuted")]
    pub const BuiltInMicMuted: Self = Self(2);
    /// The audio session was interrupted because route was disconnected.
    ///
    /// NOTE(review): the raw values jump from 2 to 4; no constant with value 3 is defined here,
    /// even though the type-level docs above also describe a "device doffed or locked" reason —
    /// presumably that reason is platform-gated and omitted by the generator; confirm against
    /// Apple's current AVAudioSession headers.
    #[doc(alias = "AVAudioSessionInterruptionReasonRouteDisconnected")]
    pub const RouteDisconnected: Self = Self(4);
}

// The Objective-C type encoding is that of the wrapped NSUInteger.
unsafe impl Encode for AVAudioSessionInterruptionReason {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// Pointer-to-value encoding, derived from the Encode impl above.
unsafe impl RefEncode for AVAudioSessionInterruptionReason {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
947
/// options for use when calling setActive:withOptions:error:
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionsetactiveoptions?language=objc)
// NS_OPTIONS
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionSetActiveOptions(pub NSUInteger);
bitflags::bitflags! {
    impl AVAudioSessionSetActiveOptions: NSUInteger {
/// Notify an interrupted app that the interruption has ended and it may resume playback. Only
/// valid on session deactivation. Currently the only flag defined for this option set.
        #[doc(alias = "AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation")]
        const NotifyOthersOnDeactivation = 1;
    }
}

// The Objective-C type encoding is that of the wrapped NSUInteger.
unsafe impl Encode for AVAudioSessionSetActiveOptions {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// Pointer-to-value encoding, derived from the Encode impl above.
unsafe impl RefEncode for AVAudioSessionSetActiveOptions {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
971
/// Values for AVAudioSessionSilenceSecondaryAudioHintTypeKey in
/// AVAudioSessionSilenceSecondaryAudioHintNotification's userInfo dictionary, to indicate whether
/// optional secondary audio muting should begin or end.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionsilencesecondaryaudiohinttype?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionSilenceSecondaryAudioHintType(pub NSUInteger);
impl AVAudioSessionSilenceSecondaryAudioHintType {
    /// Another application's primary audio has started.
    ///
    /// NOTE: Begin has raw value 1 and End has raw value 0 — declaration order here does not
    /// follow the numeric order.
    #[doc(alias = "AVAudioSessionSilenceSecondaryAudioHintTypeBegin")]
    pub const Begin: Self = Self(1);
    /// Another application's primary audio has stopped.
    #[doc(alias = "AVAudioSessionSilenceSecondaryAudioHintTypeEnd")]
    pub const End: Self = Self(0);
}

// The Objective-C type encoding is that of the wrapped NSUInteger.
unsafe impl Encode for AVAudioSessionSilenceSecondaryAudioHintType {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// Pointer-to-value encoding, derived from the Encode impl above.
unsafe impl RefEncode for AVAudioSessionSilenceSecondaryAudioHintType {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
997
/// Values to be used by setAggregatedIOPreference:error: method.
///
/// Starting in iOS 10, applications that use AVCaptureSession on iPads and iPhones that
/// support taking Live Photos, will have non-aggregated audio I/O unless the app opts out by
/// setting its AVAudioSessionIOType to Aggregated. Non-aggregated audio I/O means that separate
/// threads will be used to service audio I/O for input and output directions.
///
/// Note that in cases where the I/O is not aggregated, the sample rate and IO buffer duration
/// properties will map to the output audio device. In this scenario, the input and
/// output audio hardware may be running at different sample rates and with different IO buffer
/// durations. If your app requires input and output audio to be presented in the same realtime
/// I/O callback, or requires that input and output audio have the same sample rate or IO buffer
/// duration, or if your app requires the ability to set a preferred sample rate or IO buffer duration
/// for audio input, set the AVAudioSessionIOType to Aggregated.
///
/// Apps that don't use AVCaptureSession and use AVAudioSessionCategoryPlayAndRecord will continue
/// to have aggregated audio I/O, as in previous versions of iOS.
///
/// Added in iOS 10.0. Not applicable on watchOS, tvOS, macOS.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioniotype?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionIOType(pub NSUInteger);
impl AVAudioSessionIOType {
    /// The default value. If your app does not use AVCaptureSession or does not have any specific
    /// requirement for aggregating input and output audio in the same realtime I/O callback, use this
    /// value. Note that if your app does not use AVCaptureSession, it will get aggregated I/O when using
    /// AVAudioSessionCategoryPlayAndRecord.
    ///
    /// If your app does utilize AVCaptureSession, use of this value will allow AVCaptureSession to
    /// start recording without glitching already running output audio and will allow the system to
    /// utilize power-saving optimizations.
    #[doc(alias = "AVAudioSessionIOTypeNotSpecified")]
    pub const NotSpecified: Self = Self(0);
    /// Use this value if your session uses AVAudioSessionCategoryPlayAndRecord and requires input and
    /// output audio to be presented in the same realtime I/O callback. For example, if your app will be
    /// using a RemoteIO with both input and output enabled.
    ///
    /// Note that your session's preference to use aggregated IO will not be honored if it specifies
    /// AVAudioSessionCategoryOptionMixWithOthers AND another app's audio session was already active
    /// with non-mixable, non-aggregated input/output.
    #[doc(alias = "AVAudioSessionIOTypeAggregated")]
    pub const Aggregated: Self = Self(1);
}

// The Objective-C type encoding is that of the wrapped NSUInteger.
unsafe impl Encode for AVAudioSessionIOType {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// Pointer-to-value encoding, derived from the Encode impl above.
unsafe impl RefEncode for AVAudioSessionIOType {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
1056
/// Starting in iOS 11, tvOS 11, and watchOS 5, the route sharing policy allows a session
/// to specify that its output audio should be routed somewhere other than the default system output,
/// when appropriate alternative routes are available.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionroutesharingpolicy?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionRouteSharingPolicy(pub NSUInteger);
impl AVAudioSessionRouteSharingPolicy {
    /// Follow normal rules for routing audio output.
    #[doc(alias = "AVAudioSessionRouteSharingPolicyDefault")]
    pub const Default: Self = Self(0);
    /// Route output to the shared long-form audio output. A session whose primary use case is as a
    /// music or podcast player may use this value to play to the same output as the built-in Music (iOS),
    /// Podcasts, or iTunes (macOS) applications. Typically applications that use this policy will also
    /// want to sign up for remote control events as documented in “Event Handling Guide for UIKit Apps”
    /// and will want to utilize MediaPlayer framework’s MPNowPlayingInfoCenter class. All applications
    /// on the system that use the long-form audio route sharing policy will have their audio routed to the
    /// same location.
    /// Apps running on watchOS using this policy will also be able to play audio in the background,
    /// as long as an eligible audio route can be activated. Apps running on watchOS using this policy
    /// must use -activateWithOptions:completionHandler: instead of -setActive:withOptions:error: in
    /// order to ensure that the user will be given the opportunity to pick an appropriate audio route
    /// in cases where the system is unable to automatically pick the route.
    #[doc(alias = "AVAudioSessionRouteSharingPolicyLongFormAudio")]
    pub const LongFormAudio: Self = Self(1);
    /// Deprecated. Replaced by AVAudioSessionRouteSharingPolicyLongFormAudio; shares its raw value.
    #[doc(alias = "AVAudioSessionRouteSharingPolicyLongForm")]
    #[deprecated]
    pub const LongForm: Self = Self(AVAudioSessionRouteSharingPolicy::LongFormAudio.0);
    /// Applications should not attempt to set this value directly. On iOS, this value will be set by
    /// the system in cases where route picker UI is used to direct video to a wireless route.
    #[doc(alias = "AVAudioSessionRouteSharingPolicyIndependent")]
    pub const Independent: Self = Self(2);
    /// Route output to the shared long-form video output. A session whose primary use case is as a
    /// movie or other long-form video content player may use this value to play to the same output as
    /// other long-form video content applications such as the built-in TV (iOS) application. Applications
    /// that use this policy will also want to set the AVInitialRouteSharingPolicy key
    /// in their Info.plist to "LongFormVideo". All applications on the system that use the long-form video
    /// route sharing policy will have their audio and video routed to the same location (e.g. AppleTV when
    /// an AirPlay route is selected). Video content not using this route sharing policy will remain local
    /// to the playback device even when long form video content is being routed to AirPlay.
    #[doc(alias = "AVAudioSessionRouteSharingPolicyLongFormVideo")]
    pub const LongFormVideo: Self = Self(3);
}

// The Objective-C type encoding is that of the wrapped NSUInteger.
unsafe impl Encode for AVAudioSessionRouteSharingPolicy {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// Pointer-to-value encoding, derived from the Encode impl above.
unsafe impl RefEncode for AVAudioSessionRouteSharingPolicy {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
1116
/// The prompt style is a hint to sessions that use AVAudioSessionModeVoicePrompt to modify the type of
/// prompt they play in response to other audio activity on the system, such as Siri or phone calls.
/// Sessions that issue voice prompts are encouraged to pay attention to changes in the prompt style and
/// modify their prompts in response. Apple encourages the use of non-verbal prompts when the Short
/// style is requested.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionpromptstyle?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionPromptStyle(pub NSUInteger);
impl AVAudioSessionPromptStyle {
    /// Indicates that another session is actively using microphone input and would be negatively
    /// impacted by having prompts play at that time. For example if Siri is recognizing speech,
    /// having navigation or exercise prompts play, could interfere with its ability to accurately
    /// recognize the user’s speech. Client sessions should refrain from playing any prompts while
    /// the prompt style is None.
    ///
    /// The raw value is the four-character code 'none' (0x6e6f6e65).
    #[doc(alias = "AVAudioSessionPromptStyleNone")]
    pub const None: Self = Self(0x6e6f6e65);
    /// Indicates one of three states: Siri is active but not recording, voicemail playback is
    /// active, or voice call is active. Short, non-verbal versions of prompts should be used.
    ///
    /// The raw value is the four-character code 'shrt' (0x73687274).
    #[doc(alias = "AVAudioSessionPromptStyleShort")]
    pub const Short: Self = Self(0x73687274);
    /// Indicates that normal (long, verbal) versions of prompts may be used.
    ///
    /// The raw value is the four-character code 'nrml' (0x6e726d6c).
    #[doc(alias = "AVAudioSessionPromptStyleNormal")]
    pub const Normal: Self = Self(0x6e726d6c);
}

// The Objective-C type encoding is that of the wrapped NSUInteger.
unsafe impl Encode for AVAudioSessionPromptStyle {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// Pointer-to-value encoding, derived from the Encode impl above.
unsafe impl RefEncode for AVAudioSessionPromptStyle {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
1154
/// Constants indicating stereo input audio orientation, for use with built-in mic input data sources with a stereo polar pattern selected.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiostereoorientation?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioStereoOrientation(pub NSInteger);
// Per-constant descriptions below are taken from the type-level documentation, matched in
// declaration order — confirm the Lightning-connector pairings against Apple's header.
impl AVAudioStereoOrientation {
    /// Indicates that audio capture orientation is not applicable (on mono capture, for instance).
    #[doc(alias = "AVAudioStereoOrientationNone")]
    pub const None: Self = Self(0);
    /// Indicates that audio capture should be oriented vertically, Lightning connector on the bottom.
    #[doc(alias = "AVAudioStereoOrientationPortrait")]
    pub const Portrait: Self = Self(1);
    /// Indicates that audio capture should be oriented vertically, Lightning connector on the top.
    #[doc(alias = "AVAudioStereoOrientationPortraitUpsideDown")]
    pub const PortraitUpsideDown: Self = Self(2);
    /// Indicates that audio capture should be oriented horizontally, Lightning connector on the right.
    #[doc(alias = "AVAudioStereoOrientationLandscapeRight")]
    pub const LandscapeRight: Self = Self(3);
    /// Indicates that audio capture should be oriented horizontally, Lightning connector on the left.
    #[doc(alias = "AVAudioStereoOrientationLandscapeLeft")]
    pub const LandscapeLeft: Self = Self(4);
}

// The Objective-C type encoding is that of the wrapped NSInteger (signed, unlike most
// neighboring types in this file).
unsafe impl Encode for AVAudioStereoOrientation {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// Pointer-to-value encoding, derived from the Encode impl above.
unsafe impl RefEncode for AVAudioStereoOrientation {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
1193
/// These are the values returned by recordPermission.
///
/// Each value is a four-character code packed into an unsigned integer
/// ('undt', 'deny', 'grnt'), mirroring the Objective-C enum values.
/// All constants are deprecated upstream (see the `#[deprecated]`
/// attributes); the replacement API is presumably
/// `AVAudioApplication.recordPermission` — confirm against current SDK docs.
///
/// Introduced: ios(8.0), watchos(4.0)
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionrecordpermission?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionRecordPermission(pub NSUInteger);
impl AVAudioSessionRecordPermission {
    /// The user has not yet been asked for permission.
    ///
    /// Four-char code 'undt'.
    #[doc(alias = "AVAudioSessionRecordPermissionUndetermined")]
    #[deprecated]
    pub const Undetermined: Self = Self(0x756e6474);
    /// The user has been asked and has denied permission.
    ///
    /// Four-char code 'deny'.
    #[doc(alias = "AVAudioSessionRecordPermissionDenied")]
    #[deprecated]
    pub const Denied: Self = Self(0x64656e79);
    /// The user has been asked and has granted permission.
    ///
    /// Four-char code 'grnt'.
    #[doc(alias = "AVAudioSessionRecordPermissionGranted")]
    #[deprecated]
    pub const Granted: Self = Self(0x67726e74);
}
1220
// SAFETY: `AVAudioSessionRecordPermission` is `#[repr(transparent)]` over
// `NSUInteger`, so it shares that type's Objective-C encoding.
unsafe impl Encode for AVAudioSessionRecordPermission {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// SAFETY: References are encoded as a pointer to the value encoding above.
unsafe impl RefEncode for AVAudioSessionRecordPermission {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
1228
/// Modes describing how the session's audio is rendered, ranging from
/// mono/stereo through surround and Dolby variants.
///
/// [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionrenderingmode?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionRenderingMode(pub NSInteger);
impl AVAudioSessionRenderingMode {
    /// Default Mode when no asset is loaded or playing.
    #[doc(alias = "AVAudioSessionRenderingModeNotApplicable")]
    pub const NotApplicable: Self = Self(0);
    /// Default mode for non multi-channel cases.
    #[doc(alias = "AVAudioSessionRenderingModeMonoStereo")]
    pub const MonoStereo: Self = Self(1);
    /// Default mode for multi-channel cases that do not fall into the modes below.
    #[doc(alias = "AVAudioSessionRenderingModeSurround")]
    pub const Surround: Self = Self(2);
    /// Fallback mode if provided content is Dolby variant but hardware capabilities don't support it.
    #[doc(alias = "AVAudioSessionRenderingModeSpatialAudio")]
    pub const SpatialAudio: Self = Self(3);
    /// Dolby Audio mode.
    #[doc(alias = "AVAudioSessionRenderingModeDolbyAudio")]
    pub const DolbyAudio: Self = Self(4);
    /// Dolby Atmos mode.
    #[doc(alias = "AVAudioSessionRenderingModeDolbyAtmos")]
    pub const DolbyAtmos: Self = Self(5);
}
1254
// SAFETY: `AVAudioSessionRenderingMode` is `#[repr(transparent)]` over
// `NSInteger`, so it shares that type's Objective-C encoding.
unsafe impl Encode for AVAudioSessionRenderingMode {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// SAFETY: References are encoded as a pointer to the value encoding above.
unsafe impl RefEncode for AVAudioSessionRenderingMode {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
1262
/// Various modes to inject audio coming from a session to another app’s input stream.
///
/// Applications can state their intent to mix locally generated audio, which should consist primarily of
/// synthesized speech, to another app's input stream. This feature is intended to be used by accessibility apps
/// implementing augmentative and alternative communication systems that enable users with disabilities to
/// communicate with synthesized speech. When input is muted, microphone injection will also be muted.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmicrophoneinjectionmode?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionMicrophoneInjectionMode(pub NSInteger);
impl AVAudioSessionMicrophoneInjectionMode {
    /// Default state, microphone injection is not preferred.
    #[doc(alias = "AVAudioSessionMicrophoneInjectionModeNone")]
    pub const None: Self = Self(0);
    /// Inject Spoken Audio, like synthesized speech, with microphone audio.
    #[doc(alias = "AVAudioSessionMicrophoneInjectionModeSpokenAudio")]
    pub const SpokenAudio: Self = Self(1);
}
1283
// SAFETY: `AVAudioSessionMicrophoneInjectionMode` is `#[repr(transparent)]`
// over `NSInteger`, so it shares that type's Objective-C encoding.
unsafe impl Encode for AVAudioSessionMicrophoneInjectionMode {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// SAFETY: References are encoded as a pointer to the value encoding above.
unsafe impl RefEncode for AVAudioSessionMicrophoneInjectionMode {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}