objc2-avf-audio 0.3.2

Bindings to the AVFAudio framework
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
//! This file has been automatically generated by `objc2`'s `header-translator`.
//! DO NOT EDIT
use objc2::__framework_prelude::*;
use objc2_foundation::*;

use crate::*;

/// A port describes a specific type of audio input or output device or connector.
///
/// This is a "typed enum" (`NS_TYPED_ENUM`): the set of valid values is the
/// `NSString` constants declared below (e.g. `AVAudioSessionPortHeadphones`),
/// not integer variants.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionport?language=objc)
// NS_TYPED_ENUM
pub type AVAudioSessionPort = NSString;

// Port-name constants for AVAudioSessionPort.
// NOTE(review): each static is exposed as `Option<&'static _>` — presumably because
// the symbol may be weakly linked / absent on older OS versions; confirm against
// objc2's header-translator conventions.
extern "C" {
    /// Continuity microphone for Apple TV.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportcontinuitymicrophone?language=objc)
    pub static AVAudioSessionPortContinuityMicrophone: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Line level input on a dock connector
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportlinein?language=objc)
    pub static AVAudioSessionPortLineIn: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Built-in microphone on an iOS device
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportbuiltinmic?language=objc)
    pub static AVAudioSessionPortBuiltInMic: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Microphone on a wired headset.  Headset refers to an accessory that has headphone outputs paired with a
    /// microphone.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportheadsetmic?language=objc)
    pub static AVAudioSessionPortHeadsetMic: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Line level output on a dock connector
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportlineout?language=objc)
    pub static AVAudioSessionPortLineOut: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Headphone or headset output
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportheadphones?language=objc)
    pub static AVAudioSessionPortHeadphones: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Output on a Bluetooth A2DP device
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportbluetootha2dp?language=objc)
    pub static AVAudioSessionPortBluetoothA2DP: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// The speaker you hold to your ear when on a phone call
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportbuiltinreceiver?language=objc)
    pub static AVAudioSessionPortBuiltInReceiver: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Built-in speaker on an iOS device
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportbuiltinspeaker?language=objc)
    pub static AVAudioSessionPortBuiltInSpeaker: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Output via High-Definition Multimedia Interface
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionporthdmi?language=objc)
    pub static AVAudioSessionPortHDMI: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Output on a remote AirPlay device
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportairplay?language=objc)
    pub static AVAudioSessionPortAirPlay: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Output on a Bluetooth Low Energy device
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportbluetoothle?language=objc)
    pub static AVAudioSessionPortBluetoothLE: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output on a Bluetooth Hands-Free Profile device
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportbluetoothhfp?language=objc)
    pub static AVAudioSessionPortBluetoothHFP: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output on a Universal Serial Bus device
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportusbaudio?language=objc)
    pub static AVAudioSessionPortUSBAudio: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output via Car Audio
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportcaraudio?language=objc)
    pub static AVAudioSessionPortCarAudio: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output that does not correspond to real audio hardware
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportvirtual?language=objc)
    pub static AVAudioSessionPortVirtual: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output connected via the PCI (Peripheral Component Interconnect) bus
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportpci?language=objc)
    pub static AVAudioSessionPortPCI: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output connected via FireWire
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportfirewire?language=objc)
    pub static AVAudioSessionPortFireWire: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output connected via DisplayPort
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportdisplayport?language=objc)
    pub static AVAudioSessionPortDisplayPort: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output connected via AVB (Audio Video Bridging)
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportavb?language=objc)
    pub static AVAudioSessionPortAVB: Option<&'static AVAudioSessionPort>;
}

extern "C" {
    /// Input or output connected via Thunderbolt
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportthunderbolt?language=objc)
    pub static AVAudioSessionPortThunderbolt: Option<&'static AVAudioSessionPort>;
}

/// A category defines a broad set of behaviors for a session.
///
/// This is a "typed enum" (`NS_TYPED_ENUM`): the set of valid values is the
/// `NSString` constants declared below.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategory?language=objc)
// NS_TYPED_ENUM
pub type AVAudioSessionCategory = NSString;

extern "C" {
    /// Use this category for background sounds such as rain, car engine noise, etc.
    /// Mixes with other music.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategoryambient?language=objc)
    pub static AVAudioSessionCategoryAmbient: Option<&'static AVAudioSessionCategory>;
}

extern "C" {
    /// Use this category for background sounds.  Other music will stop playing.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategorysoloambient?language=objc)
    pub static AVAudioSessionCategorySoloAmbient: Option<&'static AVAudioSessionCategory>;
}

extern "C" {
    /// Use this category for music tracks.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategoryplayback?language=objc)
    pub static AVAudioSessionCategoryPlayback: Option<&'static AVAudioSessionCategory>;
}

extern "C" {
    /// Use this category when recording audio.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategoryrecord?language=objc)
    pub static AVAudioSessionCategoryRecord: Option<&'static AVAudioSessionCategory>;
}

extern "C" {
    /// Use this category when recording and playing back audio.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategoryplayandrecord?language=objc)
    pub static AVAudioSessionCategoryPlayAndRecord: Option<&'static AVAudioSessionCategory>;
}

extern "C" {
    /// Use this category when using a hardware codec or signal processor while
    /// not playing or recording audio.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategoryaudioprocessing?language=objc)
    #[deprecated = "No longer supported"]
    pub static AVAudioSessionCategoryAudioProcessing: Option<&'static AVAudioSessionCategory>;
}

extern "C" {
    /// Use this category to customize the usage of available audio accessories and built-in audio hardware.
    /// For example, this category provides an application with the ability to use an available USB output
    /// and headphone output simultaneously for separate, distinct streams of audio data. Use of
    /// this category by an application requires a more detailed knowledge of, and interaction with,
    /// the capabilities of the available audio routes.  May be used for input, output, or both.
    /// Note that not all output types and output combinations are eligible for multi-route.  Input is limited
    /// to the last-in input port. Eligible inputs consist of the following:
    /// AVAudioSessionPortUSBAudio, AVAudioSessionPortHeadsetMic, and AVAudioSessionPortBuiltInMic.
    /// Eligible outputs consist of the following:
    /// AVAudioSessionPortUSBAudio, AVAudioSessionPortLineOut, AVAudioSessionPortHeadphones, AVAudioSessionPortHDMI,
    /// and AVAudioSessionPortBuiltInSpeaker.
    /// Note that AVAudioSessionPortBuiltInSpeaker is only allowed to be used when there are no other eligible
    /// outputs connected.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategorymultiroute?language=objc)
    pub static AVAudioSessionCategoryMultiRoute: Option<&'static AVAudioSessionCategory>;
}

/// Modes modify the audio category in order to introduce behavior that is tailored to the specific
/// use of audio within an application.  Available in iOS 5.0 and greater.
///
/// This is a "typed enum" (`NS_TYPED_ENUM`): the set of valid values is the
/// `NSString` constants declared below.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmode?language=objc)
// NS_TYPED_ENUM
pub type AVAudioSessionMode = NSString;

extern "C" {
    /// The default mode
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodedefault?language=objc)
    pub static AVAudioSessionModeDefault: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Only valid with AVAudioSessionCategoryPlayAndRecord.  Appropriate for Voice over IP
    /// (VoIP) applications.  Reduces the number of allowable audio routes to be only those
    /// that are appropriate for VoIP applications and may engage appropriate system-supplied
    /// signal processing.  Has the side effect of setting AVAudioSessionCategoryOptionAllowBluetoothHFP.
    /// Using this mode without the VoiceProcessing IO unit or AVAudioEngine with voice processing enabled will result in the following:
    /// - Chat-specific signal processing such as echo cancellation or automatic gain correction will not be loaded
    /// - Dynamic processing on input and output will be disabled resulting in a lower output playback level.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodevoicechat?language=objc)
    pub static AVAudioSessionModeVoiceChat: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Set by Game Kit on behalf of an application that uses a GKVoiceChat object; valid
    /// only with the AVAudioSessionCategoryPlayAndRecord category.
    /// Do not set this mode directly. If you need similar behavior and are not using
    /// a GKVoiceChat object, use AVAudioSessionModeVoiceChat instead.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodegamechat?language=objc)
    pub static AVAudioSessionModeGameChat: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Only valid with AVAudioSessionCategoryPlayAndRecord or AVAudioSessionCategoryRecord.
    /// Modifies the audio routing options and may engage appropriate system-supplied signal processing.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodevideorecording?language=objc)
    pub static AVAudioSessionModeVideoRecording: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Appropriate for applications that wish to minimize the effect of system-supplied signal
    /// processing for input and/or output audio signals.
    /// This mode disables some dynamics processing on input and output resulting in a lower output playback level.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodemeasurement?language=objc)
    pub static AVAudioSessionModeMeasurement: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Appropriate for applications playing movie content. Only valid with AVAudioSessionCategoryPlayback.
    /// Setting this mode engages appropriate output signal processing for movie playback scenarios.
    /// Content using this mode is eligible for Enhance Dialogue processing on supported routes with capable hardware
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodemovieplayback?language=objc)
    pub static AVAudioSessionModeMoviePlayback: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Only valid with AVAudioSessionCategoryPlayAndRecord. Reduces the number of allowable audio
    /// routes to be only those that are appropriate for video chat applications. May engage appropriate
    /// system-supplied signal processing.  Has the side effect of setting
    /// AVAudioSessionCategoryOptionAllowBluetoothHFP and AVAudioSessionCategoryOptionDefaultToSpeaker.
    /// Using this mode without the VoiceProcessing IO unit or AVAudioEngine with voice processing enabled will result in the following:
    /// - Chat-specific signal processing such as echo cancellation or automatic gain correction will not be loaded
    /// - Dynamic processing on input and output will be disabled resulting in a lower output playback level.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodevideochat?language=objc)
    pub static AVAudioSessionModeVideoChat: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Appropriate for applications which play spoken audio and wish to be paused (via audio session interruption) rather than ducked
    /// if another app (such as a navigation app) plays a spoken audio prompt.  Examples of apps that would use this are podcast players and
    /// audio books.  For more information, see the related category option AVAudioSessionCategoryOptionInterruptSpokenAudioAndMixWithOthers.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodespokenaudio?language=objc)
    pub static AVAudioSessionModeSpokenAudio: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Appropriate for applications which play audio using text to speech. Setting this mode allows for different routing behaviors when
    /// connected to certain audio devices such as CarPlay. An example of an app that would use this mode is a turn by turn navigation app that
    /// plays short prompts to the user. Typically, these same types of applications would also configure their session to use
    /// AVAudioSessionCategoryOptionDuckOthers and AVAudioSessionCategoryOptionInterruptSpokenAudioAndMixWithOthers
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodevoiceprompt?language=objc)
    pub static AVAudioSessionModeVoicePrompt: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Appropriate for applications playing short-form video content.
    ///
    /// Only valid with ``AVAudioSessionCategoryPlayback``.
    /// Not applicable with ``AVAudioSessionRouteSharingPolicy/AVAudioSessionRouteSharingPolicyLongFormAudio``,
    /// or ``AVAudioSessionRouteSharingPolicy/AVAudioSessionRouteSharingPolicyLongFormVideo``.
    ///
    /// When this mode is set:
    /// - system will make informed decisions to automatically unmute the output of the media if the user shows intention of unmuting.
    /// - When auto-unmuted, ``AVAudioSessionUserIntentToUnmuteOutputNotification`` and ``AVAudioSessionOutputMuteStateChangeNotification`` will be sent.
    /// - if the session is output muted, system may prevent interrupting other active audio apps.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmodeshortformvideo?language=objc)
    pub static AVAudioSessionModeShortFormVideo: Option<&'static AVAudioSessionMode>;
}

extern "C" {
    /// Notification sent to registered listeners when the system has interrupted the audio
    /// session and when the interruption has ended.
    ///
    /// Check the notification's userInfo dictionary for the interruption type, which is either
    /// Begin or End. In the case of an end interruption notification, check the userInfo dictionary
    /// for AVAudioSessionInterruptionOptions that indicate whether audio playback should resume.
    /// In the case of a begin interruption notification, the reason for the interruption can be found
    /// within the info dictionary under the key AVAudioSessionInterruptionReasonKey.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioninterruptionnotification?language=objc)
    pub static AVAudioSessionInterruptionNotification: Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners when an audio route change has occurred.
    ///
    /// Check the notification's userInfo dictionary for the route change reason and for a description
    /// of the previous audio route.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionroutechangenotification?language=objc)
    pub static AVAudioSessionRouteChangeNotification: Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners if the media server is killed.
    ///
    /// In the event that the server is killed, take appropriate steps to handle requests that come in
    /// before the server resets.  See Technical Q&A QA1749.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmediaserviceswerelostnotification?language=objc)
    pub static AVAudioSessionMediaServicesWereLostNotification: Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners when the media server restarts.
    ///
    /// In the event that the server restarts, take appropriate steps to re-initialize any audio objects
    /// used by your application.  See Technical Q&A QA1749.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmediaserviceswereresetnotification?language=objc)
    pub static AVAudioSessionMediaServicesWereResetNotification:
        Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners when they are in the foreground with an active
    /// audio session and primary audio from other applications starts and stops.
    ///
    /// Check the notification's userInfo dictionary for the notification type, which is either Begin or
    /// End. Foreground applications may use this notification as a hint to enable or disable audio that
    /// is secondary to the functionality of the application. For more information, see the related
    /// property secondaryAudioShouldBeSilencedHint.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionsilencesecondaryaudiohintnotification?language=objc)
    pub static AVAudioSessionSilenceSecondaryAudioHintNotification:
        Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners when spatial playback capabilities are changed due to a
    /// change in user preference.
    ///
    /// Check the notification's userInfo dictionary for AVAudioSessionSpatialAudioEnabledKey to check if spatial
    /// audio is enabled.
    ///
    /// Observers of this notification should also observe AVAudioSessionRouteChangeNotification since a route change
    /// may also result in a change in the ability for the system to play spatial audio. Use
    /// AVAudioSessionPortDescription's isSpatialAudioEnabled property to check if the current route supports
    /// spatialized playback.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionspatialplaybackcapabilitieschangednotification?language=objc)
    pub static AVAudioSessionSpatialPlaybackCapabilitiesChangedNotification:
        Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners when the resolved rendering mode changes.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionrenderingmodechangenotification?language=objc)
    pub static AVAudioSessionRenderingModeChangeNotification: Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners when the rendering capabilities change.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionrenderingcapabilitieschangenotification?language=objc)
    pub static AVAudioSessionRenderingCapabilitiesChangeNotification:
        Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners when the system's capability to inject audio into input stream is changed
    ///
    /// Check the notification's userInfo dictionary for AVAudioSessionMicrophoneInjectionIsAvailableKey to check if microphone
    /// injection is available. Use AVAudioSession's isMicrophoneInjectionAvailable property to check if microphone injection is available
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmicrophoneinjectioncapabilitieschangenotification?language=objc)
    pub static AVAudioSessionMicrophoneInjectionCapabilitiesChangeNotification:
        Option<&'static NSNotificationName>;
}

extern "C" {
    /// Notification sent to registered listeners when session's output mute state changes.
    ///
    /// The userInfo dictionary will contain the updated output mute value as accessed by ``AVAudioSessionMuteStateKey``
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionoutputmutestatechangenotification?language=objc)
    pub static AVAudioSessionOutputMuteStateChangeNotification: Option<&'static NSNotificationName>;
}

extern "C" {
    /// Keys for ``AVAudioSessionOutputMuteStateChangeNotification``
    /// Value is `NSNumber` type with boolean value 0 for unmuted or value 1 for muted (samples zeroed out)
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmutestatekey?language=objc)
    pub static AVAudioSessionMuteStateKey: Option<&'static NSString>;
}

extern "C" {
    /// Notification sent to registered listeners when the application's output is muted and user hints to unmute.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionuserintenttounmuteoutputnotification?language=objc)
    pub static AVAudioSessionUserIntentToUnmuteOutputNotification:
        Option<&'static NSNotificationName>;
}

extern "C" {
    /// Keys for AVAudioSessionSpatialPlaybackCapabilitiesChangedNotification.
    /// Value is an NSNumber whose boolean value indicates if spatial audio is enabled.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionspatialaudioenabledkey?language=objc)
    pub static AVAudioSessionSpatialAudioEnabledKey: Option<&'static NSString>;
}

extern "C" {
    /// Keys for AVAudioSessionInterruptionNotification.
    /// Value is an NSNumber representing an AVAudioSessionInterruptionType.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioninterruptiontypekey?language=objc)
    pub static AVAudioSessionInterruptionTypeKey: Option<&'static NSString>;
}

extern "C" {
    /// Only present for end interruption events.  Value is of type AVAudioSessionInterruptionOptions.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioninterruptionoptionkey?language=objc)
    pub static AVAudioSessionInterruptionOptionKey: Option<&'static NSString>;
}

extern "C" {
    /// Only present in begin interruption events. Value is of type AVAudioSessionInterruptionReason.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioninterruptionreasonkey?language=objc)
    pub static AVAudioSessionInterruptionReasonKey: Option<&'static NSString>;
}

extern "C" {
    /// Only present in begin interruption events, where the interruption is a direct result of the
    /// application being suspended by the operating system. Value is a boolean NSNumber, where a true
    /// value indicates that the interruption is the result of the application being suspended, rather
    /// than being interrupted by another audio session.
    ///
    /// Starting in iOS 10, the system will deactivate the audio session of most apps in response to the
    /// app process being suspended. When the app starts running again, it will receive the notification
    /// that its session has been deactivated by the system. Note that the notification is necessarily
    /// delayed in time, due to the fact that the application was suspended at the time the session was
    /// deactivated by the system and the notification can only be delivered once the app is running
    /// again.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioninterruptionwassuspendedkey?language=objc)
    #[deprecated = "No longer supported - see AVAudioSessionInterruptionReasonKey"]
    pub static AVAudioSessionInterruptionWasSuspendedKey: Option<&'static NSString>;
}

extern "C" {
    /// Keys for AVAudioSessionRouteChangeNotification.
    /// Value is an NSNumber representing an AVAudioSessionRouteChangeReason.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionroutechangereasonkey?language=objc)
    pub static AVAudioSessionRouteChangeReasonKey: Option<&'static NSString>;
}

extern "C" {
    /// Value is AVAudioSessionRouteDescription *
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionroutechangepreviousroutekey?language=objc)
    pub static AVAudioSessionRouteChangePreviousRouteKey: Option<&'static NSString>;
}

extern "C" {
    /// Keys for AVAudioSessionSilenceSecondaryAudioHintNotification.
    /// Value is an NSNumber representing an AVAudioSessionSilenceSecondaryAudioHintType.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionsilencesecondaryaudiohinttypekey?language=objc)
    pub static AVAudioSessionSilenceSecondaryAudioHintTypeKey: Option<&'static NSString>;
}

extern "C" {
    /// Keys for AVAudioSessionRenderingModeChangeNotification.
    /// Contains a payload of NSInteger representing the new resolved rendering mode.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionrenderingmodenewrenderingmodekey?language=objc)
    pub static AVAudioSessionRenderingModeNewRenderingModeKey: Option<&'static NSString>;
}

extern "C" {
    /// Keys for AVAudioSessionMicrophoneInjectionCapabilitiesChangeNotification
    ///
    /// Indicates if microphone injection is available.
    /// Value is an NSNumber whose boolean value indicates if microphone injection is available.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmicrophoneinjectionisavailablekey?language=objc)
    pub static AVAudioSessionMicrophoneInjectionIsAvailableKey: Option<&'static NSString>;
}

extern "C" {
    /// Notification sent to registered listeners when there are changes in ``availableInputs``.
    ///
    /// There is no payload (userInfo dictionary) associated with the ``AVAudioSessionAvailableInputsChangeNotification`` notification.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionavailableinputschangenotification?language=objc)
    pub static AVAudioSessionAvailableInputsChangeNotification: Option<&'static NSNotificationName>;
}

/// Options for use with activateWithOptions:completionHandler:
///
/// Reserved for future use. Added in watchOS 5.0.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionactivationoptions?language=objc)
// NS_OPTIONS
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionActivationOptions(pub NSUInteger);
bitflags::bitflags! {
    impl AVAudioSessionActivationOptions: NSUInteger {
        /// No options; the only value currently defined.
        #[doc(alias = "AVAudioSessionActivationOptionNone")]
        const None = 0;
    }
}

// The options are passed over FFI as a plain NSUInteger.
unsafe impl Encode for AVAudioSessionActivationOptions {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

unsafe impl RefEncode for AVAudioSessionActivationOptions {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}

/// For use with overrideOutputAudioPort:error:
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionportoverride?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionPortOverride(pub NSUInteger);
impl AVAudioSessionPortOverride {
    /// No override.  Return audio routing to the default state for the current audio category.
    #[doc(alias = "AVAudioSessionPortOverrideNone")]
    pub const None: Self = Self(0);
    /// Route audio output to speaker.  Use this override with AVAudioSessionCategoryPlayAndRecord,
    /// which by default routes the output to the receiver.
    ///
    /// The raw value is the four-character code `'spkr'` (0x73706b72).
    #[doc(alias = "AVAudioSessionPortOverrideSpeaker")]
    pub const Speaker: Self = Self(0x73706b72);
}

// The enum is passed over FFI as a plain NSUInteger.
unsafe impl Encode for AVAudioSessionPortOverride {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

unsafe impl RefEncode for AVAudioSessionPortOverride {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}

/// Values for AVAudioSessionRouteChangeReasonKey in AVAudioSessionRouteChangeNotification's
/// userInfo dictionary
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionroutechangereason?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionRouteChangeReason(pub NSUInteger);
impl AVAudioSessionRouteChangeReason {
    /// The reason is unknown.
    #[doc(alias = "AVAudioSessionRouteChangeReasonUnknown")]
    pub const Unknown: Self = Self(0);
    /// A new device became available (e.g. headphones have been plugged in).
    #[doc(alias = "AVAudioSessionRouteChangeReasonNewDeviceAvailable")]
    pub const NewDeviceAvailable: Self = Self(1);
    /// The old device became unavailable (e.g. headphones have been unplugged).
    #[doc(alias = "AVAudioSessionRouteChangeReasonOldDeviceUnavailable")]
    pub const OldDeviceUnavailable: Self = Self(2);
    /// The audio category has changed (e.g. AVAudioSessionCategoryPlayback has been changed to
    /// AVAudioSessionCategoryPlayAndRecord).
    #[doc(alias = "AVAudioSessionRouteChangeReasonCategoryChange")]
    pub const CategoryChange: Self = Self(3);
    /// The route has been overridden (e.g. category is AVAudioSessionCategoryPlayAndRecord and
    /// the output has been changed from the receiver, which is the default, to the speaker).
    #[doc(alias = "AVAudioSessionRouteChangeReasonOverride")]
    pub const Override: Self = Self(4);
    // NOTE(review): value 5 is intentionally absent here; it does not appear in the generated
    // bindings — presumably reserved/unused upstream, confirm against the SDK header.
    /// The device woke from sleep.
    #[doc(alias = "AVAudioSessionRouteChangeReasonWakeFromSleep")]
    pub const WakeFromSleep: Self = Self(6);
    /// Returned when there is no route for the current category (for instance, the category is
    /// AVAudioSessionCategoryRecord but no input device is available).
    #[doc(alias = "AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory")]
    pub const NoSuitableRouteForCategory: Self = Self(7);
    /// Indicates that the set of input and/or output ports has not changed, but some aspect of
    /// their configuration has changed.  For example, a port's selected data source has changed.
    /// (Introduced in iOS 7.0, watchOS 2.0, tvOS 9.0).
    #[doc(alias = "AVAudioSessionRouteChangeReasonRouteConfigurationChange")]
    pub const RouteConfigurationChange: Self = Self(8);
}

// The enum is passed over FFI as a plain NSUInteger.
unsafe impl Encode for AVAudioSessionRouteChangeReason {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

unsafe impl RefEncode for AVAudioSessionRouteChangeReason {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}

/// Customization of various aspects of a category's behavior.
/// Use with ``AVAudioSession/setCategory:mode:options:error:``.
///
/// Applications must be prepared for changing category options to fail as behavior may change
/// in future releases. If an application changes its category, it should reassert the options,
/// since they are not sticky across category changes. Introduced in iOS 6.0 / watchOS 2.0 /
/// tvOS 9.0.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioncategoryoptions?language=objc)
// NS_OPTIONS
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionCategoryOptions(pub NSUInteger);
bitflags::bitflags! {
    impl AVAudioSessionCategoryOptions: NSUInteger {
        /// Controls whether other active audio apps will be interrupted or mixed with when your
        /// app's audio session goes active. Details depend on the category.
        ///
        /// - ``AVAudioSessionCategoryPlayAndRecord`` or ``AVAudioSessionCategoryMultiRoute``:
        ///   MixWithOthers defaults to false, but can be set to true, allowing other applications
        ///   to play in the background while your app has both audio input and output enabled.
        ///
        /// - ``AVAudioSessionCategoryPlayback``:
        ///   MixWithOthers defaults to false, but can be set to true, allowing other applications
        ///   to play in the background. Your app will still be able to play regardless of the
        ///   setting of the ringer switch.
        ///
        /// - Other categories:
        ///   MixWithOthers defaults to false and cannot be changed.
        ///
        /// MixWithOthers is only valid with ``AVAudioSessionCategoryPlayAndRecord``,
        /// ``AVAudioSessionCategoryPlayback``, and ``AVAudioSessionCategoryMultiRoute``.
        #[doc(alias = "AVAudioSessionCategoryOptionMixWithOthers")]
        const MixWithOthers = 0x1;
        /// Controls whether or not other active audio apps will be ducked when your app's audio
        /// session goes active. An example of this is a workout app, which provides periodic
        /// updates to the user. It reduces the volume of any music currently being played while
        /// it provides its status.
        ///
        /// Defaults to off. Note that the other audio will be ducked for as long as the current
        /// session is active. You will need to deactivate your audio session when you want to
        /// restore full volume playback (un-duck) other sessions.
        ///
        /// Setting this option will also make your session mixable with others
        /// (``AVAudioSessionCategoryOptionMixWithOthers`` will be set).
        ///
        /// DuckOthers is only valid with ``AVAudioSessionCategoryAmbient``,
        /// ``AVAudioSessionCategoryPlayAndRecord``, ``AVAudioSessionCategoryPlayback``, and
        /// ``AVAudioSessionCategoryMultiRoute``.
        #[doc(alias = "AVAudioSessionCategoryOptionDuckOthers")]
        const DuckOthers = 0x2;
        /// Deprecated - please see ``AVAudioSessionCategoryOptionAllowBluetoothHFP``
        #[doc(alias = "AVAudioSessionCategoryOptionAllowBluetooth")]
        #[deprecated = "renamed; please use AVAudioSessionCategoryOptions::AllowBluetoothHFP"]
        const AllowBluetooth = 0x4;
        /// Allows an application to change the default behavior of some audio session categories
        /// with regard to whether Bluetooth Hands-Free Profile (HFP) devices are available for
        /// routing. The exact behavior depends on the category.
        ///
        /// - ``AVAudioSessionCategoryPlayAndRecord``:
        ///   AllowBluetoothHFP defaults to false, but can be set to true, allowing a paired
        ///   bluetooth HFP device to appear as an available route for input, while playing
        ///   through the category-appropriate output.
        ///
        /// - ``AVAudioSessionCategoryRecord``:
        ///   AllowBluetoothHFP defaults to false, but can be set to true, allowing a paired
        ///   Bluetooth HFP device to appear as an available route for input.
        ///
        /// - Other categories:
        ///   AllowBluetoothHFP defaults to false and cannot be changed. Enabling Bluetooth for
        ///   input in these categories is not allowed.
        // Same bit (0x4) as the deprecated `AllowBluetooth`; this is its current name.
        #[doc(alias = "AVAudioSessionCategoryOptionAllowBluetoothHFP")]
        const AllowBluetoothHFP = 0x4;
        /// Allows an application to change the default behavior of some audio session categories
        /// with regard to the audio route. The exact behavior depends on the category.
        ///
        /// - ``AVAudioSessionCategoryPlayAndRecord``:
        ///   DefaultToSpeaker will default to false, but can be set to true, routing to Speaker
        ///   (instead of Receiver) when no other audio route is connected.
        ///
        /// - Other categories:
        ///   DefaultToSpeaker is always false and cannot be changed.
        #[doc(alias = "AVAudioSessionCategoryOptionDefaultToSpeaker")]
        const DefaultToSpeaker = 0x8;
        /// When a session with InterruptSpokenAudioAndMixWithOthers set goes active, then if
        /// there is another playing app whose session mode is ``AVAudioSessionModeSpokenAudio``
        /// (for podcast playback in the background, for example), then the spoken-audio session
        /// will be interrupted. A good use of this is for a navigation app that provides prompts
        /// to its user: it pauses any spoken audio currently being played while it plays the
        /// prompt.
        ///
        /// InterruptSpokenAudioAndMixWithOthers defaults to off. Note that the other app's audio
        /// will be paused for as long as the current session is active. You will need to
        /// deactivate your audio session to allow the other session to resume playback. Setting
        /// this option will also make your category mixable with others
        /// (``AVAudioSessionCategoryOptionMixWithOthers`` will be set). If you want other
        /// non-spoken audio apps to duck their audio when your app's session goes active, also
        /// set ``AVAudioSessionCategoryOptionDuckOthers``.
        ///
        /// Only valid with ``AVAudioSessionCategoryPlayAndRecord``,
        /// ``AVAudioSessionCategoryPlayback``, and ``AVAudioSessionCategoryMultiRoute``.
        // 0x11 = 0x10 | MixWithOthers (0x1): the MixWithOthers bit is part of this option.
        #[doc(alias = "AVAudioSessionCategoryOptionInterruptSpokenAudioAndMixWithOthers")]
        const InterruptSpokenAudioAndMixWithOthers = 0x11;
        /// Allows an application to change the default behavior of some audio session categories
        /// with regard to whether Bluetooth Advanced Audio Distribution Profile (A2DP) devices
        /// are available for routing. The exact behavior depends on the category.
        ///
        /// - ``AVAudioSessionCategoryPlayAndRecord``:
        ///   AllowBluetoothA2DP defaults to false, but can be set to true, allowing a paired
        ///   Bluetooth A2DP device to appear as an available route for output, while recording
        ///   through the category-appropriate input.
        ///
        /// - ``AVAudioSessionCategoryMultiRoute`` and ``AVAudioSessionCategoryRecord``:
        ///   AllowBluetoothA2DP is false, and cannot be set to true.
        ///
        /// - Other categories:
        ///   AllowBluetoothA2DP is always implicitly true and cannot be changed; Bluetooth A2DP
        ///   ports are always supported in output-only categories.
        ///
        /// Setting both ``AVAudioSessionCategoryOptionAllowBluetoothHFP``
        /// and ``AVAudioSessionCategoryOptionAllowBluetoothA2DP`` is
        /// allowed. In cases where a single Bluetooth device supports both HFP and A2DP, the HFP
        /// ports will be given a higher priority for routing. For HFP and A2DP ports on separate
        /// hardware devices, the last-in wins rule applies.
        #[doc(alias = "AVAudioSessionCategoryOptionAllowBluetoothA2DP")]
        const AllowBluetoothA2DP = 0x20;
        /// Allows an application to change the default behavior of some audio session categories
        /// with regard to showing AirPlay devices as available routes. This option applies to
        /// various categories in the same way as ``AVAudioSessionCategoryOptionAllowBluetoothA2DP``;
        /// see above for details. Only valid with ``AVAudioSessionCategoryPlayAndRecord``.
        #[doc(alias = "AVAudioSessionCategoryOptionAllowAirPlay")]
        const AllowAirPlay = 0x40;
        /// Some devices include a privacy feature that mutes the built-in microphone at a
        /// hardware level under certain conditions e.g. when the Smart Folio of an iPad is
        /// closed. The default behavior is to interrupt the session using the built-in
        /// microphone when that microphone is muted in hardware. This option allows an
        /// application to opt out of the default behavior if it is using a category that
        /// supports both input and output, such as ``AVAudioSessionCategoryPlayAndRecord``, and
        /// wants to allow its session to stay activated even when the microphone has been muted.
        /// The result would be that playback continues as normal, and microphone sample buffers
        /// would continue to be produced but all microphone samples would have a value of zero.
        ///
        /// This may be useful if an application knows that it wants to allow playback to
        /// continue and recording/monitoring a muted microphone will not lead to a poor user
        /// experience. Attempting to use this option with a session category that doesn't
        /// support the use of audio input will result in an error.
        ///
        /// - Note Under the default policy, a session will be interrupted if it is running input
        ///   at the time when the microphone is muted in hardware. Similarly, attempting to
        ///   start input when the microphone is muted will fail.
        /// - Note This option has no relation to the recordPermission property, which indicates
        ///   whether or not the user has granted permission to use microphone input.
        #[doc(alias = "AVAudioSessionCategoryOptionOverrideMutedMicrophoneInterruption")]
        const OverrideMutedMicrophoneInterruption = 0x80;
        /// When this option is specified with a category that supports both input and output,
        /// the session will enable full-bandwidth audio in both input & output directions, if
        /// the Bluetooth route supports it (e.g. certain AirPods models). It is currently
        /// compatible only with mode ``AVAudioSessionModeDefault``.
        ///
        /// - Support for this can be queried on input ports via the BluetoothMicrophone
        ///   interface on a port, via its member `highQualityRecording.isSupported`.
        ///
        /// - Active sessions can see if full-bandwidth Bluetooth audio was successfully enabled
        ///   by querying the BluetoothMicrophone interface of the input port of the current
        ///   route for: `highQualityRecording.isEnabled`.
        ///
        /// - When this option is provided alone, it will be enabled if the route supports it,
        ///   otherwise the option will be ignored. This option may be combined with
        ///   ``AVAudioSessionCategoryOptionAllowBluetoothHFP``, in which case HFP will be used
        ///   as a fallback if the route does not support this
        ///   ``AVAudioSessionCategoryOptionBluetoothHighQualityRecording`` option.
        ///
        /// - Note This option may increase input latency when enabled and is therefore not
        ///   recommended for real-time communication usage.
        /// - Note Apps using ``AVAudioSessionCategoryOptionBluetoothHighQualityRecording``
        ///   may want to consider setting
        ///   ``AVAudioSession/setPrefersNoInterruptionsFromSystemAlerts:error:`` while
        ///   recording, to avoid the recording session being interrupted by an incoming call
        ///   ringtone.
        #[doc(alias = "AVAudioSessionCategoryOptionBluetoothHighQualityRecording")]
        const BluetoothHighQualityRecording = 1<<19;
    }
}

// The options are passed over FFI as a plain NSUInteger.
unsafe impl Encode for AVAudioSessionCategoryOptions {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

unsafe impl RefEncode for AVAudioSessionCategoryOptions {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}

/// Values for AVAudioSessionInterruptionTypeKey in AVAudioSessionInterruptionNotification's
/// userInfo dictionary.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioninterruptiontype?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionInterruptionType(pub NSUInteger);
impl AVAudioSessionInterruptionType {
    // Note: `Began` is raw value 1 and `Ended` is raw value 0, mirroring the C enum.
    /// The system has interrupted your audio session.
    #[doc(alias = "AVAudioSessionInterruptionTypeBegan")]
    pub const Began: Self = Self(1);
    /// The interruption has ended.
    #[doc(alias = "AVAudioSessionInterruptionTypeEnded")]
    pub const Ended: Self = Self(0);
}

// The enum is passed over FFI as a plain NSUInteger.
unsafe impl Encode for AVAudioSessionInterruptionType {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

unsafe impl RefEncode for AVAudioSessionInterruptionType {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}

/// Values for AVAudioSessionInterruptionOptionKey in AVAudioSessionInterruptionNotification's
/// userInfo dictionary.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioninterruptionoptions?language=objc)
// NS_OPTIONS
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionInterruptionOptions(pub NSUInteger);
bitflags::bitflags! {
    impl AVAudioSessionInterruptionOptions: NSUInteger {
        /// Indicates that you should resume playback now that the interruption has ended.
        #[doc(alias = "AVAudioSessionInterruptionOptionShouldResume")]
        const ShouldResume = 1;
    }
}

// The options are passed over FFI as a plain NSUInteger.
unsafe impl Encode for AVAudioSessionInterruptionOptions {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

unsafe impl RefEncode for AVAudioSessionInterruptionOptions {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}

/// Values for AVAudioSessionInterruptionReasonKey in AVAudioSessionInterruptionNotification's
/// userInfo dictionary.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioninterruptionreason?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionInterruptionReason(pub NSUInteger);
impl AVAudioSessionInterruptionReason {
    /// The audio session was interrupted because another session was activated.
    #[doc(alias = "AVAudioSessionInterruptionReasonDefault")]
    pub const Default: Self = Self(0);
    /// The audio session was interrupted due to the app being suspended by the operating
    /// system. Interruption notifications with reason 'wasSuspended' are not present from
    /// iOS 16 onwards.
    #[doc(alias = "AVAudioSessionInterruptionReasonAppWasSuspended")]
    #[deprecated = "wasSuspended reason no longer present"]
    pub const AppWasSuspended: Self = Self(1);
    /// The audio session was interrupted due to the built-in mic being muted, e.g. due to an
    /// iPad's Smart Folio being closed.
    #[doc(alias = "AVAudioSessionInterruptionReasonBuiltInMicMuted")]
    pub const BuiltInMicMuted: Self = Self(2);
    // NOTE(review): raw value 3 is absent here, although the header prose also describes a
    // "device being doffed or locked" reason — presumably a platform-gated variant not emitted
    // for this target; confirm against the SDK header before relying on the gap.
    /// The audio session was interrupted because the route was disconnected.
    #[doc(alias = "AVAudioSessionInterruptionReasonRouteDisconnected")]
    pub const RouteDisconnected: Self = Self(4);
}

// The enum is passed over FFI as a plain NSUInteger.
unsafe impl Encode for AVAudioSessionInterruptionReason {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

unsafe impl RefEncode for AVAudioSessionInterruptionReason {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}

/// Options for use when calling setActive:withOptions:error:
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionsetactiveoptions?language=objc)
// NS_OPTIONS
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionSetActiveOptions(pub NSUInteger);
bitflags::bitflags! {
    impl AVAudioSessionSetActiveOptions: NSUInteger {
        /// Notify an interrupted app that the interruption has ended and it may resume playback.
        /// Only valid on session deactivation.
        #[doc(alias = "AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation")]
        const NotifyOthersOnDeactivation = 1;
    }
}

// The options are passed over FFI as a plain NSUInteger.
unsafe impl Encode for AVAudioSessionSetActiveOptions {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

unsafe impl RefEncode for AVAudioSessionSetActiveOptions {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}

/// Values for AVAudioSessionSilenceSecondaryAudioHintTypeKey in
/// AVAudioSessionSilenceSecondaryAudioHintNotification's userInfo dictionary, to indicate whether
/// optional secondary audio muting should begin or end.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionsilencesecondaryaudiohinttype?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionSilenceSecondaryAudioHintType(pub NSUInteger);
impl AVAudioSessionSilenceSecondaryAudioHintType {
    // Note: `Begin` is raw value 1 and `End` is raw value 0, mirroring the C enum.
    /// Another application's primary audio has started.
    #[doc(alias = "AVAudioSessionSilenceSecondaryAudioHintTypeBegin")]
    pub const Begin: Self = Self(1);
    /// Another application's primary audio has stopped.
    #[doc(alias = "AVAudioSessionSilenceSecondaryAudioHintTypeEnd")]
    pub const End: Self = Self(0);
}

// The enum is passed over FFI as a plain NSUInteger.
unsafe impl Encode for AVAudioSessionSilenceSecondaryAudioHintType {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

unsafe impl RefEncode for AVAudioSessionSilenceSecondaryAudioHintType {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}

/// Values to be used by setAggregatedIOPreference:error: method.
///
/// Starting in iOS 10, applications that use AVCaptureSession on iPads and iPhones that
/// support taking Live Photos, will have non-aggregated audio I/O unless the app opts out by
/// setting its AVAudioSessionIOType to Aggregated. Non-aggregated audio I/O means that separate
/// threads will be used to service audio I/O for input and output directions.
///
/// Note that in cases where the I/O is not aggregated, the sample rate and IO buffer duration
/// properties will map to the output audio device. In this scenario, the input and
/// output audio hardware may be running at different sample rates and with different IO buffer
/// durations. If your app requires input and output audio to be presented in the same realtime
/// I/O callback, or requires that input and output audio have the same sample rate or IO buffer
/// duration, or if your app requires the ability to set a preferred sample rate or IO buffer
/// duration for audio input, set the AVAudioSessionIOType to Aggregated.
///
/// Apps that don't use AVCaptureSession and use AVAudioSessionCategoryPlayAndRecord will continue
/// to have aggregated audio I/O, as in previous versions of iOS.
///
/// Added in iOS 10.0. Not applicable on watchOS, tvOS, macOS.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessioniotype?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionIOType(pub NSUInteger);
impl AVAudioSessionIOType {
    /// The default value. If your app does not use AVCaptureSession or does not have any
    /// specific requirement for aggregating input and output audio in the same realtime I/O
    /// callback, use this value. Note that if your app does not use AVCaptureSession, it will
    /// get aggregated I/O when using AVAudioSessionCategoryPlayAndRecord.
    ///
    /// If your app does utilize AVCaptureSession, use of this value will allow AVCaptureSession
    /// to start recording without glitching already running output audio and will allow the
    /// system to utilize power-saving optimizations.
    #[doc(alias = "AVAudioSessionIOTypeNotSpecified")]
    pub const NotSpecified: Self = Self(0);
    /// Use this value if your session uses AVAudioSessionCategoryPlayAndRecord and requires
    /// input and output audio to be presented in the same realtime I/O callback. For example,
    /// if your app will be using a RemoteIO with both input and output enabled.
    ///
    /// Note that your session's preference to use aggregated IO will not be honored if it
    /// specifies AVAudioSessionCategoryOptionMixWithOthers AND another app's audio session was
    /// already active with non-mixable, non-aggregated input/output.
    #[doc(alias = "AVAudioSessionIOTypeAggregated")]
    pub const Aggregated: Self = Self(1);
}

// The enum is passed over FFI as a plain NSUInteger.
unsafe impl Encode for AVAudioSessionIOType {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

unsafe impl RefEncode for AVAudioSessionIOType {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}

/// Starting in iOS 11, tvOS 11, and watchOS 5, the route sharing policy allows a session
/// to specify that its output audio should be routed somewhere other than the default system
/// output, when appropriate alternative routes are available.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionroutesharingpolicy?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionRouteSharingPolicy(pub NSUInteger);
impl AVAudioSessionRouteSharingPolicy {
    /// Follow normal rules for routing audio output.
    #[doc(alias = "AVAudioSessionRouteSharingPolicyDefault")]
    pub const Default: Self = Self(0);
    /// Route output to the shared long-form audio output. A session whose primary use case is
    /// as a music or podcast player may use this value to play to the same output as the
    /// built-in Music (iOS), Podcasts, or iTunes (macOS) applications. Typically applications
    /// that use this policy will also want sign up for remote control events as documented in
    /// "Event Handling Guide for UIKit Apps" and will want to utilize MediaPlayer framework's
    /// MPNowPlayingInfoCenter class. All applications on the system that use the long-form
    /// audio route sharing policy will have their audio routed to the same location.
    /// Apps running on watchOS using this policy will also be able to play audio in the
    /// background, as long as an eligible audio route can be activated. Apps running on watchOS
    /// using this policy must use -activateWithOptions:completionHandler: instead of
    /// -setActive:withOptions:error: in order to ensure that the user will be given the
    /// opportunity to pick an appropriate audio route in cases where the system is unable to
    /// automatically pick the route.
    #[doc(alias = "AVAudioSessionRouteSharingPolicyLongFormAudio")]
    pub const LongFormAudio: Self = Self(1);
    /// Deprecated alias of ``LongFormAudio`` (same raw value).
    #[doc(alias = "AVAudioSessionRouteSharingPolicyLongForm")]
    #[deprecated = "replaced by AVAudioSessionRouteSharingPolicy::LongFormAudio"]
    pub const LongForm: Self = Self(AVAudioSessionRouteSharingPolicy::LongFormAudio.0);
    /// Applications should not attempt to set this value directly. On iOS, this value will be
    /// set by the system in cases where route picker UI is used to direct video to a wireless
    /// route.
    #[doc(alias = "AVAudioSessionRouteSharingPolicyIndependent")]
    pub const Independent: Self = Self(2);
    /// Route output to the shared long-form video output. A session whose primary use case is
    /// as a movie or other long-form video content player may use this value to play to the
    /// same output as other long-form video content applications such as the built-in TV (iOS)
    /// application. Applications that use this policy will also want to also set the
    /// AVInitialRouteSharingPolicy key in their Info.plist to "LongFormVideo". All applications
    /// on the system that use the long-form video route sharing policy will have their audio
    /// and video routed to the same location (e.g. AppleTV when an AirPlay route is selected).
    /// Video content not using this route sharing policy will remain local to the playback
    /// device even when long form video content is being routed to AirPlay.
    #[doc(alias = "AVAudioSessionRouteSharingPolicyLongFormVideo")]
    pub const LongFormVideo: Self = Self(3);
}

// The enum is passed over FFI as a plain NSUInteger.
unsafe impl Encode for AVAudioSessionRouteSharingPolicy {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

unsafe impl RefEncode for AVAudioSessionRouteSharingPolicy {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}

/// The prompt style is a hint to sessions that use AVAudioSessionModeVoicePrompt to modify the
/// type of prompt they play in response to other audio activity on the system, such as Siri or
/// phone calls. Sessions that issue voice prompts are encouraged to pay attention to changes in
/// the prompt style and modify their prompts in response. Apple encourages the use of non-verbal
/// prompts when the Short style is requested.
///
/// The raw values are four-character codes, mirroring the C enum.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionpromptstyle?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionPromptStyle(pub NSUInteger);
impl AVAudioSessionPromptStyle {
    /// Indicates that another session is actively using microphone input and would be negatively
    /// impacted by having prompts play at that time. For example if Siri is recognizing speech,
    /// having navigation or exercise prompts play, could interfere with its ability to accurately
    /// recognize the user's speech. Client sessions should refrain from playing any prompts
    /// while the prompt style is None.
    ///
    /// Raw value is the four-character code `'none'`.
    #[doc(alias = "AVAudioSessionPromptStyleNone")]
    pub const None: Self = Self(0x6e6f6e65);
    /// Indicates one of three states: Siri is active but not recording, voicemail playback is
    /// active, or voice call is active. Short, non-verbal versions of prompts should be used.
    ///
    /// Raw value is the four-character code `'shrt'`.
    #[doc(alias = "AVAudioSessionPromptStyleShort")]
    pub const Short: Self = Self(0x73687274);
    /// Indicates that normal (long, verbal) versions of prompts may be used.
    ///
    /// Raw value is the four-character code `'nrml'`.
    #[doc(alias = "AVAudioSessionPromptStyleNormal")]
    pub const Normal: Self = Self(0x6e726d6c);
}

// The enum is passed over FFI as a plain NSUInteger.
unsafe impl Encode for AVAudioSessionPromptStyle {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

unsafe impl RefEncode for AVAudioSessionPromptStyle {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}

/// Constants indicating stereo input audio orientation, for use with built-in mic input data
/// sources with a stereo polar pattern selected.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiostereoorientation?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioStereoOrientation(pub NSInteger);
impl AVAudioStereoOrientation {
    /// Indicates that audio capture orientation is not applicable (on mono capture, for
    /// instance).
    #[doc(alias = "AVAudioStereoOrientationNone")]
    pub const None: Self = Self(0);
    /// Indicates that audio capture should be oriented vertically, Lightning connector on the
    /// bottom.
    #[doc(alias = "AVAudioStereoOrientationPortrait")]
    pub const Portrait: Self = Self(1);
    /// Indicates that audio capture should be oriented vertically, Lightning connector on the
    /// top.
    #[doc(alias = "AVAudioStereoOrientationPortraitUpsideDown")]
    pub const PortraitUpsideDown: Self = Self(2);
    /// Indicates that audio capture should be oriented horizontally, Lightning connector on the
    /// right.
    #[doc(alias = "AVAudioStereoOrientationLandscapeRight")]
    pub const LandscapeRight: Self = Self(3);
    /// Indicates that audio capture should be oriented horizontally, Lightning connector on the
    /// left.
    #[doc(alias = "AVAudioStereoOrientationLandscapeLeft")]
    pub const LandscapeLeft: Self = Self(4);
}

// Unlike most enums in this file, this one is a signed NSInteger in the C header.
unsafe impl Encode for AVAudioStereoOrientation {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

unsafe impl RefEncode for AVAudioStereoOrientation {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}

/// These are the values returned by recordPermission.
///
/// An `NS_ENUM`-style newtype over [`NSUInteger`]; the individual permission
/// states are provided as associated constants:
///
/// - [`Undetermined`](Self::Undetermined): the user has not yet been asked for permission.
/// - [`Denied`](Self::Denied): the user has been asked and has denied permission.
/// - [`Granted`](Self::Granted): the user has been asked and has granted permission.
///
/// Introduced: ios(8.0), watchos(4.0)
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionrecordpermission?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionRecordPermission(pub NSUInteger);
impl AVAudioSessionRecordPermission {
    /// The user has not yet been asked for permission.
    ///
    /// Value is the FourCC code `'undt'`.
    #[doc(alias = "AVAudioSessionRecordPermissionUndetermined")]
    #[deprecated = "Use AVAudioApplication recordPermission"]
    pub const Undetermined: Self = Self(0x756e6474);
    /// The user has been asked and has denied permission.
    ///
    /// Value is the FourCC code `'deny'`.
    #[doc(alias = "AVAudioSessionRecordPermissionDenied")]
    #[deprecated = "Use AVAudioApplication recordPermission"]
    pub const Denied: Self = Self(0x64656e79);
    /// The user has been asked and has granted permission.
    ///
    /// Value is the FourCC code `'grnt'`.
    #[doc(alias = "AVAudioSessionRecordPermissionGranted")]
    #[deprecated = "Use AVAudioApplication recordPermission"]
    pub const Granted: Self = Self(0x67726e74);
}

// SAFETY: `AVAudioSessionRecordPermission` is `#[repr(transparent)]` over
// `NSUInteger`, so it shares `NSUInteger`'s Objective-C type encoding.
unsafe impl Encode for AVAudioSessionRecordPermission {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// SAFETY: a reference to this type is encoded as a pointer to the value
// encoding declared in the `Encode` impl above.
unsafe impl RefEncode for AVAudioSessionRecordPermission {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}

/// The rendering mode in use by an audio session; see the associated
/// constants for the individual modes (mono/stereo, surround, spatial audio,
/// Dolby Audio, Dolby Atmos).
///
/// An `NS_ENUM`-style newtype over [`NSInteger`].
///
/// [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionrenderingmode?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionRenderingMode(pub NSInteger);
impl AVAudioSessionRenderingMode {
    /// Default mode when no asset is loaded or playing.
    #[doc(alias = "AVAudioSessionRenderingModeNotApplicable")]
    pub const NotApplicable: Self = Self(0);
    /// Default mode for non multi-channel cases.
    #[doc(alias = "AVAudioSessionRenderingModeMonoStereo")]
    pub const MonoStereo: Self = Self(1);
    /// Default mode for multi-channel cases that do not fall into the modes below.
    #[doc(alias = "AVAudioSessionRenderingModeSurround")]
    pub const Surround: Self = Self(2);
    /// Fallback mode if provided content is a Dolby variant but hardware capabilities don't support it.
    #[doc(alias = "AVAudioSessionRenderingModeSpatialAudio")]
    pub const SpatialAudio: Self = Self(3);
    /// Dolby Audio mode.
    #[doc(alias = "AVAudioSessionRenderingModeDolbyAudio")]
    pub const DolbyAudio: Self = Self(4);
    /// Dolby Atmos mode.
    #[doc(alias = "AVAudioSessionRenderingModeDolbyAtmos")]
    pub const DolbyAtmos: Self = Self(5);
}

// SAFETY: `AVAudioSessionRenderingMode` is `#[repr(transparent)]` over
// `NSInteger`, so it shares `NSInteger`'s Objective-C type encoding.
unsafe impl Encode for AVAudioSessionRenderingMode {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// SAFETY: a reference to this type is encoded as a pointer to the value
// encoding declared in the `Encode` impl above.
unsafe impl RefEncode for AVAudioSessionRenderingMode {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}

/// Various modes to inject audio coming from a session to another app’s input stream.
///
/// Applications can state their intent to mix locally generated audio, which should consist primarily of
/// synthesized speech, to another app's input stream. This feature is intended to be used by accessibility apps
/// implementing augmentative and alternative communication systems that enable users with disabilities to
/// communicate with synthesized speech. When input is muted, microphone injection will also be muted.
///
/// An `NS_ENUM`-style newtype over [`NSInteger`]; see the associated
/// constants for the individual modes.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosessionmicrophoneinjectionmode?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AVAudioSessionMicrophoneInjectionMode(pub NSInteger);
impl AVAudioSessionMicrophoneInjectionMode {
    /// Default state; microphone injection is not preferred.
    #[doc(alias = "AVAudioSessionMicrophoneInjectionModeNone")]
    pub const None: Self = Self(0);
    /// Inject spoken audio, like synthesized speech, with microphone audio.
    #[doc(alias = "AVAudioSessionMicrophoneInjectionModeSpokenAudio")]
    pub const SpokenAudio: Self = Self(1);
}

// SAFETY: `AVAudioSessionMicrophoneInjectionMode` is `#[repr(transparent)]`
// over `NSInteger`, so it shares `NSInteger`'s Objective-C type encoding.
unsafe impl Encode for AVAudioSessionMicrophoneInjectionMode {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// SAFETY: a reference to this type is encoded as a pointer to the value
// encoding declared in the `Encode` impl above.
unsafe impl RefEncode for AVAudioSessionMicrophoneInjectionMode {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}