1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
//! This file has been automatically generated by `objc2`'s `header-translator`.
//! DO NOT EDIT
use core::ffi::*;
use core::ptr::NonNull;
use objc2::__framework_prelude::*;
#[cfg(feature = "objc2-audio-toolbox")]
#[cfg(not(target_os = "watchos"))]
use objc2_audio_toolbox::*;
use objc2_foundation::*;
use crate::*;
/// A block that receives copies of the output of an AVAudioNode.
///
/// Parameter `buffer`: a buffer of audio captured from the output of an AVAudioNode
///
/// Parameter `when`: the time at which the buffer was captured
///
/// CAUTION: This callback may be invoked on a thread other than the main thread.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudionodetapblock?language=objc)
// NOTE: this alias is a *raw pointer* to an Objective-C block, not a safe Rust
// closure type. Passing it to `installTapOnBus_bufferSize_format_block` requires
// the pointer to be valid (see that method's `# Safety` section).
#[cfg(all(feature = "AVAudioBuffer", feature = "AVAudioTime", feature = "block2"))]
pub type AVAudioNodeTapBlock =
*mut block2::DynBlock<dyn Fn(NonNull<AVAudioPCMBuffer>, NonNull<AVAudioTime>)>;
// `extern_class!` declares the Rust-side type for the Objective-C class
// `AVAudioNode` (superclass `NSObject`, per the `#[unsafe(super(...))]`
// attribute below); the struct itself carries no Rust fields — all state
// lives in the Objective-C object.
extern_class!(
/// Base class for an audio generation, processing, or I/O block.
///
/// `AVAudioEngine` objects contain instances of various AVAudioNode subclasses. This
/// base class provides certain common functionality.
///
/// Nodes have input and output busses, which can be thought of as connection points.
/// For example, an effect typically has one input bus and one output bus. A mixer
/// typically has multiple input busses and one output bus.
///
/// Busses have formats, expressed in terms of sample rate and channel count. When making
/// connections between nodes, often the format must match exactly. There are exceptions
/// (e.g. `AVAudioMixerNode` and `AVAudioOutputNode`).
///
/// Nodes do not currently provide useful functionality until attached to an engine.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudionode?language=objc)
#[unsafe(super(NSObject))]
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct AVAudioNode;
);
// Records that `AVAudioNode` conforms to the `NSObject` protocol (every
// `NSObject` subclass does); enables protocol-generic APIs to accept it.
extern_conformance!(
unsafe impl NSObjectProtocol for AVAudioNode {}
);
// Auto-generated method bindings: `extern_methods!` maps each Rust function to
// the Objective-C selector named in its `#[unsafe(method(...))]` attribute, so
// the selector strings below must not be altered. All methods are `unsafe`
// because the Objective-C API contracts are not checked by the Rust compiler.
// `#[cfg(feature = ...)]` gates mirror the crate features that provide each
// parameter/return type.
impl AVAudioNode {
extern_methods!(
/// Clear a unit's previous processing state.
#[unsafe(method(reset))]
#[unsafe(method_family = none)]
pub unsafe fn reset(&self);
#[cfg(all(feature = "AVAudioFormat", feature = "AVAudioTypes"))]
/// Obtain an input bus's format.
#[unsafe(method(inputFormatForBus:))]
#[unsafe(method_family = none)]
pub unsafe fn inputFormatForBus(&self, bus: AVAudioNodeBus) -> Retained<AVAudioFormat>;
#[cfg(all(feature = "AVAudioFormat", feature = "AVAudioTypes"))]
/// Obtain an output bus's format.
#[unsafe(method(outputFormatForBus:))]
#[unsafe(method_family = none)]
pub unsafe fn outputFormatForBus(&self, bus: AVAudioNodeBus) -> Retained<AVAudioFormat>;
#[cfg(feature = "AVAudioTypes")]
/// Return the name of an input bus.
#[unsafe(method(nameForInputBus:))]
#[unsafe(method_family = none)]
pub unsafe fn nameForInputBus(&self, bus: AVAudioNodeBus) -> Option<Retained<NSString>>;
#[cfg(feature = "AVAudioTypes")]
/// Return the name of an output bus.
#[unsafe(method(nameForOutputBus:))]
#[unsafe(method_family = none)]
pub unsafe fn nameForOutputBus(&self, bus: AVAudioNodeBus) -> Option<Retained<NSString>>;
#[cfg(all(
feature = "AVAudioBuffer",
feature = "AVAudioFormat",
feature = "AVAudioTime",
feature = "AVAudioTypes",
feature = "block2"
))]
/// Create a "tap" to record/monitor/observe the output of the node.
///
/// Parameter `bus`: the node output bus to which to attach the tap
///
/// Parameter `bufferSize`: the requested size of the incoming buffers in sample frames. Supported range is [100, 400] ms.
///
/// Parameter `format`: If non-nil, attempts to apply this as the format of the specified output bus. This should
/// only be done when attaching to an output bus which is not connected to another node; an
/// error will result otherwise.
/// The tap and connection formats (if non-nil) on the specified bus should be identical.
/// Otherwise, the latter operation will override any previously set format.
///
/// Parameter `tapBlock`: a block to be called with audio buffers
///
///
/// Only one tap may be installed on any bus. Taps may be safely installed and removed while
/// the engine is running.
///
/// Note that if you have a tap installed on AVAudioOutputNode, there could be a mismatch
/// between the tap buffer format and AVAudioOutputNode's output format, depending on the
/// underlying physical device. Hence, instead of tapping the AVAudioOutputNode, it is
/// advised to tap the node connected to it.
///
/// E.g. to capture audio from input node:
/// <pre>
/// AVAudioEngine *engine = [[AVAudioEngine alloc] init];
/// AVAudioInputNode *input = [engine inputNode];
/// AVAudioFormat *format = [input outputFormatForBus: 0];
/// [input installTapOnBus: 0 bufferSize: 8192 format: format block: ^(AVAudioPCMBuffer *buf, AVAudioTime *when) {
/// // ‘buf' contains audio captured from input node at time 'when'
/// }];
/// ....
/// // start engine
/// </pre>
///
/// # Safety
///
/// `tap_block` must be a valid pointer.
#[unsafe(method(installTapOnBus:bufferSize:format:block:))]
#[unsafe(method_family = none)]
pub unsafe fn installTapOnBus_bufferSize_format_block(
&self,
bus: AVAudioNodeBus,
buffer_size: AVAudioFrameCount,
format: Option<&AVAudioFormat>,
tap_block: AVAudioNodeTapBlock,
);
#[cfg(feature = "AVAudioTypes")]
/// Destroy a tap.
///
/// Parameter `bus`: the node output bus whose tap is to be destroyed
#[unsafe(method(removeTapOnBus:))]
#[unsafe(method_family = none)]
pub unsafe fn removeTapOnBus(&self, bus: AVAudioNodeBus);
#[cfg(feature = "AVAudioEngine")]
/// The engine to which the node is attached (or nil).
#[unsafe(method(engine))]
#[unsafe(method_family = none)]
pub unsafe fn engine(&self) -> Option<Retained<AVAudioEngine>>;
/// The node's number of input busses.
#[unsafe(method(numberOfInputs))]
#[unsafe(method_family = none)]
pub unsafe fn numberOfInputs(&self) -> NSUInteger;
/// The node's number of output busses.
#[unsafe(method(numberOfOutputs))]
#[unsafe(method_family = none)]
pub unsafe fn numberOfOutputs(&self) -> NSUInteger;
#[cfg(feature = "AVAudioTime")]
/// Obtain the time for which the node most recently rendered.
///
/// Will return nil if the engine is not running or if the node is not connected to an input or
/// output node.
#[unsafe(method(lastRenderTime))]
#[unsafe(method_family = none)]
pub unsafe fn lastRenderTime(&self) -> Option<Retained<AVAudioTime>>;
#[cfg(feature = "objc2-audio-toolbox")]
#[cfg(not(target_os = "watchos"))]
/// An AUAudioUnit wrapping or underlying the implementation's AudioUnit.
///
/// This provides an AUAudioUnit which either wraps or underlies the implementation's
/// AudioUnit, depending on how that audio unit is packaged. Applications can interact with this
/// AUAudioUnit to control custom properties, select presets, change parameters, etc.
///
/// No operations that may conflict with state maintained by the engine should be performed
/// directly on the audio unit. These include changing initialization state, stream formats,
/// channel layouts or connections to other audio units.
#[unsafe(method(AUAudioUnit))]
#[unsafe(method_family = none)]
pub unsafe fn AUAudioUnit(&self) -> Retained<AUAudioUnit>;
/// The processing latency of the node, in seconds.
///
/// This property reflects the delay between when an impulse in the audio stream arrives at the
/// input vs. output of the node. This should reflect the delay due to signal processing
/// (e.g. filters, FFT's, etc.), not delay or reverberation which is being applied as an effect.
/// A value of zero indicates either no latency or an unknown latency.
#[unsafe(method(latency))]
#[unsafe(method_family = none)]
pub unsafe fn latency(&self) -> NSTimeInterval;
/// The maximum render pipeline latency downstream of the node, in seconds.
///
/// This describes the maximum time it will take for the audio at the output of a node to be
/// presented.
/// For instance, the output presentation latency of the output node in the engine is:
/// - zero in manual rendering mode
/// - the presentation latency of the device itself when rendering to an audio device
/// (see `AVAudioIONode(presentationLatency)`)
/// The output presentation latency of a node connected directly to the output node is the
/// output node's presentation latency plus the output node's processing latency (see `latency`).
///
/// For a node which is exclusively in the input node chain (i.e. not connected to engine's
/// output node), this property reflects the latency for the output of this node to be
/// presented at the output of the terminating node in the input chain.
///
/// A value of zero indicates either an unknown or no latency.
///
/// Note that this latency value can change as the engine is reconfigured (started/stopped,
/// connections made/altered downstream of this node etc.). So it is recommended not to cache
/// this value and fetch it whenever it's needed.
#[unsafe(method(outputPresentationLatency))]
#[unsafe(method_family = none)]
pub unsafe fn outputPresentationLatency(&self) -> NSTimeInterval;
);
}
/// Methods declared on superclass `NSObject`.
// `method_family = init` / `new` informs objc2 of the Objective-C method
// family, which determines the ownership convention of the returned object.
impl AVAudioNode {
extern_methods!(
#[unsafe(method(init))]
#[unsafe(method_family = init)]
pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;
#[unsafe(method(new))]
#[unsafe(method_family = new)]
pub unsafe fn new() -> Retained<Self>;
);
}