// objc2_avf_audio/generated/AVAudioSourceNode.rs
1//! This file has been automatically generated by `objc2`'s `header-translator`.
2//! DO NOT EDIT
3use core::ptr::NonNull;
4use objc2::__framework_prelude::*;
5#[cfg(feature = "objc2-core-audio-types")]
6use objc2_core_audio_types::*;
7
8use crate::*;
9
/// Block to supply audio data to AVAudioSourceNode
///
/// Parameter `isSilence`: The client may use this flag to indicate that the buffer it vends contains only silence.
/// The receiver of the buffer can then use the flag as a hint as to whether the buffer needs
/// to be processed or not.
/// Note that because the flag is only a hint, when setting the silence flag, the originator of
/// a buffer must also ensure that it contains silence (zeroes).
///
/// Parameter `timestamp`: The HAL time at which the audio data will be rendered. If there is a sample rate conversion
/// or time compression/expansion downstream, the sample time will not be valid.
///
/// Parameter `frameCount`: The number of sample frames of audio data requested.
///
/// Parameter `outputData`: The output data.
///
/// The caller must supply valid buffers in outputData's mBuffers' mData and mDataByteSize.
/// mDataByteSize must be consistent with frameCount. This block may provide output in those
/// specified buffers, or it may replace the mData pointers with pointers to memory which it
/// owns and guarantees will remain valid until the next render cycle.
///
/// Returns: An OSStatus result code. If an error is returned, the audio data should be assumed to be
/// invalid.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosourcenoderenderblock?language=objc)
#[cfg(all(
    feature = "AVAudioTypes",
    feature = "block2",
    feature = "objc2-core-audio-types"
))]
// Represented as a raw pointer to a `block2::Block` rather than an owned
// block type: the alias mirrors the Objective-C `AVAudioSourceNodeRenderBlock`
// typedef, and ownership/lifetime of the block is managed on the Objective-C
// side. All four parameters are passed as non-null out/in pointers
// (NonNull<...>), matching the C block signature.
pub type AVAudioSourceNodeRenderBlock = *mut block2::Block<
    dyn Fn(
        NonNull<Bool>,
        NonNull<AudioTimeStamp>,
        AVAudioFrameCount,
        NonNull<AudioBufferList>,
    ) -> OSStatus,
>;
47
// Declares the Rust-side binding for the Objective-C class
// `AVAudioSourceNode`, with the superclass chain
// AVAudioSourceNode -> AVAudioNode -> NSObject as given in the
// `#[unsafe(super(...))]` attribute below.
extern_class!(
    /// AVAudioSourceNode wraps a client provided block to supply audio.
    ///
    /// With AVAudioSourceNode the client can supply audio data for rendering through an
    /// AVAudioSourceNodeRenderBlock block.
    /// This is similar to setting the input callback on an Audio Unit with the
    /// kAudioUnitProperty_SetRenderCallback property.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/avfaudio/avaudiosourcenode?language=objc)
    #[unsafe(super(AVAudioNode, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "AVAudioNode")]
    pub struct AVAudioSourceNode;
);
62
// Protocol conformances mirrored from the Objective-C header. These impls are
// `unsafe` because the conformance is asserted by the generator, not checked
// by the Rust compiler; correctness relies on the framework actually declaring
// these protocols on AVAudioSourceNode.
#[cfg(all(feature = "AVAudioMixing", feature = "AVAudioNode"))]
unsafe impl AVAudio3DMixing for AVAudioSourceNode {}

#[cfg(all(feature = "AVAudioMixing", feature = "AVAudioNode"))]
unsafe impl AVAudioMixing for AVAudioSourceNode {}

#[cfg(all(feature = "AVAudioMixing", feature = "AVAudioNode"))]
unsafe impl AVAudioStereoMixing for AVAudioSourceNode {}

#[cfg(feature = "AVAudioNode")]
unsafe impl NSObjectProtocol for AVAudioSourceNode {}
74
/// Generated message-send wrappers for `AVAudioSourceNode`'s initializers.
#[cfg(feature = "AVAudioNode")]
impl AVAudioSourceNode {
    extern_methods!(
        // Plain `-init`. `method_family = init` tells objc2 this consumes the
        // `Allocated<Self>` receiver and returns an owned (`Retained`) object,
        // per Objective-C init-family memory-management rules.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[cfg(all(
            feature = "AVAudioTypes",
            feature = "block2",
            feature = "objc2-core-audio-types"
        ))]
        /// Create a node with a render block.
        ///
        /// Parameter `block`: The block to supply audio data to the output.
        ///
        /// The block can be called on realtime or non-realtime threads depending on the engine’s
        /// operating mode and it is the client's responsibility to handle it in a thread-safe manner.
        ///
        /// The audio format for the output bus will be set from the connection format when connecting
        /// to another node.
        ///
        /// The audio format for the block will be set to the node's output format. If node is
        /// reconnected with a different output format, the audio format for the block will also change.
        #[unsafe(method(initWithRenderBlock:))]
        #[unsafe(method_family = init)]
        pub unsafe fn initWithRenderBlock(
            this: Allocated<Self>,
            // Raw block pointer; the caller must ensure it points to a valid
            // block for the duration of the call (NOTE(review): objc2 block
            // pointers are copied/retained by the receiver per Cocoa
            // convention — confirm against header-translator semantics).
            block: AVAudioSourceNodeRenderBlock,
        ) -> Retained<Self>;

        #[cfg(all(
            feature = "AVAudioFormat",
            feature = "AVAudioTypes",
            feature = "block2",
            feature = "objc2-core-audio-types"
        ))]
        /// Create a node with a render block.
        ///
        /// Parameter `format`: The format of the PCM audio data that will be supplied by the block.
        ///
        /// Parameter `block`: The block to supply audio data to the output.
        ///
        /// The block can be called on realtime or non-realtime threads depending on the engine’s
        /// operating mode and it is the client's responsibility to handle it in a thread-safe manner.
        ///
        /// The audio format for the output bus will be set from the connection format when connecting
        /// to another node.
        ///
        /// AVAudioSourceNode supports different audio formats for the block and output, but only
        /// Linear PCM conversions are supported (sample rate, bit depth, interleaving).
        #[unsafe(method(initWithFormat:renderBlock:))]
        #[unsafe(method_family = init)]
        pub unsafe fn initWithFormat_renderBlock(
            this: Allocated<Self>,
            format: &AVAudioFormat,
            block: AVAudioSourceNodeRenderBlock,
        ) -> Retained<Self>;
    );
}
135
/// Methods declared on superclass `NSObject`.
#[cfg(feature = "AVAudioNode")]
impl AVAudioSourceNode {
    extern_methods!(
        // `+new` (alloc + init in one call). `method_family = new` marks the
        // return as an owned object per Objective-C new-family rules.
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
144}