wasapi/
api.rs

1use num_integer::Integer;
2use std::cmp;
3use std::collections::VecDeque;
4use std::mem::{size_of, ManuallyDrop};
5use std::ops::Deref;
6use std::pin::Pin;
7use std::sync::{Arc, Condvar, Mutex};
8use std::{fmt, ptr, slice};
9use windows::Win32::Foundation::{CloseHandle, E_INVALIDARG, E_NOINTERFACE, FALSE, PROPERTYKEY};
10use windows::Win32::Media::Audio::{
11    ActivateAudioInterfaceAsync, AudioCategory_Alerts, AudioCategory_Communications,
12    AudioCategory_FarFieldSpeech, AudioCategory_ForegroundOnlyMedia, AudioCategory_GameChat,
13    AudioCategory_GameEffects, AudioCategory_GameMedia, AudioCategory_Media, AudioCategory_Movie,
14    AudioCategory_Other, AudioCategory_SoundEffects, AudioCategory_Speech,
15    AudioCategory_UniformSpeech, AudioCategory_VoiceTyping, EDataFlow, ERole,
16    IAcousticEchoCancellationControl, IActivateAudioInterfaceAsyncOperation,
17    IActivateAudioInterfaceCompletionHandler, IActivateAudioInterfaceCompletionHandler_Impl,
18    IAudioClient2, IAudioEffectsManager, IAudioSessionControl2, IAudioSessionEnumerator,
19    IAudioSessionManager, IAudioSessionManager2, IMMEndpoint, PKEY_AudioEngine_DeviceFormat,
20    AUDCLNT_STREAMOPTIONS, AUDCLNT_STREAMOPTIONS_AMBISONICS, AUDCLNT_STREAMOPTIONS_MATCH_FORMAT,
21    AUDCLNT_STREAMOPTIONS_NONE, AUDCLNT_STREAMOPTIONS_RAW, AUDIOCLIENT_ACTIVATION_PARAMS,
22    AUDIOCLIENT_ACTIVATION_PARAMS_0, AUDIOCLIENT_ACTIVATION_TYPE_PROCESS_LOOPBACK,
23    AUDIOCLIENT_PROCESS_LOOPBACK_PARAMS, AUDIO_EFFECT, AUDIO_STREAM_CATEGORY,
24    PROCESS_LOOPBACK_MODE_EXCLUDE_TARGET_PROCESS_TREE,
25    PROCESS_LOOPBACK_MODE_INCLUDE_TARGET_PROCESS_TREE, VIRTUAL_AUDIO_DEVICE_PROCESS_LOOPBACK,
26};
27use windows::Win32::Media::KernelStreaming::AUDIO_EFFECT_TYPE_ACOUSTIC_ECHO_CANCELLATION;
28use windows::Win32::System::Com::CoTaskMemFree;
29use windows::Win32::System::Com::StructuredStorage::PropVariantClear;
30use windows::Win32::System::Variant::VT_BLOB;
31use windows::{
32    core::{HRESULT, PCSTR},
33    Win32::Devices::FunctionDiscovery::{
34        PKEY_DeviceInterface_FriendlyName, PKEY_Device_DeviceDesc, PKEY_Device_FriendlyName,
35    },
36    Win32::Foundation::{HANDLE, WAIT_OBJECT_0},
37    Win32::Media::Audio::{
38        eCapture, eCommunications, eConsole, eMultimedia, eRender, AudioSessionStateActive,
39        AudioSessionStateExpired, AudioSessionStateInactive, IAudioCaptureClient, IAudioClient,
40        IAudioClock, IAudioRenderClient, IAudioSessionControl, IAudioSessionEvents, IMMDevice,
41        IMMDeviceCollection, IMMDeviceEnumerator, MMDeviceEnumerator,
42        AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY, AUDCLNT_BUFFERFLAGS_SILENT,
43        AUDCLNT_BUFFERFLAGS_TIMESTAMP_ERROR, AUDCLNT_SHAREMODE_EXCLUSIVE, AUDCLNT_SHAREMODE_SHARED,
44        AUDCLNT_STREAMFLAGS_AUTOCONVERTPCM, AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
45        AUDCLNT_STREAMFLAGS_LOOPBACK, AUDCLNT_STREAMFLAGS_SRC_DEFAULT_QUALITY, DEVICE_STATE_ACTIVE,
46        DEVICE_STATE_DISABLED, DEVICE_STATE_NOTPRESENT, DEVICE_STATE_UNPLUGGED, WAVEFORMATEX,
47        WAVEFORMATEXTENSIBLE,
48    },
49    Win32::Media::KernelStreaming::WAVE_FORMAT_EXTENSIBLE,
50    Win32::System::Com::StructuredStorage::{
51        PropVariantToStringAlloc, PROPVARIANT, PROPVARIANT_0, PROPVARIANT_0_0, PROPVARIANT_0_0_0,
52    },
53    Win32::System::Com::{
54        CoCreateInstance, CoInitializeEx, CoUninitialize, CLSCTX_ALL, COINIT_APARTMENTTHREADED,
55        COINIT_MULTITHREADED,
56    },
57    Win32::System::Com::{BLOB, STGM_READ},
58    Win32::System::Threading::{CreateEventA, WaitForSingleObject},
59};
60use windows_core::{implement, IUnknown, Interface, Ref, HSTRING, PCWSTR};
61
62use crate::{make_channelmasks, AudioSessionEvents, EventCallbacks, WasapiError, WaveFormat};
63
/// Crate-wide result alias: a `Result` whose error type is always [WasapiError].
pub(crate) type WasapiRes<T> = Result<T, WasapiError>;
65
/// Initializes COM for use by the calling thread for the multi-threaded apartment (MTA).
///
/// COM must be initialized on a thread before any other functionality of this
/// crate is used on it. Check the returned [HRESULT], e.g. with `.ok()`.
pub fn initialize_mta() -> HRESULT {
    unsafe { CoInitializeEx(None, COINIT_MULTITHREADED) }
}
70
/// Initializes COM for use by the calling thread for a single-threaded apartment (STA).
///
/// COM must be initialized on a thread before any other functionality of this
/// crate is used on it. Check the returned [HRESULT], e.g. with `.ok()`.
pub fn initialize_sta() -> HRESULT {
    unsafe { CoInitializeEx(None, COINIT_APARTMENTTHREADED) }
}
75
/// Close the COM library on the current thread.
///
/// Should be paired with a successful earlier call to
/// [initialize_mta] or [initialize_sta] on the same thread.
pub fn deinitialize() {
    unsafe { CoUninitialize() }
}
80
/// Audio direction, playback or capture.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Direction {
    /// Playback (render) direction.
    Render,
    /// Recording (capture) direction.
    Capture,
}
87
88impl fmt::Display for Direction {
89    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
90        match *self {
91            Direction::Render => write!(f, "Render"),
92            Direction::Capture => write!(f, "Capture"),
93        }
94    }
95}
96
97impl TryFrom<&EDataFlow> for Direction {
98    type Error = WasapiError;
99
100    fn try_from(value: &EDataFlow) -> Result<Self, Self::Error> {
101        match value {
102            EDataFlow(0) => Ok(Self::Render),
103            EDataFlow(1) => Ok(Self::Capture),
104            // EDataFlow(2) => All/Both,
105            x => Err(WasapiError::IllegalDeviceDirection(x.0)),
106        }
107    }
108}
impl TryFrom<EDataFlow> for Direction {
    type Error = WasapiError;

    /// Owned-value convenience; delegates to the `&EDataFlow` implementation.
    fn try_from(value: EDataFlow) -> Result<Self, Self::Error> {
        Self::try_from(&value)
    }
}
116
117impl From<&Direction> for EDataFlow {
118    fn from(value: &Direction) -> Self {
119        match value {
120            Direction::Capture => eCapture,
121            Direction::Render => eRender,
122        }
123    }
124}
impl From<Direction> for EDataFlow {
    /// Owned-value convenience; delegates to the `&Direction` implementation.
    fn from(value: Direction) -> Self {
        Self::from(&value)
    }
}
130
/// Wrapper for [ERole](https://learn.microsoft.com/en-us/windows/win32/api/mmdeviceapi/ne-mmdeviceapi-erole).
/// Console is the role used by most applications
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Role {
    /// Games, system notification sounds, and voice commands.
    Console,
    /// Music, movies, narration, and live music recording.
    Multimedia,
    /// Voice communications (talking to another person).
    Communications,
}
139
140impl fmt::Display for Role {
141    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
142        match *self {
143            Role::Console => write!(f, "Console"),
144            Role::Multimedia => write!(f, "Multimedia"),
145            Role::Communications => write!(f, "Communications"),
146        }
147    }
148}
149
150impl TryFrom<&ERole> for Role {
151    type Error = WasapiError;
152
153    fn try_from(value: &ERole) -> Result<Self, Self::Error> {
154        match value {
155            ERole(0) => Ok(Self::Console),
156            ERole(1) => Ok(Self::Multimedia),
157            ERole(2) => Ok(Self::Communications),
158            x => Err(WasapiError::IllegalDeviceRole(x.0)),
159        }
160    }
161}
impl TryFrom<ERole> for Role {
    type Error = WasapiError;

    /// Owned-value convenience; delegates to the `&ERole` implementation.
    fn try_from(value: ERole) -> Result<Self, Self::Error> {
        Self::try_from(&value)
    }
}
169
170impl From<&Role> for ERole {
171    fn from(value: &Role) -> Self {
172        match value {
173            Role::Communications => eCommunications,
174            Role::Multimedia => eMultimedia,
175            Role::Console => eConsole,
176        }
177    }
178}
impl From<Role> for ERole {
    /// Owned-value convenience; delegates to the `&Role` implementation.
    fn from(value: Role) -> Self {
        Self::from(&value)
    }
}
184
/// Helper enum for initializing an [AudioClient].
/// There are four main modes that can be specified,
/// corresponding to the four possible combinations of sharing mode and timing.
/// The enum variants only expose the parameters that can be set in each mode.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum StreamMode {
    /// Shared mode using polling for timing.
    /// The parameters that can be set are the device buffer duration (in units of 100 ns)
    /// and whether automatic format conversion should be enabled.
    /// The audio engine decides the period, and this cannot be changed.
    PollingShared {
        /// Enable automatic format conversion.
        autoconvert: bool,
        /// Buffer duration in units of 100 ns.
        buffer_duration_hns: i64,
    },
    /// Exclusive mode using polling for timing.
    /// Both device period and buffer duration are given, in units of 100 ns.
    PollingExclusive {
        /// Buffer duration in units of 100 ns.
        buffer_duration_hns: i64,
        /// Device period in units of 100 ns.
        period_hns: i64,
    },
    /// Shared mode using event driven timing.
    /// The parameters that can be set are the device buffer duration (in units of 100 ns)
    /// and whether automatic format conversion should be enabled.
    /// The audio engine decides the period, and this cannot be changed.
    EventsShared {
        /// Enable automatic format conversion.
        autoconvert: bool,
        /// Buffer duration in units of 100 ns.
        buffer_duration_hns: i64,
    },
    /// Exclusive mode using event driven timing.
    /// The period and buffer duration must be set to the same value.
    /// Only device period is given here, in units of 100 ns.
    EventsExclusive {
        /// Device period (and implicitly buffer duration) in units of 100 ns.
        period_hns: i64,
    },
}
218
/// Sharemode for device
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum ShareMode {
    /// The device is shared with other streams via the audio engine.
    Shared,
    /// The application gets exclusive access to the device.
    Exclusive,
}
225
/// Timing mode for device
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum TimingMode {
    /// The application polls the device to know when to transfer data.
    Polling,
    /// Event driven timing: the device signals when a buffer is ready.
    Events,
}
232
233impl fmt::Display for ShareMode {
234    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
235        match *self {
236            ShareMode::Shared => write!(f, "Shared"),
237            ShareMode::Exclusive => write!(f, "Exclusive"),
238        }
239    }
240}
241
/// Sample type, float or integer
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum SampleType {
    /// Floating point samples.
    Float,
    /// Integer samples.
    Int,
}
248
249impl fmt::Display for SampleType {
250    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
251        match *self {
252            SampleType::Float => write!(f, "Float"),
253            SampleType::Int => write!(f, "Int"),
254        }
255    }
256}
257
/// Possible states for an [AudioSessionControl], an enum representing the
/// [AudioSessionStateXxx constants](https://learn.microsoft.com/en-us/windows/win32/api/audiosessiontypes/ne-audiosessiontypes-audiosessionstate)
// Clone/Copy/ derives added for consistency with the other state enums
// in this file (Direction, Role, ShareMode, ...), backward compatible.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum SessionState {
    /// The audio session is active. (At least one of the streams in the session is running.)
    Active,
    /// The audio session is inactive. (It contains at least one stream, but none of the streams in the session is currently running.)
    Inactive,
    /// The audio session has expired. (It contains no streams.)
    Expired,
}
269
270impl fmt::Display for SessionState {
271    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
272        match *self {
273            SessionState::Active => write!(f, "Active"),
274            SessionState::Inactive => write!(f, "Inactive"),
275            SessionState::Expired => write!(f, "Expired"),
276        }
277    }
278}
279
/// Possible states for an [IMMDevice], an enum representing the
/// [DEVICE_STATE_XXX constants](https://learn.microsoft.com/en-us/windows/win32/coreaudio/device-state-xxx-constants)
// Clone/Copy derives added for consistency with the other state enums
// in this file (Direction, Role, ShareMode, ...), backward compatible.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum DeviceState {
    /// The audio endpoint device is active. That is, the audio adapter that connects to the
    /// endpoint device is present and enabled. In addition, if the endpoint device plugs into
    /// a jack on the adapter, then the endpoint device is plugged in.
    Active,
    /// The audio endpoint device is disabled. The user has disabled the device in the Windows
    /// multimedia control panel, Mmsys.cpl
    Disabled,
    /// The audio endpoint device is not present because the audio adapter that connects to the
    /// endpoint device has been removed from the system, or the user has disabled the adapter
    /// device in Device Manager.
    NotPresent,
    /// The audio endpoint device is unplugged. The audio adapter that contains the jack for the
    /// endpoint device is present and enabled, but the endpoint device is not plugged into the
    /// jack. Only a device with jack-presence detection can be in this state.
    Unplugged,
}
300
301impl fmt::Display for DeviceState {
302    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
303        match *self {
304            DeviceState::Active => write!(f, "Active"),
305            DeviceState::Disabled => write!(f, "Disabled"),
306            DeviceState::NotPresent => write!(f, "NotPresent"),
307            DeviceState::Unplugged => write!(f, "Unplugged"),
308        }
309    }
310}
311
/// Calculate a period in units of 100ns that corresponds to the given number of buffer frames at the given sample rate.
/// The result is rounded to the nearest integer period.
/// See the [IAudioClient documentation](https://learn.microsoft.com/en-us/windows/win32/api/audioclient/nf-audioclient-iaudioclient-initialize#remarks).
pub fn calculate_period_100ns(frames: i64, samplerate: i64) -> i64 {
    // One second is 10^7 units of 100 ns.
    let hns_per_frame = 10000.0 * 1000.0 / samplerate as f64;
    (hns_per_frame * frames as f64 + 0.5) as i64
}
317
/// Struct wrapping an [IMMDeviceEnumerator](https://learn.microsoft.com/en-us/windows/win32/api/mmdeviceapi/nn-mmdeviceapi-immdeviceenumerator)
pub struct DeviceEnumerator {
    /// The underlying COM enumerator interface.
    enumerator: IMMDeviceEnumerator,
}
322
323impl DeviceEnumerator {
324    /// Create a new [DeviceEnumerator]
325    pub fn new() -> WasapiRes<DeviceEnumerator> {
326        let enumerator: IMMDeviceEnumerator =
327            unsafe { CoCreateInstance(&MMDeviceEnumerator, None, CLSCTX_ALL)? };
328        Ok(DeviceEnumerator { enumerator })
329    }
330
331    /// Get an [IMMDeviceCollection] of all active playback or capture devices
332    pub fn get_device_collection(&self, direction: &Direction) -> WasapiRes<DeviceCollection> {
333        let dir: EDataFlow = direction.into();
334        let devs = unsafe {
335            self.enumerator
336                .EnumAudioEndpoints(dir, DEVICE_STATE_ACTIVE)?
337        };
338        Ok(DeviceCollection {
339            collection: devs,
340            direction: *direction,
341        })
342    }
343
344    /// Get the default playback or capture device for the console role
345    pub fn get_default_device(&self, direction: &Direction) -> WasapiRes<Device> {
346        self.get_default_device_for_role(direction, &Role::Console)
347    }
348
349    /// Get the default playback or capture device for a specific role
350    pub fn get_default_device_for_role(
351        &self,
352        direction: &Direction,
353        role: &Role,
354    ) -> WasapiRes<Device> {
355        let dir = direction.into();
356        let e_role = role.into();
357
358        let device = unsafe { self.enumerator.GetDefaultAudioEndpoint(dir, e_role)? };
359
360        let dev = Device {
361            device,
362            direction: *direction,
363        };
364        debug!("default device {:?}", dev.get_friendlyname());
365        Ok(dev)
366    }
367
368    /// Get the device of a given Id. The Id can be obtained by calling [Device::get_id()]
369    pub fn get_device(&self, device_id: &str) -> WasapiRes<Device> {
370        let w_id = PCWSTR::from_raw(HSTRING::from(device_id).as_ptr());
371        let immdevice = unsafe { self.enumerator.GetDevice(w_id)? };
372        let device = Device::from_immdevice(immdevice)?;
373        Ok(device)
374    }
375}
376
/// Struct wrapping an [IMMDeviceCollection](https://docs.microsoft.com/en-us/windows/win32/api/mmdeviceapi/nn-mmdeviceapi-immdevicecollection).
pub struct DeviceCollection {
    /// The underlying COM collection interface.
    collection: IMMDeviceCollection,
    /// Data flow direction shared by all devices in the collection.
    direction: Direction,
}
382
383impl DeviceCollection {
384    /// Get the number of devices in an [IMMDeviceCollection]
385    pub fn get_nbr_devices(&self) -> WasapiRes<u32> {
386        let count = unsafe { self.collection.GetCount()? };
387        Ok(count)
388    }
389
390    /// Get a device from an [IMMDeviceCollection] using index
391    pub fn get_device_at_index(&self, idx: u32) -> WasapiRes<Device> {
392        let device = unsafe { self.collection.Item(idx)? };
393        Ok(Device {
394            device,
395            direction: self.direction,
396        })
397    }
398
399    /// Get a device from an [IMMDeviceCollection] using name
400    pub fn get_device_with_name(&self, name: &str) -> WasapiRes<Device> {
401        let count = unsafe { self.collection.GetCount()? };
402        trace!("nbr devices {count}");
403        for n in 0..count {
404            let device = self.get_device_at_index(n)?;
405            let devname = device.get_friendlyname()?;
406            if name == devname {
407                return Ok(device);
408            }
409        }
410        Err(WasapiError::DeviceNotFound(name.to_owned()))
411    }
412
413    /// Get the direction for this [DeviceCollection]
414    pub fn get_direction(&self) -> Direction {
415        self.direction
416    }
417}
418
/// Iterator for [DeviceCollection]
pub struct DeviceCollectionIter<'a> {
    /// The collection being iterated over.
    collection: &'a DeviceCollection,
    /// Index of the next device to yield.
    index: u32,
}
424
425impl Iterator for DeviceCollectionIter<'_> {
426    type Item = WasapiRes<Device>;
427
428    fn next(&mut self) -> Option<Self::Item> {
429        if self.index < self.collection.get_nbr_devices().unwrap() {
430            let device = self.collection.get_device_at_index(self.index);
431            self.index += 1;
432            Some(device)
433        } else {
434            None
435        }
436    }
437}
438
439/// Implement iterator for [DeviceCollection]
440impl<'a> IntoIterator for &'a DeviceCollection {
441    type Item = WasapiRes<Device>;
442    type IntoIter = DeviceCollectionIter<'a>;
443
444    fn into_iter(self) -> Self::IntoIter {
445        DeviceCollectionIter {
446            collection: self,
447            index: 0,
448        }
449    }
450}
451
/// Struct wrapping an [IMMDevice](https://docs.microsoft.com/en-us/windows/win32/api/mmdeviceapi/nn-mmdeviceapi-immdevice).
pub struct Device {
    /// The underlying COM device interface.
    device: IMMDevice,
    /// Data flow direction of the device endpoint.
    direction: Direction,
}
457
458impl Device {
459    /// Build a [Device] from a supplied [IMMDevice] and [Direction]
460    ///
461    /// # Safety
462    ///
463    /// The caller must ensure that the [IMMDevice]'s data flow direction
464    /// is the same as the [Direction] supplied to the function.
465    ///
466    /// Use [Device::from_immdevice], which queries the endpoint, for safe construction.
467    pub unsafe fn from_raw(device: IMMDevice, direction: Direction) -> Device {
468        Device { device, direction }
469    }
470
471    /// Attempts to build a [Device] from a supplied [IMMDevice],
472    /// querying the endpoint for its data flow direction.
473    pub fn from_immdevice(device: IMMDevice) -> WasapiRes<Device> {
474        let endpoint: IMMEndpoint = device.cast()?;
475        let direction: Direction = unsafe { endpoint.GetDataFlow()? }.try_into()?;
476
477        Ok(Device { device, direction })
478    }
479
480    /// Get an [IAudioClient] from an [IMMDevice]
481    pub fn get_iaudioclient(&self) -> WasapiRes<AudioClient> {
482        let audio_client = unsafe { self.device.Activate::<IAudioClient>(CLSCTX_ALL, None)? };
483        Ok(AudioClient {
484            client: audio_client,
485            direction: self.direction,
486            sharemode: None,
487            timingmode: None,
488            bytes_per_frame: None,
489        })
490    }
491
492    /// Gets an [IAudioSessionManager] from an [IMMDevice]
493    pub fn get_iaudiosessionmanager(&self) -> WasapiRes<AudioSessionManager> {
494        let session_manager = unsafe {
495            self.device
496                .Activate::<IAudioSessionManager>(CLSCTX_ALL, None)?
497        };
498        Ok(AudioSessionManager { session_manager })
499    }
500
501    /// Read state from an [IMMDevice]
502    pub fn get_state(&self) -> WasapiRes<DeviceState> {
503        let state = unsafe { self.device.GetState()? };
504        trace!("state: {state:?}");
505        let state_enum = match state {
506            _ if state == DEVICE_STATE_ACTIVE => DeviceState::Active,
507            _ if state == DEVICE_STATE_DISABLED => DeviceState::Disabled,
508            _ if state == DEVICE_STATE_NOTPRESENT => DeviceState::NotPresent,
509            _ if state == DEVICE_STATE_UNPLUGGED => DeviceState::Unplugged,
510            x => return Err(WasapiError::IllegalDeviceState(x.0)),
511        };
512        Ok(state_enum)
513    }
514
515    /// Read the friendly name of the endpoint device (for example, "Speakers (XYZ Audio Adapter)")
516    pub fn get_friendlyname(&self) -> WasapiRes<String> {
517        self.get_string_property(&PKEY_Device_FriendlyName)
518    }
519
520    /// Read the friendly name of the audio adapter to which the endpoint device is attached (for example, "XYZ Audio Adapter")
521    pub fn get_interface_friendlyname(&self) -> WasapiRes<String> {
522        self.get_string_property(&PKEY_DeviceInterface_FriendlyName)
523    }
524
525    /// Read the device description of the endpoint device (for example, "Speakers")
526    pub fn get_description(&self) -> WasapiRes<String> {
527        self.get_string_property(&PKEY_Device_DeviceDesc)
528    }
529
530    /// Read the device format of the endpoint device, which is the format that the user has selected for the stream
531    /// that flows between the audio engine and the audio endpoint device when the device operates in shared mode.
532    pub fn get_device_format(&self) -> WasapiRes<WaveFormat> {
533        let data = self.get_blob_property(&PKEY_AudioEngine_DeviceFormat)?;
534        // SAFETY: PKEY_AudioEngine_DeviceFormat is guaranteed to be a WAVEFORMATEX structure based on MSFT docs:
535        // https://learn.microsoft.com/en-us/windows/win32/coreaudio/pkey-audioengine-deviceformat
536        let waveformatex: &WAVEFORMATEX = unsafe { &*(data.as_ptr() as *const _) };
537        WaveFormat::parse(waveformatex)
538    }
539
540    /// Read a string property from an [IMMDevice]
541    fn get_string_property(&self, key: &PROPERTYKEY) -> WasapiRes<String> {
542        self.get_property(key, Self::parse_string_property)
543    }
544
545    /// Read a BLOB property from an [IMMDevice]
546    fn get_blob_property(&self, key: &PROPERTYKEY) -> WasapiRes<Vec<u8>> {
547        self.get_property(key, Self::parse_blob_property)
548    }
549
550    /// Read a property from an [IMMDevice] and parse it
551    fn get_property<T>(
552        &self,
553        key: &PROPERTYKEY,
554        parse: impl FnOnce(&PROPVARIANT) -> WasapiRes<T>,
555    ) -> WasapiRes<T> {
556        let store = unsafe { self.device.OpenPropertyStore(STGM_READ)? };
557        let mut prop = unsafe { store.GetValue(key)? };
558        let ret = parse(&prop);
559        unsafe { PropVariantClear(&mut prop) }?;
560        ret
561    }
562
563    /// Parse a device string property to String
564    fn parse_string_property(prop: &PROPVARIANT) -> WasapiRes<String> {
565        let propstr = unsafe { PropVariantToStringAlloc(prop)? };
566        let name = unsafe { propstr.to_string()? };
567        unsafe { CoTaskMemFree(Some(propstr.0 as _)) };
568        trace!("name: {name}");
569        Ok(name)
570    }
571
572    /// Parse a device blob property to Vec<u8>
573    fn parse_blob_property(prop: &PROPVARIANT) -> WasapiRes<Vec<u8>> {
574        if prop.vt() != VT_BLOB {
575            return Err(windows::core::Error::from(E_INVALIDARG).into());
576        }
577        let blob = unsafe { prop.Anonymous.Anonymous.Anonymous.blob };
578        let blob_slice = unsafe { slice::from_raw_parts(blob.pBlobData, blob.cbSize as usize) };
579        let data = blob_slice.to_vec();
580        Ok(data)
581    }
582
583    /// Get the Id of an [IMMDevice]
584    pub fn get_id(&self) -> WasapiRes<String> {
585        let idstr = unsafe { self.device.GetId()? };
586        //let wide_id = unsafe { U16CString::from_ptr_str(idstr.0) };
587        let id = unsafe { idstr.to_string()? };
588        unsafe { CoTaskMemFree(Some(idstr.0 as _)) };
589        //let id = wide_id.to_string_lossy();
590        trace!("id: {id}");
591        Ok(id)
592    }
593
594    /// Get the direction for this Device
595    pub fn get_direction(&self) -> Direction {
596        self.direction
597    }
598}
599
/// COM completion handler used by [AudioClient::new_application_loopback_client]
/// to signal, via the shared flag and condvar, when async activation has finished.
#[implement(IActivateAudioInterfaceCompletionHandler)]
struct Handler(Arc<(Mutex<bool>, Condvar)>);
602
impl Handler {
    /// Create a new [Handler] sharing the given completion flag and condvar.
    pub fn new(object: Arc<(Mutex<bool>, Condvar)>) -> Handler {
        Handler(object)
    }
}
608
609impl IActivateAudioInterfaceCompletionHandler_Impl for Handler_Impl {
610    fn ActivateCompleted(
611        &self,
612        _activateoperation: Ref<IActivateAudioInterfaceAsyncOperation>,
613    ) -> windows::core::Result<()> {
614        let (lock, cvar) = &*self.0;
615        let mut completed = lock.lock().unwrap();
616        *completed = true;
617        drop(completed);
618        cvar.notify_one();
619        Ok(())
620    }
621}
622
/// Struct wrapping an [IAudioClient](https://docs.microsoft.com/en-us/windows/win32/api/audioclient/nn-audioclient-iaudioclient).
pub struct AudioClient {
    /// The underlying COM audio client interface.
    client: IAudioClient,
    /// Data flow direction of the device this client was created from.
    direction: Direction,
    /// Share mode, when known; `None` until it has been set up.
    sharemode: Option<ShareMode>,
    /// Timing mode, when known; `None` until it has been set up.
    timingmode: Option<TimingMode>,
    /// Size of one audio frame in bytes; `None` until a format has been set.
    bytes_per_frame: Option<usize>,
}
631
632impl AudioClient {
633    /// Creates a loopback capture [AudioClient] for a specific process.
634    ///
635    /// `include_tree` is equivalent to [PROCESS_LOOPBACK_MODE](https://learn.microsoft.com/en-us/windows/win32/api/audioclientactivationparams/ne-audioclientactivationparams-process_loopback_mode).
636    /// If true, the loopback capture client will capture audio from the target process and all its child processes,
637    /// if false only audio from the target process is captured.
638    ///
639    /// On versions of Windows prior to Windows 10, the thread calling this function
    /// must be called in a COM Single-Threaded Apartment (STA).
641    ///
642    /// Additionally when calling [AudioClient::initialize_client] on the client returned by this method,
643    /// the caller must use [Direction::Capture], and [ShareMode::Shared].
644    /// Finally calls to [AudioClient::get_device_period] do not work,
645    /// however the period passed by the caller to [AudioClient::initialize_client] is irrelevant.
646    ///
647    /// # Non-functional methods
648    /// In process loopback mode, the functionality of the AudioClient is limited.
649    /// The following methods either do not work, or return incorrect results:
650    /// * `get_mixformat` just returns `Not implemented`.
651    /// * `is_supported` just returns `Not implemented` even if the format and mode work.
652    /// * `is_supported_exclusive_with_quirks` just returns `Unable to find a supported format`.
653    /// * `get_device_period` just returns `Not implemented`.
654    /// * `calculate_aligned_period_near` just returns `Not implemented` even for values that would later work.
655    /// * `get_buffer_size` returns huge values like 3131961357 but no error.
656    /// * `get_current_padding` just returns `Not implemented`.
657    /// * `get_available_space_in_frames` just returns `Client has not been initialised` even if it has.
658    /// * `get_audiorenderclient` just returns `No such interface supported`.
659    /// * `get_audiosessioncontrol` just returns `No such interface supported`.
660    /// * `get_audioclock` just returns `No such interface supported`.
661    /// * `get_sharemode` always returns `None` when it should return `Shared` after initialisation.
662    ///
663    /// # Example
664    /// ```
665    /// use wasapi::{WaveFormat, SampleType, AudioClient, Direction, StreamMode, initialize_mta};
666    /// let desired_format = WaveFormat::new(32, 32, &SampleType::Float, 44100, 2, None);
667    /// let buffer_duration_hns = 200_000; // 20ms in hundreds of nanoseconds
668    /// let autoconvert = true;
669    /// let include_tree = false;
670    /// let process_id = std::process::id();
671    ///
672    /// initialize_mta().ok().unwrap(); // Don't do this on a UI thread
673    /// let mut audio_client = AudioClient::new_application_loopback_client(process_id, include_tree).unwrap();
674    /// let mode = StreamMode::EventsShared { autoconvert, buffer_duration_hns };
675    /// audio_client.initialize_client(
676    ///     &desired_format,
677    ///     &Direction::Capture,
678    ///     &mode
679    /// ).unwrap();
680    /// ```
    pub fn new_application_loopback_client(process_id: u32, include_tree: bool) -> WasapiRes<Self> {
        unsafe {
            // Create audio client
            // Activation parameters selecting process-loopback capture for the
            // given process, optionally including its child processes.
            let mut audio_client_activation_params = AUDIOCLIENT_ACTIVATION_PARAMS {
                ActivationType: AUDIOCLIENT_ACTIVATION_TYPE_PROCESS_LOOPBACK,
                Anonymous: AUDIOCLIENT_ACTIVATION_PARAMS_0 {
                    ProcessLoopbackParams: AUDIOCLIENT_PROCESS_LOOPBACK_PARAMS {
                        TargetProcessId: process_id,
                        ProcessLoopbackMode: if include_tree {
                            PROCESS_LOOPBACK_MODE_INCLUDE_TARGET_PROCESS_TREE
                        } else {
                            PROCESS_LOOPBACK_MODE_EXCLUDE_TARGET_PROCESS_TREE
                        },
                    },
                },
            };
            // Pin the params so the raw blob pointer below stays valid.
            let pinned_params = Pin::new(&mut audio_client_activation_params);

            // Wrap the activation params in a VT_BLOB PROPVARIANT that points
            // at the pinned struct (no copy is made).
            let raw_prop = PROPVARIANT {
                Anonymous: PROPVARIANT_0 {
                    Anonymous: ManuallyDrop::new(PROPVARIANT_0_0 {
                        vt: VT_BLOB,
                        wReserved1: 0,
                        wReserved2: 0,
                        wReserved3: 0,
                        Anonymous: PROPVARIANT_0_0_0 {
                            blob: BLOB {
                                cbSize: size_of::<AUDIOCLIENT_ACTIVATION_PARAMS>() as u32,
                                pBlobData: pinned_params.get_mut() as *const _ as *mut _,
                            },
                        },
                    }),
                },
            };

            // ManuallyDrop: the blob borrows stack data, so the PROPVARIANT
            // must not run its normal cleanup on drop.
            let activation_prop = ManuallyDrop::new(raw_prop);
            let pinned_prop = Pin::new(activation_prop.deref());
            let activation_params = Some(pinned_prop.get_ref() as *const _);

            // Create completion handler
            // Shared flag + condvar signalled by the handler when activation completes.
            let setup = Arc::new((Mutex::new(false), Condvar::new()));
            let callback: IActivateAudioInterfaceCompletionHandler =
                Handler::new(setup.clone()).into();

            // Activate audio interface
            let operation = ActivateAudioInterfaceAsync(
                VIRTUAL_AUDIO_DEVICE_PROCESS_LOOPBACK,
                &IAudioClient::IID,
                activation_params,
                &callback,
            )?;

            // Wait for completion
            // Block until ActivateCompleted flips the flag; the loop guards
            // against spurious condvar wakeups.
            let (lock, cvar) = &*setup;
            let mut completed = lock.lock().unwrap();
            while !*completed {
                completed = cvar.wait(completed).unwrap();
            }
            drop(completed);

            // Get audio client and result
            let mut audio_client: Option<IUnknown> = Default::default();
            let mut result: HRESULT = Default::default();
            operation.GetActivateResult(&mut result, &mut audio_client)?;

            // Ensure successful activation
            result.ok()?;
            // always safe to unwrap if result above is checked first
            let audio_client: IAudioClient = audio_client.unwrap().cast()?;

            // NOTE(review): direction is stored as Render even though this
            // client captures; confirm this matches how initialization and
            // the capture path use the field.
            Ok(AudioClient {
                client: audio_client,
                direction: Direction::Render,
                sharemode: Some(ShareMode::Shared),
                timingmode: None,
                bytes_per_frame: None,
            })
        }
    }
760
761    /// Get MixFormat of the device. This is the format the device uses in shared mode and should always be accepted.
762    pub fn get_mixformat(&self) -> WasapiRes<WaveFormat> {
763        let temp_fmt_ptr = unsafe { self.client.GetMixFormat()? };
764        let temp_fmt = unsafe { *temp_fmt_ptr };
765        let mix_format =
766            if temp_fmt.cbSize == 22 && temp_fmt.wFormatTag as u32 == WAVE_FORMAT_EXTENSIBLE {
767                unsafe {
768                    WaveFormat {
769                        wave_fmt: (temp_fmt_ptr as *const _ as *const WAVEFORMATEXTENSIBLE).read(),
770                    }
771                }
772            } else {
773                WaveFormat::from_waveformatex(temp_fmt)?
774            };
775        Ok(mix_format)
776    }
777
778    /// Check if a format is supported.
779    /// If it's directly supported, this returns Ok(None). If not, but a similar format is, then the nearest matching supported format is returned as Ok(Some(WaveFormat)).
780    ///
781    /// NOTE: For exclusive mode, this function may not always give the right result for 1- and 2-channel formats.
782    /// From the [Microsoft documentation](https://docs.microsoft.com/en-us/windows/win32/coreaudio/device-formats#specifying-the-device-format):
783    /// > For exclusive-mode formats, the method queries the device driver.
784    /// > Some device drivers will report that they support a 1-channel or 2-channel PCM format if the format is specified by a stand-alone WAVEFORMATEX structure,
785    /// > but will reject the same format if it is specified by a WAVEFORMATEXTENSIBLE structure.
786    /// > To obtain reliable results from these drivers, exclusive-mode applications should call IsFormatSupported twice for each 1-channel or 2-channel PCM format.
787    /// > One call should use a stand-alone WAVEFORMATEX structure to specify the format, and the other call should use a WAVEFORMATEXTENSIBLE structure to specify the same format.
788    ///
789    /// If the first call fails, use [WaveFormat::to_waveformatex] to get a copy of the WaveFormat in the simpler WAVEFORMATEX representation.
790    /// Then call this function again with the new WafeFormat structure.
791    /// If the driver then reports that the format is supported, use the original WaveFormat structure when calling [AudioClient::initialize_client].
792    ///
793    /// See also the helper function [is_supported_exclusive_with_quirks](AudioClient::is_supported_exclusive_with_quirks).
794    pub fn is_supported(
795        &self,
796        wave_fmt: &WaveFormat,
797        sharemode: &ShareMode,
798    ) -> WasapiRes<Option<WaveFormat>> {
799        let supported = match sharemode {
800            ShareMode::Exclusive => {
801                unsafe {
802                    self.client
803                        .IsFormatSupported(
804                            AUDCLNT_SHAREMODE_EXCLUSIVE,
805                            wave_fmt.as_waveformatex_ref(),
806                            None,
807                        )
808                        .ok()?
809                };
810                None
811            }
812            ShareMode::Shared => {
813                let mut supported_format: *mut WAVEFORMATEX = std::ptr::null_mut();
814                unsafe {
815                    self.client
816                        .IsFormatSupported(
817                            AUDCLNT_SHAREMODE_SHARED,
818                            wave_fmt.as_waveformatex_ref(),
819                            Some(&mut supported_format),
820                        )
821                        .ok()?
822                };
823                // Check if we got a pointer to a WAVEFORMATEX structure.
824                if supported_format.is_null() {
825                    // The pointer is still null, thus the format is supported as is.
826                    debug!("The requested format is supported");
827                    None
828                } else {
829                    // Read the structure
830                    let temp_fmt: WAVEFORMATEX = unsafe { supported_format.read() };
831                    debug!("The requested format is not supported but a simular one is");
832                    let new_fmt = if temp_fmt.cbSize == 22
833                        && temp_fmt.wFormatTag as u32 == WAVE_FORMAT_EXTENSIBLE
834                    {
835                        debug!("got the nearest matching format as a WAVEFORMATEXTENSIBLE");
836                        let temp_fmt_ext: WAVEFORMATEXTENSIBLE = unsafe {
837                            (supported_format as *const _ as *const WAVEFORMATEXTENSIBLE).read()
838                        };
839                        WaveFormat {
840                            wave_fmt: temp_fmt_ext,
841                        }
842                    } else {
843                        debug!("got the nearest matching format as a WAVEFORMATEX, converting..");
844                        WaveFormat::from_waveformatex(temp_fmt)?
845                    };
846                    Some(new_fmt)
847                }
848            }
849        };
850        Ok(supported)
851    }
852
853    /// A helper function for checking if a format is supported.
854    /// It calls `is_supported` several times with different options
855    /// in order to find a format that the device accepts.
856    ///
857    /// The alternatives it tries are:
858    /// - The format as given.
859    /// - If one or two channels, try with the format as WAVEFORMATEX.
860    /// - Try with different channel masks:
861    ///   - If channels <= 8: Recommended mask(s) from ksmedia.h.
862    ///   - If channels <= 18: Simple mask.
863    ///   - Zero mask.
864    ///
865    /// If an accepted format is found, this is returned.
866    /// An error means no accepted format was found.
867    pub fn is_supported_exclusive_with_quirks(
868        &self,
869        wave_fmt: &WaveFormat,
870    ) -> WasapiRes<WaveFormat> {
871        let mut wave_fmt = wave_fmt.clone();
872        let supported_direct = self.is_supported(&wave_fmt, &ShareMode::Exclusive);
873        if supported_direct.is_ok() {
874            debug!("The requested format is supported as provided");
875            return Ok(wave_fmt);
876        }
877        if wave_fmt.get_nchannels() <= 2 {
878            debug!("Repeating query with format as WAVEFORMATEX");
879            let wave_formatex = wave_fmt.to_waveformatex().unwrap();
880            if self
881                .is_supported(&wave_formatex, &ShareMode::Exclusive)
882                .is_ok()
883            {
884                debug!("The requested format is supported as WAVEFORMATEX");
885                return Ok(wave_formatex);
886            }
887        }
888        let masks = make_channelmasks(wave_fmt.get_nchannels() as usize);
889        for mask in masks {
890            debug!("Repeating query with channel mask: {mask:#010b}");
891            wave_fmt.wave_fmt.dwChannelMask = mask;
892            if self.is_supported(&wave_fmt, &ShareMode::Exclusive).is_ok() {
893                debug!("The requested format is supported with a modified mask: {mask:#010b}");
894                return Ok(wave_fmt);
895            }
896        }
897        Err(WasapiError::UnsupportedFormat)
898    }
899
900    /// Get default and minimum periods in 100-nanosecond units
901    pub fn get_device_period(&self) -> WasapiRes<(i64, i64)> {
902        let mut def_time = 0;
903        let mut min_time = 0;
904        unsafe {
905            self.client
906                .GetDevicePeriod(Some(&mut def_time), Some(&mut min_time))?
907        };
908        trace!("default period {def_time}, min period {min_time}");
909        Ok((def_time, min_time))
910    }
911
    /// Deprecated alias for [AudioClient::get_device_period].
    /// Returns the default and minimum periods in 100-nanosecond units.
    #[deprecated(
        since = "0.17.0",
        note = "please use the new function name `get_device_period` instead"
    )]
    pub fn get_periods(&self) -> WasapiRes<(i64, i64)> {
        self.get_device_period()
    }
919
920    /// Helper function for calculating a period size in 100-nanosecond units that is near a desired value,
921    /// and always larger than the minimum value supported by the device.
922    /// The returned value leads to a device buffer size that is aligned both to the frame size of the format,
923    /// and the optional align_bytes value.
924    /// This parameter is used for devices that require the buffer size to be a multiple of a certain number of bytes.
925    /// Give None, Some(0) or Some(1) if the device has no special requirements for the alignment.
926    /// For example, all devices following the Intel High Definition Audio specification require buffer sizes in multiples of 128 bytes.
927    ///
928    /// See also the `playnoise_exclusive` example.
929    pub fn calculate_aligned_period_near(
930        &self,
931        desired_period: i64,
932        align_bytes: Option<u32>,
933        wave_fmt: &WaveFormat,
934    ) -> WasapiRes<i64> {
935        let (_default_period, min_period) = self.get_device_period()?;
936        let adjusted_desired_period = cmp::max(desired_period, min_period);
937        let frame_bytes = wave_fmt.get_blockalign();
938        let period_alignment_bytes = match align_bytes {
939            Some(0) => frame_bytes,
940            Some(bytes) => frame_bytes.lcm(&bytes),
941            None => frame_bytes,
942        };
943        let period_alignment_frames = period_alignment_bytes as i64 / frame_bytes as i64;
944        let desired_period_frames =
945            (adjusted_desired_period as f64 * wave_fmt.get_samplespersec() as f64 / 10000000.0)
946                .round() as i64;
947        let min_period_frames =
948            (min_period as f64 * wave_fmt.get_samplespersec() as f64 / 10000000.0).ceil() as i64;
949        let mut nbr_segments = desired_period_frames / period_alignment_frames;
950        if nbr_segments * period_alignment_frames < min_period_frames {
951            // Add one segment if the value got rounded down below the minimum
952            nbr_segments += 1;
953        }
954        let aligned_period = calculate_period_100ns(
955            period_alignment_frames * nbr_segments,
956            wave_fmt.get_samplespersec() as i64,
957        );
958        Ok(aligned_period)
959    }
960
961    /// Initialize an [AudioClient] for the given direction, sharemode, timing mode and format.
962    /// This method wraps [IAudioClient::Initialize()](https://learn.microsoft.com/en-us/windows/win32/api/audioclient/nf-audioclient-iaudioclient-initialize).
963    ///
964    /// ### Sharing mode
965    /// In WASAPI, sharing mode determines how multiple audio applications interact with the same audio endpoint.
966    /// There are two primary sharing modes: Shared and Exclusive.
967    /// #### Shared Mode ([ShareMode::Shared])
968    /// - Multiple applications can simultaneously access the audio device.
969    /// - The system's audio engine mixes the audio streams from all applications.
970    /// - The application has no control over the sample rate and format used by the device.
971    /// - The audio engine can perform automatic sample rate and format conversion,
972    ///   meaning that almost any format can be accepted.
973    ///
974    /// #### Exclusive Mode ([ShareMode::Exclusive])
975    /// - Only one application can access the audio device at a time.
976    /// - This mode provides lower latency but requires the device to support the exact audio format requested.
977    /// - The application can control the sample rate and format used by the device.
978    ///
979    /// ### Timing mode
980    /// Event-driven mode and polling mode are two different ways of handling audio buffer updates.
981    ///
982    /// #### Event-Driven Mode ([TimingMode::Events])
983    ///   - In this mode, the application registers an event handle using [AudioClient::set_get_eventhandle()].
984    ///   - The system signals this event whenever a new buffer of audio data is ready to be processed (either for rendering or capture).
985    ///   - The application's audio processing thread waits on this event ([Handle::wait_for_event()]).
986    ///   - When the event is signaled, the thread wakes up to processes the available data, and then goes back to waiting.
987    ///   - This mode is generally more efficient because the application only wakes up when there's work to do.
988    ///   - It's suitable for real-time audio applications where low latency is important.
989    ///   - This mode is not supported by all devices in exclusive mode (but all devices are supported in shared mode).
990    ///   - In exclusive mode, devices using the standard Windows USB audio driver can have issues
991    ///     with stuttering sound on playback.
992    ///
993    /// #### Polling Mode ([TimingMode::Polling])
994    ///   - In this mode, the application periodically calls [AudioClient::get_current_padding()] (for capture)
995    ///     or [AudioClient::get_available_space_in_frames()] (for playback)
996    ///     to check how much data is available or required.
997    ///   - The thread processes the data, and then goes to sleep, for example by calling [std::thread::sleep()].
998    ///   - This mode is less efficient and is more prone to glitches when running at low latency.
999    ///   - In exclusive mode, it supports more devices, and does not have the stuttering issue with USB audio devices.
1000    pub fn initialize_client(
1001        &mut self,
1002        wavefmt: &WaveFormat,
1003        direction: &Direction,
1004        stream_mode: &StreamMode,
1005    ) -> WasapiRes<()> {
1006        let sharemode = match stream_mode {
1007            StreamMode::PollingShared { .. } | StreamMode::EventsShared { .. } => ShareMode::Shared,
1008            StreamMode::PollingExclusive { .. } | StreamMode::EventsExclusive { .. } => {
1009                ShareMode::Exclusive
1010            }
1011        };
1012        let timing = match stream_mode {
1013            StreamMode::PollingShared { .. } | StreamMode::PollingExclusive { .. } => {
1014                TimingMode::Polling
1015            }
1016            StreamMode::EventsShared { .. } | StreamMode::EventsExclusive { .. } => {
1017                TimingMode::Events
1018            }
1019        };
1020        let mut streamflags = match (&self.direction, direction, sharemode) {
1021            (Direction::Render, Direction::Capture, ShareMode::Shared) => {
1022                AUDCLNT_STREAMFLAGS_LOOPBACK
1023            }
1024            (Direction::Render, Direction::Capture, ShareMode::Exclusive) => {
1025                return Err(WasapiError::LoopbackWithExclusiveMode);
1026            }
1027            (Direction::Capture, Direction::Render, _) => {
1028                return Err(WasapiError::RenderToCaptureDevice);
1029            }
1030            _ => 0,
1031        };
1032        match stream_mode {
1033            StreamMode::PollingShared { autoconvert, .. }
1034            | StreamMode::EventsShared { autoconvert, .. } => {
1035                if *autoconvert {
1036                    streamflags |= AUDCLNT_STREAMFLAGS_AUTOCONVERTPCM
1037                        | AUDCLNT_STREAMFLAGS_SRC_DEFAULT_QUALITY;
1038                }
1039            }
1040            _ => {}
1041        }
1042        if timing == TimingMode::Events {
1043            streamflags |= AUDCLNT_STREAMFLAGS_EVENTCALLBACK;
1044        }
1045        let mode = match sharemode {
1046            ShareMode::Exclusive => AUDCLNT_SHAREMODE_EXCLUSIVE,
1047            ShareMode::Shared => AUDCLNT_SHAREMODE_SHARED,
1048        };
1049        let (period, buffer_duration) = match stream_mode {
1050            StreamMode::PollingShared {
1051                buffer_duration_hns,
1052                ..
1053            } => (0, *buffer_duration_hns),
1054            StreamMode::EventsShared {
1055                buffer_duration_hns,
1056                ..
1057            } => (0, *buffer_duration_hns),
1058            StreamMode::PollingExclusive {
1059                period_hns,
1060                buffer_duration_hns,
1061            } => (*period_hns, *buffer_duration_hns),
1062            StreamMode::EventsExclusive { period_hns, .. } => (*period_hns, *period_hns),
1063        };
1064        unsafe {
1065            self.client.Initialize(
1066                mode,
1067                streamflags,
1068                buffer_duration,
1069                period,
1070                wavefmt.as_waveformatex_ref(),
1071                None,
1072            )?;
1073        }
1074        self.direction = *direction;
1075        self.sharemode = Some(sharemode);
1076        self.timingmode = Some(timing);
1077        self.bytes_per_frame = Some(wavefmt.get_blockalign() as usize);
1078        Ok(())
1079    }
1080
1081    /// Create and return an event handle for an [AudioClient].
1082    /// This is required when using an [AudioClient] initialized for event driven mode, [TimingMode::Events].
1083    pub fn set_get_eventhandle(&self) -> WasapiRes<Handle> {
1084        let h_event = unsafe { CreateEventA(None, false, false, PCSTR::null())? };
1085        unsafe { self.client.SetEventHandle(h_event)? };
1086        Ok(Handle { handle: h_event })
1087    }
1088
1089    /// Get buffer size in frames,
1090    /// see [IAudioClient::GetBufferSize](https://learn.microsoft.com/en-us/windows/win32/api/audioclient/nf-audioclient-iaudioclient-getbuffersize).
1091    pub fn get_buffer_size(&self) -> WasapiRes<u32> {
1092        let buffer_frame_count = unsafe { self.client.GetBufferSize()? };
1093        trace!("buffer_frame_count {buffer_frame_count}");
1094        Ok(buffer_frame_count)
1095    }
1096
    /// Deprecated alias for [AudioClient::get_buffer_size].
    /// Returns the buffer size in frames.
    #[deprecated(
        since = "0.17.0",
        note = "please use the new function name `get_buffer_size` instead"
    )]
    pub fn get_bufferframecount(&self) -> WasapiRes<u32> {
        self.get_buffer_size()
    }
1104
1105    /// Get current padding in frames.
1106    /// This represents the number of frames currently in the buffer, for both capture and render devices.
1107    /// The exact meaning depends on how the AudioClient was initialized, see
1108    /// [IAudioClient::GetCurrentPadding](https://learn.microsoft.com/en-us/windows/win32/api/audioclient/nf-audioclient-iaudioclient-getcurrentpadding).
1109    pub fn get_current_padding(&self) -> WasapiRes<u32> {
1110        let padding_count = unsafe { self.client.GetCurrentPadding()? };
1111        trace!("padding_count {padding_count}");
1112        Ok(padding_count)
1113    }
1114
1115    /// Get buffer size minus padding in frames.
1116    /// Use this to find out how much free space is available in the buffer.
1117    pub fn get_available_space_in_frames(&self) -> WasapiRes<u32> {
1118        let frames = match (self.sharemode, self.timingmode) {
1119            (Some(ShareMode::Exclusive), Some(TimingMode::Events)) => {
1120                let buffer_frame_count = unsafe { self.client.GetBufferSize()? };
1121                trace!("buffer_frame_count {buffer_frame_count}");
1122                buffer_frame_count
1123            }
1124            (Some(_), Some(_)) => {
1125                let padding_count = unsafe { self.client.GetCurrentPadding()? };
1126                let buffer_frame_count = unsafe { self.client.GetBufferSize()? };
1127
1128                buffer_frame_count - padding_count
1129            }
1130            _ => return Err(WasapiError::ClientNotInit),
1131        };
1132        Ok(frames)
1133    }
1134
1135    /// Start the stream on an [IAudioClient]
1136    pub fn start_stream(&self) -> WasapiRes<()> {
1137        unsafe { self.client.Start()? };
1138        Ok(())
1139    }
1140
1141    /// Stop the stream on an [IAudioClient]
1142    pub fn stop_stream(&self) -> WasapiRes<()> {
1143        unsafe { self.client.Stop()? };
1144        Ok(())
1145    }
1146
1147    /// Reset the stream on an [IAudioClient]
1148    pub fn reset_stream(&self) -> WasapiRes<()> {
1149        unsafe { self.client.Reset()? };
1150        Ok(())
1151    }
1152
1153    /// Get a rendering (playback) client
1154    pub fn get_audiorenderclient(&self) -> WasapiRes<AudioRenderClient> {
1155        let client = unsafe { self.client.GetService::<IAudioRenderClient>()? };
1156        Ok(AudioRenderClient {
1157            client,
1158            bytes_per_frame: self.bytes_per_frame.unwrap_or_default(),
1159        })
1160    }
1161
1162    /// Get a capture client
1163    pub fn get_audiocaptureclient(&self) -> WasapiRes<AudioCaptureClient> {
1164        let client = unsafe { self.client.GetService::<IAudioCaptureClient>()? };
1165        Ok(AudioCaptureClient {
1166            client,
1167            sharemode: self.sharemode,
1168            bytes_per_frame: self.bytes_per_frame.unwrap_or_default(),
1169        })
1170    }
1171
1172    /// Get the [AudioSessionControl]
1173    pub fn get_audiosessioncontrol(&self) -> WasapiRes<AudioSessionControl> {
1174        let control = unsafe { self.client.GetService::<IAudioSessionControl>()? };
1175        Ok(AudioSessionControl { control })
1176    }
1177
1178    /// Get the [AudioClock]
1179    pub fn get_audioclock(&self) -> WasapiRes<AudioClock> {
1180        let clock = unsafe { self.client.GetService::<IAudioClock>()? };
1181        Ok(AudioClock { clock })
1182    }
1183
    /// Get the direction for this [AudioClient].
    /// Note that for a loopback capture client this is updated to
    /// [Direction::Capture] by [AudioClient::initialize_client].
    pub fn get_direction(&self) -> Direction {
        self.direction
    }
1188
    /// Get the sharemode for this [AudioClient].
    /// The sharemode is decided when the client is initialized.
    /// Returns `None` if the client has not been initialized yet.
    pub fn get_sharemode(&self) -> Option<ShareMode> {
        self.sharemode
    }
1194
    /// Get the timing mode for this [AudioClient].
    /// The mode is decided when the client is initialized.
    /// Returns `None` if the client has not been initialized yet.
    pub fn get_timing_mode(&self) -> Option<TimingMode> {
        self.timingmode
    }
1200
1201    /// Get the Acoustic Echo Cancellation Control.
1202    /// If it succeeds, the capture endpoint supports control of the loopback reference endpoint for AEC.
1203    pub fn get_aec_control(&self) -> WasapiRes<AcousticEchoCancellationControl> {
1204        let control = unsafe {
1205            self.client
1206                .GetService::<IAcousticEchoCancellationControl>()?
1207        };
1208        Ok(AcousticEchoCancellationControl { control })
1209    }
1210
1211    /// Get the Audio Effects Manager.
1212    pub fn get_audio_effects_manager(&self) -> WasapiRes<AudioEffectsManager> {
1213        let manager = unsafe { self.client.GetService::<IAudioEffectsManager>()? };
1214        Ok(AudioEffectsManager { manager })
1215    }
1216
1217    /// Set the category of an audio stream.
1218    ///
1219    /// It is recommended to use [Self::set_properties] with [AudioClientProperties] instead, as this method only
1220    /// sets the audio stream category, and hence is a subset of available properties.
1221    #[deprecated(
1222        since = "0.20.0",
1223        note = "please use the new function name `set_properties` instead"
1224    )]
1225    pub fn set_audio_stream_category(&self, category: AUDIO_STREAM_CATEGORY) -> WasapiRes<()> {
1226        let audio_client_2 = self.client.cast::<IAudioClient2>()?;
1227        let properties = AudioClientProperties::new().set_category(category);
1228        unsafe { audio_client_2.SetClientProperties(&properties.0)? };
1229        Ok(())
1230    }
1231
1232    /// Set properties of the client's audio stream.
1233    pub fn set_properties(&self, properties: AudioClientProperties) -> WasapiRes<()> {
1234        let audio_client_2 = self.client.cast::<IAudioClient2>()?;
1235        unsafe { audio_client_2.SetClientProperties(&properties.0)? };
1236        Ok(())
1237    }
1238
1239    /// Check if the Acoustic Echo Cancellation (AEC) is supported.
1240    pub fn is_aec_supported(&self) -> WasapiRes<bool> {
1241        if !self.is_aec_effect_present()? {
1242            return Ok(false);
1243        }
1244
1245        match unsafe { self.client.GetService::<IAcousticEchoCancellationControl>() } {
1246            Ok(_) => Ok(true),
1247            Err(err) if err == E_NOINTERFACE.into() => Ok(false),
1248            Err(err) => Err(err.into()),
1249        }
1250    }
1251
1252    /// Check if the Acoustic Echo Cancellation (AEC) effect is currently present.
1253    fn is_aec_effect_present(&self) -> WasapiRes<bool> {
1254        // IAudioEffectsManager requires Windows 11 (build 22000 or higher).
1255        let audio_effects_manager = match self.get_audio_effects_manager() {
1256            Ok(manager) => manager,
1257            Err(WasapiError::Windows(win_err)) if win_err == E_NOINTERFACE.into() => {
1258                // Audio effects manager is not supported, so clearly not present.
1259                return Ok(false);
1260            }
1261            Err(err) => return Err(err),
1262        };
1263
1264        if let Some(audio_effects) = audio_effects_manager.get_audio_effects()? {
1265            // Check if the AEC effect is present in the list of audio effects.
1266            let is_present = audio_effects
1267                .iter()
1268                .any(|effect| effect.id == AUDIO_EFFECT_TYPE_ACOUSTIC_ECHO_CANCELLATION);
1269            return Ok(is_present);
1270        }
1271
1272        Ok(false)
1273    }
1274}
1275
/// A builder for constructing parameters that describe the properties of the client's audio stream.
///
/// Create one with [AudioClientProperties::new] or [Default::default], adjust it with the
/// `set_*` builder methods, and pass it to [AudioClient::set_properties].
///
/// Wrapper for
/// [AudioClientProperties](https://learn.microsoft.com/en-us/windows/win32/api/audioclient/ns-audioclient-audioclientproperties-r1).
#[derive(Copy, Clone, Debug)]
pub struct AudioClientProperties(windows::Win32::Media::Audio::AudioClientProperties);
1282
1283impl AudioClientProperties {
1284    /// Create a new [AudioClientProperties] struct with default values.
1285    pub fn new() -> Self {
1286        Self(windows::Win32::Media::Audio::AudioClientProperties {
1287            cbSize: size_of::<windows::Win32::Media::Audio::AudioClientProperties>() as u32,
1288            bIsOffload: FALSE,
1289            eCategory: AudioCategory_Other,
1290            Options: AUDCLNT_STREAMOPTIONS_NONE,
1291        })
1292    }
1293
1294    /// Set whether or not the audio stream is hardware-offloaded.
1295    pub fn set_offload(mut self, is_offload: bool) -> Self {
1296        self.0.bIsOffload = is_offload.into();
1297        self
1298    }
1299
1300    /// Specify the category of the audio stream.
1301    ///
1302    /// See [StreamCategory] for possible categories or use the
1303    /// [AUDIO_STREAM_CATEGORY](https://learn.microsoft.com/en-us/windows/win32/api/audiosessiontypes/ne-audiosessiontypes-audio_stream_category)
1304    /// constants directly.
1305    pub fn set_category<T>(mut self, category: T) -> Self
1306    where
1307        T: Into<AUDIO_STREAM_CATEGORY>,
1308    {
1309        self.0.eCategory = category.into();
1310        self
1311    }
1312
1313    /// Set an option for the audio stream.
1314    ///
1315    /// See [StreamOption] for possible options or use the
1316    /// [AUDCLNT_STREAMOPTIONS](https://learn.microsoft.com/en-us/windows/win32/api/audioclient/ne-audioclient-audclnt_streamoptions)
1317    /// constants directly.
1318    pub fn set_option<T>(mut self, option: T) -> Self
1319    where
1320        T: Into<AUDCLNT_STREAMOPTIONS>,
1321    {
1322        self.0.Options |= option.into();
1323        self
1324    }
1325}
1326
// Default mirrors `AudioClientProperties::new()`: not offloaded, category Other, no options.
impl Default for AudioClientProperties {
    fn default() -> Self {
        Self::new()
    }
}
1332
/// Struct wrapping an [IAudioSessionManager](https://learn.microsoft.com/en-us/windows/win32/api/audiopolicy/nn-audiopolicy-iaudiosessionmanager).
/// Used to enumerate the audio sessions of a device via [AudioSessionManager::get_audiosessionenumerator].
pub struct AudioSessionManager {
    session_manager: IAudioSessionManager,
}
1337
1338impl AudioSessionManager {
1339    /// Get the [IAudioSessionEnumerator]
1340    pub fn get_audiosessionenumerator(&self) -> WasapiRes<AudioSessionEnumerator> {
1341        let session_manager2: IAudioSessionManager2 = self.session_manager.cast()?;
1342        let session_enumerator = unsafe { session_manager2.GetSessionEnumerator()? };
1343
1344        Ok(AudioSessionEnumerator { session_enumerator })
1345    }
1346}
1347
/// Struct wrapping an [IAudioSessionEnumerator](https://learn.microsoft.com/en-us/windows/win32/api/audiopolicy/nn-audiopolicy-iaudiosessionenumerator).
/// Provides indexed access to the audio sessions of a device.
pub struct AudioSessionEnumerator {
    session_enumerator: IAudioSessionEnumerator,
}
1352
1353impl AudioSessionEnumerator {
1354    /// Get the count of sessions.
1355    pub fn get_count(&self) -> WasapiRes<i32> {
1356        Ok(unsafe { self.session_enumerator.GetCount()? })
1357    }
1358
1359    /// Get the [IAudioSessionControl] at the specified index.
1360    pub fn get_session(&self, index: i32) -> WasapiRes<AudioSessionControl> {
1361        let session = unsafe { self.session_enumerator.GetSession(index)? };
1362
1363        Ok(AudioSessionControl { control: session })
1364    }
1365}
1366
/// Specifies the category of an audio stream.
///
/// Convert to the underlying constant with [From]/[Into], see
/// the `From<StreamCategory> for AUDIO_STREAM_CATEGORY` implementation.
///
/// Wrapper for
/// [AUDIO_STREAM_CATEGORY](https://learn.microsoft.com/en-us/windows/win32/api/audiosessiontypes/ne-audiosessiontypes-audio_stream_category).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum StreamCategory {
    /// Other audio stream.
    Other,
    /// Media that will only stream when the app is in the foreground. This enumeration value has been deprecated. For
    /// more information, see the Remarks section.
    #[deprecated = "See `Remarks` in the Microsoft documentation."]
    ForegroundOnlyMedia,
    /// Real-time communications, such as VOIP or chat.
    Communications,
    /// Alert sounds.
    Alerts,
    /// Sound effects.
    SoundEffects,
    /// Game sound effects.
    GameEffects,
    /// Background audio for games.
    GameMedia,
    /// Game chat audio. Similar to [StreamCategory::Communications] except that GameChat will not attenuate other
    /// streams.
    GameChat,
    /// Speech.
    Speech,
    /// Stream that includes audio with dialog.
    Movie,
    /// Stream that includes audio without dialog.
    Media,
    /// Media is audio captured with the intent of capturing voice sources located in the ‘far field’. (Far away from
    /// the microphone.)
    FarFieldSpeech,
    /// Media is captured audio that requires consistent speech processing for the captured audio stream across all
    /// Windows devices. Used by applications that process speech data using machine learning algorithms.
    UniformSpeech,
    /// Media is audio captured with the intent of enabling dictation or typing by voice.
    VoiceTyping,
}
1407
// Map each wrapper variant to the corresponding windows-rs AUDIO_STREAM_CATEGORY constant.
impl From<StreamCategory> for AUDIO_STREAM_CATEGORY {
    fn from(category: StreamCategory) -> Self {
        // The deprecated ForegroundOnlyMedia variant must still be matched exhaustively.
        #[allow(deprecated)]
        match category {
            StreamCategory::Other => AudioCategory_Other,
            StreamCategory::ForegroundOnlyMedia => AudioCategory_ForegroundOnlyMedia,
            StreamCategory::Communications => AudioCategory_Communications,
            StreamCategory::Alerts => AudioCategory_Alerts,
            StreamCategory::SoundEffects => AudioCategory_SoundEffects,
            StreamCategory::GameEffects => AudioCategory_GameEffects,
            StreamCategory::GameMedia => AudioCategory_GameMedia,
            StreamCategory::GameChat => AudioCategory_GameChat,
            StreamCategory::Speech => AudioCategory_Speech,
            StreamCategory::Movie => AudioCategory_Movie,
            StreamCategory::Media => AudioCategory_Media,
            StreamCategory::FarFieldSpeech => AudioCategory_FarFieldSpeech,
            StreamCategory::UniformSpeech => AudioCategory_UniformSpeech,
            StreamCategory::VoiceTyping => AudioCategory_VoiceTyping,
        }
    }
}
1429
/// Defines values that describe the characteristics of an audio stream.
///
/// Wrapper for
/// [AUDCLNT_STREAMOPTIONS](https://learn.microsoft.com/en-us/windows/win32/api/audioclient/ne-audioclient-audclnt_streamoptions).
/// Converted to the raw flag value via the `From<StreamOption>` impl below.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum StreamOption {
    /// The audio stream is a 'raw' stream that bypasses all signal processing except for endpoint specific, always-on
    /// processing in the Audio Processing Object (APO), driver, and hardware.
    Raw,
    /// The audio client is requesting that the audio engine match the format proposed by the client. The audio engine
    /// will match this format only if the format is supported by the audio driver and associated APOs.
    MatchFormat,
    /// The audio stream is an ambisonics stream.
    Ambisonics,
}
1445
1446impl From<StreamOption> for AUDCLNT_STREAMOPTIONS {
1447    fn from(option: StreamOption) -> Self {
1448        match option {
1449            StreamOption::Raw => AUDCLNT_STREAMOPTIONS_RAW,
1450            StreamOption::MatchFormat => AUDCLNT_STREAMOPTIONS_MATCH_FORMAT,
1451            StreamOption::Ambisonics => AUDCLNT_STREAMOPTIONS_AMBISONICS,
1452        }
1453    }
1454}
1455
/// Struct wrapping an [IAudioSessionControl](https://docs.microsoft.com/en-us/windows/win32/api/audiopolicy/nn-audiopolicy-iaudiosessioncontrol).
pub struct AudioSessionControl {
    // The underlying COM session control interface.
    control: IAudioSessionControl,
}
1460
1461impl AudioSessionControl {
1462    /// Get the current session state
1463    pub fn get_state(&self) -> WasapiRes<SessionState> {
1464        let state = unsafe { self.control.GetState()? };
1465        #[allow(non_upper_case_globals)]
1466        let sessionstate = match state {
1467            _ if state == AudioSessionStateActive => SessionState::Active,
1468            _ if state == AudioSessionStateInactive => SessionState::Inactive,
1469            _ if state == AudioSessionStateExpired => SessionState::Expired,
1470            x => return Err(WasapiError::IllegalSessionState(x.0)),
1471        };
1472        Ok(sessionstate)
1473    }
1474
1475    /// Register to receive notifications.
1476    /// Returns a [EventRegistration] struct.
1477    /// The notifications are unregistered when this struct is dropped.
1478    /// Make sure to store the [EventRegistration] in a variable that remains
1479    /// in scope for as long as the event notifications are needed.
1480    ///
1481    /// The function takes ownership of the provided [EventCallbacks].
1482    pub fn register_session_notification(
1483        &self,
1484        callbacks: EventCallbacks,
1485    ) -> WasapiRes<EventRegistration> {
1486        let events: IAudioSessionEvents = AudioSessionEvents::new(callbacks).into();
1487
1488        match unsafe { self.control.RegisterAudioSessionNotification(&events) } {
1489            Ok(()) => Ok(EventRegistration {
1490                events,
1491                control: self.control.clone(),
1492            }),
1493            Err(err) => Err(WasapiError::RegisterNotifications(err)),
1494        }
1495    }
1496
1497    /// Get the id of the process that owns this session.
1498    pub fn get_process_id(&self) -> WasapiRes<u32> {
1499        let control2: IAudioSessionControl2 = self.control.cast()?;
1500
1501        Ok(unsafe { control2.GetProcessId()? })
1502    }
1503
1504    /// Sets the default stream attenuation experience (auto-ducking) provided by the system.
1505    pub fn set_ducking_preference(&self, preference: bool) -> WasapiRes<()> {
1506        let control2: IAudioSessionControl2 = self.control.cast()?;
1507
1508        unsafe { control2.SetDuckingPreference(preference)? };
1509
1510        Ok(())
1511    }
1512}
1513
/// Struct for keeping track of the registered notifications.
/// The notifications are unregistered when this struct is dropped.
pub struct EventRegistration {
    // The registered event handler interface.
    events: IAudioSessionEvents,
    // The session control the events were registered on; needed for unregistering.
    control: IAudioSessionControl,
}
1519
1520impl Drop for EventRegistration {
1521    fn drop(&mut self) {
1522        let _ = unsafe {
1523            self.control
1524                .UnregisterAudioSessionNotification(&self.events)
1525        };
1526    }
1527}
1528
/// Struct wrapping an [IAudioClock](https://docs.microsoft.com/en-us/windows/win32/api/audioclient/nn-audioclient-iaudioclock).
pub struct AudioClock {
    // The underlying COM clock interface.
    clock: IAudioClock,
}
1533
1534impl AudioClock {
1535    /// Get the frequency for this [AudioClock].
1536    /// Note that the unit for the value is undefined.
1537    pub fn get_frequency(&self) -> WasapiRes<u64> {
1538        let freq = unsafe { self.clock.GetFrequency()? };
1539        Ok(freq)
1540    }
1541
1542    /// Get the current device position. Returns the position, as well as the value of the
1543    /// performance counter at the time the position values was taken.
1544    /// The unit for the position value is undefined, but the frequency and position values are
1545    /// in the same unit. Dividing the position with the frequency gets the position in seconds.
1546    pub fn get_position(&self) -> WasapiRes<(u64, u64)> {
1547        let mut pos = 0;
1548        let mut timer = 0;
1549        unsafe { self.clock.GetPosition(&mut pos, Some(&mut timer))? };
1550        Ok((pos, timer))
1551    }
1552}
1553
/// Struct wrapping an [IAudioRenderClient](https://docs.microsoft.com/en-us/windows/win32/api/audioclient/nn-audioclient-iaudiorenderclient).
pub struct AudioRenderClient {
    // The underlying COM render client interface.
    client: IAudioRenderClient,
    // Size of one audio frame in bytes, used to convert frame counts to byte counts.
    bytes_per_frame: usize,
}
1559
1560impl AudioRenderClient {
1561    /// Write raw bytes data to a device from a slice.
1562    /// The number of frames to write should first be checked with the
1563    /// [AudioClient::get_available_space_in_frames()] method.
1564    /// The buffer_flags argument can be used to mark a buffer as silent.
1565    pub fn write_to_device(
1566        &self,
1567        nbr_frames: usize,
1568        data: &[u8],
1569        buffer_flags: Option<BufferFlags>,
1570    ) -> WasapiRes<()> {
1571        if nbr_frames == 0 {
1572            return Ok(());
1573        }
1574        let nbr_bytes = nbr_frames * self.bytes_per_frame;
1575        if nbr_bytes != data.len() {
1576            return Err(WasapiError::DataLengthMismatch {
1577                received: data.len(),
1578                expected: nbr_bytes,
1579            });
1580        }
1581        let bufferptr = unsafe { self.client.GetBuffer(nbr_frames as u32)? };
1582        let bufferslice = unsafe { slice::from_raw_parts_mut(bufferptr, nbr_bytes) };
1583        bufferslice.copy_from_slice(data);
1584        let flags = match buffer_flags {
1585            Some(bflags) => bflags.to_u32(),
1586            None => 0,
1587        };
1588        unsafe { self.client.ReleaseBuffer(nbr_frames as u32, flags)? };
1589        trace!("wrote {nbr_frames} frames");
1590        Ok(())
1591    }
1592
1593    /// Write raw bytes data to a device from a deque.
1594    /// The number of frames to write should first be checked with the
1595    /// [AudioClient::get_available_space_in_frames()] method.
1596    /// The buffer_flags argument can be used to mark a buffer as silent.
1597    pub fn write_to_device_from_deque(
1598        &self,
1599        nbr_frames: usize,
1600        data: &mut VecDeque<u8>,
1601        buffer_flags: Option<BufferFlags>,
1602    ) -> WasapiRes<()> {
1603        if nbr_frames == 0 {
1604            return Ok(());
1605        }
1606        let nbr_bytes = nbr_frames * self.bytes_per_frame;
1607        if nbr_bytes > data.len() {
1608            return Err(WasapiError::DataLengthTooShort {
1609                received: data.len(),
1610                expected: nbr_bytes,
1611            });
1612        }
1613        let bufferptr = unsafe { self.client.GetBuffer(nbr_frames as u32)? };
1614        let bufferslice = unsafe { slice::from_raw_parts_mut(bufferptr, nbr_bytes) };
1615        for element in bufferslice.iter_mut() {
1616            *element = data.pop_front().unwrap();
1617        }
1618        let flags = match buffer_flags {
1619            Some(bflags) => bflags.to_u32(),
1620            None => 0,
1621        };
1622        unsafe { self.client.ReleaseBuffer(nbr_frames as u32, flags)? };
1623        trace!("wrote {nbr_frames} frames");
1624        Ok(())
1625    }
1626}
1627
/// Struct representing information on data read from an audio client buffer.
/// Returned by the read methods of [AudioCaptureClient].
#[derive(Debug)]
pub struct BufferInfo {
    /// Decoded audio client flags.
    pub flags: BufferFlags,
    /// The index of the first frame that was read from the buffer.
    pub index: u64,
    /// The timestamp in 100-nanosecond units of the first frame that was read from the buffer.
    pub timestamp: u64,
}
1638
1639impl BufferInfo {
1640    /// Creates a new [BufferInfo] struct from the `u32` flags value, and `u64` index and timestamp.
1641    pub fn new(flags: u32, index: u64, timestamp: u64) -> Self {
1642        Self {
1643            flags: BufferFlags::new(flags),
1644            index,
1645            timestamp,
1646        }
1647    }
1648
1649    pub fn none() -> Self {
1650        Self {
1651            flags: BufferFlags::none(),
1652            index: 0,
1653            timestamp: 0,
1654        }
1655    }
1656}
1657
/// Struct representing the [ _AUDCLNT_BUFFERFLAGS enum values](https://docs.microsoft.com/en-us/windows/win32/api/audioclient/ne-audioclient-_audclnt_bufferflags).
/// Each field is `true` when the corresponding flag bit is set in the raw value.
#[derive(Debug)]
pub struct BufferFlags {
    /// AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY
    pub data_discontinuity: bool,
    /// AUDCLNT_BUFFERFLAGS_SILENT
    pub silent: bool,
    /// AUDCLNT_BUFFERFLAGS_TIMESTAMP_ERROR
    pub timestamp_error: bool,
}
1668
1669impl BufferFlags {
1670    /// Create a new [BufferFlags] struct from a `u32` value.
1671    pub fn new(flags: u32) -> Self {
1672        BufferFlags {
1673            data_discontinuity: flags & AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY.0 as u32 > 0,
1674            silent: flags & AUDCLNT_BUFFERFLAGS_SILENT.0 as u32 > 0,
1675            timestamp_error: flags & AUDCLNT_BUFFERFLAGS_TIMESTAMP_ERROR.0 as u32 > 0,
1676        }
1677    }
1678
1679    pub fn none() -> Self {
1680        BufferFlags {
1681            data_discontinuity: false,
1682            silent: false,
1683            timestamp_error: false,
1684        }
1685    }
1686
1687    /// Convert a [BufferFlags] struct to a `u32` value.
1688    pub fn to_u32(&self) -> u32 {
1689        let mut value = 0;
1690        if self.data_discontinuity {
1691            value += AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY.0 as u32;
1692        }
1693        if self.silent {
1694            value += AUDCLNT_BUFFERFLAGS_SILENT.0 as u32;
1695        }
1696        if self.timestamp_error {
1697            value += AUDCLNT_BUFFERFLAGS_TIMESTAMP_ERROR.0 as u32;
1698        }
1699        value
1700    }
1701}
1702
/// Struct wrapping an [IAudioCaptureClient](https://docs.microsoft.com/en-us/windows/win32/api/audioclient/nn-audioclient-iaudiocaptureclient).
pub struct AudioCaptureClient {
    // The underlying COM capture client interface.
    client: IAudioCaptureClient,
    // The share mode the client was initialized with, if known.
    sharemode: Option<ShareMode>,
    // Size of one audio frame in bytes, used to convert frame counts to byte counts.
    bytes_per_frame: usize,
}
1709
1710impl AudioCaptureClient {
1711    /// Get number of frames in next packet when in shared mode.
1712    /// In exclusive mode it returns `None`, instead use [AudioClient::get_buffer_size()] or [AudioClient::get_current_padding()].
1713    /// See [IAudioCaptureClient::GetNextPacketSize](https://learn.microsoft.com/en-us/windows/win32/api/audioclient/nf-audioclient-iaudiocaptureclient-getnextpacketsize).
1714    pub fn get_next_packet_size(&self) -> WasapiRes<Option<u32>> {
1715        if let Some(ShareMode::Exclusive) = self.sharemode {
1716            return Ok(None);
1717        }
1718        let nbr_frames = unsafe { self.client.GetNextPacketSize()? };
1719        Ok(Some(nbr_frames))
1720    }
1721
1722    #[deprecated(
1723        since = "0.17.0",
1724        note = "please use the new function name `get_next_packet_size` instead"
1725    )]
1726    pub fn get_next_nbr_frames(&self) -> WasapiRes<Option<u32>> {
1727        self.get_next_packet_size()
1728    }
1729
1730    /// Read raw bytes from a device into a slice. Returns the number of frames
1731    /// that was read, and the `BufferInfo` describing the buffer that the data was read from.
1732    /// The slice must be large enough to hold all data.
1733    /// If it is longer that needed, the unused elements will not be modified.
1734    pub fn read_from_device(&self, data: &mut [u8]) -> WasapiRes<(u32, BufferInfo)> {
1735        let data_len_in_frames = data.len() / self.bytes_per_frame;
1736        if data_len_in_frames == 0 {
1737            return Ok((0, BufferInfo::none()));
1738        }
1739        let mut buffer_ptr = ptr::null_mut();
1740        let mut nbr_frames_returned = 0;
1741        let mut index: u64 = 0;
1742        let mut timestamp: u64 = 0;
1743        let mut flags = 0;
1744        unsafe {
1745            self.client.GetBuffer(
1746                &mut buffer_ptr,
1747                &mut nbr_frames_returned,
1748                &mut flags,
1749                Some(&mut index),
1750                Some(&mut timestamp),
1751            )?
1752        };
1753        let buffer_info = BufferInfo::new(flags, index, timestamp);
1754        if nbr_frames_returned == 0 {
1755            unsafe { self.client.ReleaseBuffer(nbr_frames_returned)? };
1756            return Ok((0, buffer_info));
1757        }
1758        if data_len_in_frames < nbr_frames_returned as usize {
1759            unsafe { self.client.ReleaseBuffer(nbr_frames_returned)? };
1760            return Err(WasapiError::DataLengthTooShort {
1761                received: data_len_in_frames,
1762                expected: nbr_frames_returned as usize,
1763            });
1764        }
1765        let len_in_bytes = nbr_frames_returned as usize * self.bytes_per_frame;
1766        let bufferslice = unsafe { slice::from_raw_parts(buffer_ptr, len_in_bytes) };
1767        data[..len_in_bytes].copy_from_slice(bufferslice);
1768        if nbr_frames_returned > 0 {
1769            unsafe { self.client.ReleaseBuffer(nbr_frames_returned)? };
1770        }
1771        trace!("read {nbr_frames_returned} frames");
1772        Ok((nbr_frames_returned, buffer_info))
1773    }
1774
1775    /// Read raw bytes data from a device into a deque.
1776    /// Returns the [BufferInfo] describing the buffer that the data was read from.
1777    pub fn read_from_device_to_deque(&self, data: &mut VecDeque<u8>) -> WasapiRes<BufferInfo> {
1778        let mut buffer_ptr = ptr::null_mut();
1779        let mut nbr_frames_returned = 0;
1780        let mut index: u64 = 0;
1781        let mut timestamp: u64 = 0;
1782        let mut flags = 0;
1783        unsafe {
1784            self.client.GetBuffer(
1785                &mut buffer_ptr,
1786                &mut nbr_frames_returned,
1787                &mut flags,
1788                Some(&mut index),
1789                Some(&mut timestamp),
1790            )?
1791        };
1792        let buffer_info = BufferInfo::new(flags, index, timestamp);
1793        if nbr_frames_returned == 0 {
1794            // There is no need to release a buffer of 0 bytes
1795            return Ok(buffer_info);
1796        }
1797        let len_in_bytes = nbr_frames_returned as usize * self.bytes_per_frame;
1798        let bufferslice = unsafe { slice::from_raw_parts(buffer_ptr, len_in_bytes) };
1799        for element in bufferslice.iter() {
1800            data.push_back(*element);
1801        }
1802        if nbr_frames_returned > 0 {
1803            unsafe { self.client.ReleaseBuffer(nbr_frames_returned).unwrap() };
1804        }
1805        trace!("read {nbr_frames_returned} frames");
1806        Ok(buffer_info)
1807    }
1808
1809    /// Get the sharemode for this [AudioCaptureClient].
1810    /// The sharemode is decided when the client is initialized.
1811    pub fn get_sharemode(&self) -> Option<ShareMode> {
1812        self.sharemode
1813    }
1814}
1815
/// Struct wrapping a [HANDLE] to an [Event Object](https://docs.microsoft.com/en-us/windows/win32/sync/event-objects).
/// The handle is closed when this struct is dropped.
pub struct Handle {
    // The owned event handle, closed on drop.
    handle: HANDLE,
}
1820
1821impl Drop for Handle {
1822    fn drop(&mut self) {
1823        let _ = unsafe { CloseHandle(self.handle) };
1824    }
1825}
1826
1827impl Handle {
1828    /// Wait for an event on a handle, with a timeout given in ms
1829    pub fn wait_for_event(&self, timeout_ms: u32) -> WasapiRes<()> {
1830        let retval = unsafe { WaitForSingleObject(self.handle, timeout_ms) };
1831        if retval.0 != WAIT_OBJECT_0.0 {
1832            return Err(WasapiError::EventTimeout);
1833        }
1834        Ok(())
1835    }
1836}
1837
/// Struct wrapping an [IAudioEffectsManager](https://learn.microsoft.com/en-us/windows/win32/api/audioclient/nn-audioclient-iaudioeffectsmanager).
pub struct AudioEffectsManager {
    // The underlying COM effects manager interface.
    manager: IAudioEffectsManager,
}
1842
1843impl AudioEffectsManager {
1844    /// Gets the current list of audio effects for the associated audio stream.
1845    pub fn get_audio_effects(&self) -> WasapiRes<Option<Vec<AUDIO_EFFECT>>> {
1846        let mut audio_effects: *mut AUDIO_EFFECT = std::ptr::null_mut();
1847        let mut num_effects: u32 = 0;
1848
1849        unsafe {
1850            self.manager
1851                .GetAudioEffects(&mut audio_effects, &mut num_effects)?;
1852        }
1853
1854        if num_effects > 0 {
1855            let effects_slice =
1856                unsafe { slice::from_raw_parts(audio_effects, num_effects as usize) };
1857            let effects_vec = effects_slice.to_vec();
1858            // Free the memory allocated for the audio effects.
1859            unsafe { CoTaskMemFree(Some(audio_effects as *mut _)) };
1860            Ok(Some(effects_vec))
1861        } else {
1862            Ok(None)
1863        }
1864    }
1865}
1866
/// Struct wrapping an [IAcousticEchoCancellationControl](https://learn.microsoft.com/en-us/windows/win32/api/audioclient/nn-audioclient-iacousticechocancellationcontrol).
pub struct AcousticEchoCancellationControl {
    // The underlying COM AEC control interface.
    control: IAcousticEchoCancellationControl,
}
1871
1872impl AcousticEchoCancellationControl {
1873    /// Sets the audio render endpoint to be used as the reference stream for acoustic echo cancellation (AEC).
1874    ///
1875    /// # Parameters
1876    /// - `endpoint_id`: An optional string containing the device ID of the audio render endpoint to use as the loopback reference.
1877    ///   If set to `None`, Windows will automatically select the reference device.
1878    ///   You can obtain the device ID by calling [Device::get_id()].
1879    ///
1880    /// # Errors
1881    /// Returns an error if setting the echo cancellation render endpoint fails.
1882    pub fn set_echo_cancellation_render_endpoint(
1883        &self,
1884        endpoint_id: Option<String>,
1885    ) -> WasapiRes<()> {
1886        let endpoint_id = if let Some(endpoint_id) = endpoint_id {
1887            PCWSTR::from_raw(HSTRING::from(endpoint_id).as_ptr())
1888        } else {
1889            PCWSTR::null()
1890        };
1891        unsafe {
1892            self.control
1893                .SetEchoCancellationRenderEndpoint(endpoint_id)?
1894        };
1895        Ok(())
1896    }
1897}