windows_capture/
encoder.rs

use std::fs::{self, File};
use std::path::Path;
use std::slice;
use std::sync::atomic::{self, AtomicBool};
use std::sync::{Arc, mpsc};
use std::thread::{self, JoinHandle};

use parking_lot::{Condvar, Mutex};
use windows::Foundation::{TimeSpan, TypedEventHandler};
use windows::Graphics::DirectX::Direct3D11::IDirect3DSurface;
use windows::Graphics::Imaging::{BitmapAlphaMode, BitmapEncoder, BitmapPixelFormat};
use windows::Media::Core::{
    AudioStreamDescriptor, MediaStreamSample, MediaStreamSource,
    MediaStreamSourceSampleRequestedEventArgs, MediaStreamSourceStartingEventArgs,
    VideoStreamDescriptor,
};
use windows::Media::MediaProperties::{
    AudioEncodingProperties, ContainerEncodingProperties, MediaEncodingProfile,
    MediaEncodingSubtypes, VideoEncodingProperties,
};
use windows::Media::Transcoding::MediaTranscoder;
use windows::Security::Cryptography::CryptographicBuffer;
use windows::Storage::Streams::{
    Buffer, DataReader, IRandomAccessStream, InMemoryRandomAccessStream, InputStreamOptions,
};
use windows::Storage::{FileAccessMode, StorageFile};
use windows::core::{HSTRING, Interface};

use crate::d3d11::SendDirectX;
use crate::frame::{Frame, ImageFormat};
use crate::settings::ColorFormat;

#[derive(thiserror::Error, Eq, PartialEq, Clone, Debug)]
pub enum ImageEncoderError {
    #[error("This color format is not supported for saving as an image")]
    UnsupportedFormat,
    #[error("Windows API error: {0}")]
    WindowsError(#[from] windows::core::Error),
}

/// The `ImageEncoder` struct is used to encode image buffers into image bytes with a specified format and color format.
pub struct ImageEncoder {
    format: ImageFormat,
    color_format: ColorFormat,
}

impl ImageEncoder {
    /// Creates a new `ImageEncoder` with the specified format and color format.
    ///
    /// # Arguments
    ///
    /// * `format` - The desired image format.
    /// * `color_format` - The desired color format.
    ///
    /// # Returns
    ///
    /// A new `ImageEncoder` instance.
    #[must_use]
    #[inline]
    pub const fn new(format: ImageFormat, color_format: ColorFormat) -> Self {
        Self { format, color_format }
    }

    /// Encodes the image buffer into image bytes with the specified format.
    ///
    /// # Arguments
    ///
    /// * `image_buffer` - The image buffer to encode.
    /// * `width` - The width of the image.
    /// * `height` - The height of the image.
    ///
    /// # Returns
    ///
    /// The encoded image bytes as a `Vec<u8>`.
    ///
    /// # Errors
    ///
    /// Returns an `ImageEncoderError` if the encoding fails or if the color format is unsupported.
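    ///
    /// # Example
    ///
    /// A minimal sketch of typical usage; the 2x2 buffer contents and the output path are
    /// illustrative and assume a tightly packed RGBA8 image.
    ///
    /// ```no_run
    /// use windows_capture::encoder::ImageEncoder;
    /// use windows_capture::frame::ImageFormat;
    /// use windows_capture::settings::ColorFormat;
    ///
    /// // A 2x2 opaque red image in RGBA8 (4 bytes per pixel).
    /// let pixels = [255u8, 0, 0, 255].repeat(4);
    ///
    /// let encoder = ImageEncoder::new(ImageFormat::Png, ColorFormat::Rgba8);
    /// let png_bytes = encoder.encode(&pixels, 2, 2).expect("encoding failed");
    /// std::fs::write("pixel.png", png_bytes).expect("failed to write file");
    /// ```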
    #[inline]
    pub fn encode(
        &self,
        image_buffer: &[u8],
        width: u32,
        height: u32,
    ) -> Result<Vec<u8>, ImageEncoderError> {
        let encoder = match self.format {
            ImageFormat::Jpeg => BitmapEncoder::JpegEncoderId()?,
            ImageFormat::Png => BitmapEncoder::PngEncoderId()?,
            ImageFormat::Gif => BitmapEncoder::GifEncoderId()?,
            ImageFormat::Tiff => BitmapEncoder::TiffEncoderId()?,
            ImageFormat::Bmp => BitmapEncoder::BmpEncoderId()?,
            ImageFormat::JpegXr => BitmapEncoder::JpegXREncoderId()?,
        };

        let stream = InMemoryRandomAccessStream::new()?;
        let encoder = BitmapEncoder::CreateAsync(encoder, &stream)?.get()?;

        let pixelformat = match self.color_format {
            ColorFormat::Bgra8 => BitmapPixelFormat::Bgra8,
            ColorFormat::Rgba8 => BitmapPixelFormat::Rgba8,
            ColorFormat::Rgba16F => return Err(ImageEncoderError::UnsupportedFormat),
        };

        encoder.SetPixelData(
            pixelformat,
            BitmapAlphaMode::Premultiplied,
            width,
            height,
            1.0,
            1.0,
            image_buffer,
        )?;

        encoder.FlushAsync()?.get()?;

        let buffer = Buffer::Create(u32::try_from(stream.Size()?).unwrap())?;
        stream.ReadAsync(&buffer, buffer.Capacity()?, InputStreamOptions::None)?.get()?;

        let data_reader = DataReader::FromBuffer(&buffer)?;
        let length = data_reader.UnconsumedBufferLength()?;
        let mut bytes = vec![0u8; length as usize];
        data_reader.ReadBytes(&mut bytes)?;

        Ok(bytes)
    }
}

#[derive(thiserror::Error, Debug)]
pub enum VideoEncoderError {
    #[error("Windows API error: {0}")]
    WindowsError(#[from] windows::core::Error),
    #[error("Failed to send frame: {0}")]
    FrameSendError(#[from] mpsc::SendError<Option<(VideoEncoderSource, TimeSpan)>>),
    #[error("Failed to send audio: {0}")]
    AudioSendError(#[from] mpsc::SendError<Option<(AudioEncoderSource, TimeSpan)>>),
    #[error("Video encoding is disabled")]
    VideoDisabled,
    #[error("Audio encoding is disabled")]
    AudioDisabled,
    #[error("I/O error: {0}")]
    IoError(#[from] std::io::Error),
}

unsafe impl Send for VideoEncoderError {}
unsafe impl Sync for VideoEncoderError {}

/// The `VideoEncoderSource` enum represents all the types that can be sent to the encoder.
pub enum VideoEncoderSource {
    DirectX(SendDirectX<IDirect3DSurface>),
    Buffer((SendDirectX<*const u8>, usize)),
}

/// The `AudioEncoderSource` enum represents all the types that can be sent to the encoder.
pub enum AudioEncoderSource {
    Buffer((SendDirectX<*const u8>, usize)),
}

/// The `VideoSettingsBuilder` struct is used to configure settings for the video encoder.
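///
/// # Example
///
/// A minimal sketch of configuring the builder; the resolution, codec, bitrate, and frame
/// rate shown are illustrative.
///
/// ```no_run
/// use windows_capture::encoder::{VideoSettingsBuilder, VideoSettingsSubType};
///
/// let video_settings = VideoSettingsBuilder::new(1920, 1080)
///     .sub_type(VideoSettingsSubType::H264)
///     .bitrate(8_000_000)
///     .frame_rate(30);
/// ```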
pub struct VideoSettingsBuilder {
    sub_type: VideoSettingsSubType,
    bitrate: u32,
    width: u32,
    height: u32,
    frame_rate: u32,
    pixel_aspect_ratio: (u32, u32),
    disabled: bool,
}

impl VideoSettingsBuilder {
    pub const fn new(width: u32, height: u32) -> Self {
        Self {
            bitrate: 15000000,
            frame_rate: 60,
            pixel_aspect_ratio: (1, 1),
            sub_type: VideoSettingsSubType::HEVC,
            width,
            height,
            disabled: false,
        }
    }

    pub const fn sub_type(mut self, sub_type: VideoSettingsSubType) -> Self {
        self.sub_type = sub_type;
        self
    }

    pub const fn bitrate(mut self, bitrate: u32) -> Self {
        self.bitrate = bitrate;
        self
    }

    pub const fn width(mut self, width: u32) -> Self {
        self.width = width;
        self
    }

    pub const fn height(mut self, height: u32) -> Self {
        self.height = height;
        self
    }

    pub const fn frame_rate(mut self, frame_rate: u32) -> Self {
        self.frame_rate = frame_rate;
        self
    }

    pub const fn pixel_aspect_ratio(mut self, pixel_aspect_ratio: (u32, u32)) -> Self {
        self.pixel_aspect_ratio = pixel_aspect_ratio;
        self
    }

    pub const fn disabled(mut self, disabled: bool) -> Self {
        self.disabled = disabled;
        self
    }

    fn build(self) -> Result<(VideoEncodingProperties, bool), VideoEncoderError> {
        let properties = VideoEncodingProperties::new()?;

        properties.SetSubtype(&self.sub_type.to_hstring())?;
        properties.SetBitrate(self.bitrate)?;
        properties.SetWidth(self.width)?;
        properties.SetHeight(self.height)?;
        properties.FrameRate()?.SetNumerator(self.frame_rate)?;
        properties.FrameRate()?.SetDenominator(1)?;
        properties.PixelAspectRatio()?.SetNumerator(self.pixel_aspect_ratio.0)?;
        properties.PixelAspectRatio()?.SetDenominator(self.pixel_aspect_ratio.1)?;

        Ok((properties, self.disabled))
    }
}

/// The `AudioSettingsBuilder` is used to configure settings for the audio encoder.
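///
/// # Example
///
/// A minimal sketch of configuring the builder; the codec, bitrate, sample rate, and channel
/// count shown are illustrative.
///
/// ```no_run
/// use windows_capture::encoder::{AudioSettingsBuilder, AudioSettingsSubType};
///
/// let audio_settings = AudioSettingsBuilder::new()
///     .sub_type(AudioSettingsSubType::AAC)
///     .bitrate(160_000)
///     .sample_rate(44_100)
///     .channel_count(2);
/// ```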
pub struct AudioSettingsBuilder {
    bitrate: u32,
    channel_count: u32,
    sample_rate: u32,
    bit_per_sample: u32,
    sub_type: AudioSettingsSubType,
    disabled: bool,
}

impl AudioSettingsBuilder {
    pub const fn new() -> Self {
        Self {
            bitrate: 192000,
            channel_count: 2,
            sample_rate: 48000,
            bit_per_sample: 16,
            sub_type: AudioSettingsSubType::AAC,
            disabled: false,
        }
    }

    pub const fn bitrate(mut self, bitrate: u32) -> Self {
        self.bitrate = bitrate;
        self
    }

    pub const fn channel_count(mut self, channel_count: u32) -> Self {
        self.channel_count = channel_count;
        self
    }

    pub const fn sample_rate(mut self, sample_rate: u32) -> Self {
        self.sample_rate = sample_rate;
        self
    }

    pub const fn bit_per_sample(mut self, bit_per_sample: u32) -> Self {
        self.bit_per_sample = bit_per_sample;
        self
    }

    pub const fn sub_type(mut self, sub_type: AudioSettingsSubType) -> Self {
        self.sub_type = sub_type;
        self
    }

    pub const fn disabled(mut self, disabled: bool) -> Self {
        self.disabled = disabled;
        self
    }

    fn build(self) -> Result<(AudioEncodingProperties, bool), VideoEncoderError> {
        let properties = AudioEncodingProperties::new()?;
        properties.SetBitrate(self.bitrate)?;
        properties.SetChannelCount(self.channel_count)?;
        properties.SetSampleRate(self.sample_rate)?;
        properties.SetBitsPerSample(self.bit_per_sample)?;
        properties.SetSubtype(&self.sub_type.to_hstring())?;

        Ok((properties, self.disabled))
    }
}

impl Default for AudioSettingsBuilder {
    fn default() -> Self {
        Self::new()
    }
}

/// The `ContainerSettingsBuilder` is used to configure settings for the container.
pub struct ContainerSettingsBuilder {
    sub_type: ContainerSettingsSubType,
}

impl ContainerSettingsBuilder {
    pub const fn new() -> Self {
        Self { sub_type: ContainerSettingsSubType::MPEG4 }
    }

    pub const fn sub_type(mut self, sub_type: ContainerSettingsSubType) -> Self {
        self.sub_type = sub_type;
        self
    }

    fn build(self) -> Result<ContainerEncodingProperties, VideoEncoderError> {
        let properties = ContainerEncodingProperties::new()?;
        properties.SetSubtype(&self.sub_type.to_hstring())?;
        Ok(properties)
    }
}

impl Default for ContainerSettingsBuilder {
    fn default() -> Self {
        Self::new()
    }
}

/// The `VideoSettingsSubType` enum represents the subtypes for the video encoder.
#[derive(Eq, PartialEq, Clone, Copy, Debug)]
pub enum VideoSettingsSubType {
    ARGB32,
    BGRA8,
    D16,
    H263,
    H264,
    H264ES,
    HEVC,
    HEVCES,
    IYUV,
    L8,
    L16,
    MJPG,
    NV12,
    MPEG1,
    MPEG2,
    RGB24,
    RGB32,
    WMV3,
    WVC1,
    VP9,
    YUY2,
    YV12,
}

impl VideoSettingsSubType {
    pub fn to_hstring(&self) -> HSTRING {
        let s = match self {
            Self::ARGB32 => "ARGB32",
            Self::BGRA8 => "BGRA8",
            Self::D16 => "D16",
            Self::H263 => "H263",
            Self::H264 => "H264",
            Self::H264ES => "H264ES",
            Self::HEVC => "HEVC",
            Self::HEVCES => "HEVCES",
            Self::IYUV => "IYUV",
            Self::L8 => "L8",
            Self::L16 => "L16",
            Self::MJPG => "MJPG",
            Self::NV12 => "NV12",
            Self::MPEG1 => "MPEG1",
            Self::MPEG2 => "MPEG2",
            Self::RGB24 => "RGB24",
            Self::RGB32 => "RGB32",
            Self::WMV3 => "WMV3",
            Self::WVC1 => "WVC1",
            Self::VP9 => "VP9",
            Self::YUY2 => "YUY2",
            Self::YV12 => "YV12",
        };

        HSTRING::from(s)
    }
}

/// The `AudioSettingsSubType` enum represents the subtypes for the audio encoder.
#[derive(Eq, PartialEq, Clone, Copy, Debug)]
pub enum AudioSettingsSubType {
    AAC,
    AC3,
    AACADTS,
    AACHDCP,
    AC3SPDIF,
    AC3HDCP,
    ADTS,
    ALAC,
    AMRNB,
    AWRWB,
    DTS,
    EAC3,
    FLAC,
    Float,
    MP3,
    MPEG,
    OPUS,
    PCM,
    WMA8,
    WMA9,
    Vorbis,
}

impl AudioSettingsSubType {
    pub fn to_hstring(&self) -> HSTRING {
        let s = match self {
            Self::AAC => "AAC",
            Self::AC3 => "AC3",
            Self::AACADTS => "AACADTS",
            Self::AACHDCP => "AACHDCP",
            Self::AC3SPDIF => "AC3SPDIF",
            Self::AC3HDCP => "AC3HDCP",
            Self::ADTS => "ADTS",
            Self::ALAC => "ALAC",
            Self::AMRNB => "AMRNB",
            Self::AWRWB => "AWRWB",
            Self::DTS => "DTS",
            Self::EAC3 => "EAC3",
            Self::FLAC => "FLAC",
            Self::Float => "Float",
            Self::MP3 => "MP3",
            Self::MPEG => "MPEG",
            Self::OPUS => "OPUS",
            Self::PCM => "PCM",
            Self::WMA8 => "WMA8",
            Self::WMA9 => "WMA9",
            Self::Vorbis => "Vorbis",
        };

        HSTRING::from(s)
    }
}

/// The `ContainerSettingsSubType` enum represents the subtypes for the container.
#[derive(Eq, PartialEq, Clone, Copy, Debug)]
pub enum ContainerSettingsSubType {
    ASF,
    MP3,
    MPEG4,
    AVI,
    MPEG2,
    WAVE,
    AACADTS,
    ADTS,
    GP3,
    AMR,
    FLAC,
}

impl ContainerSettingsSubType {
    pub fn to_hstring(&self) -> HSTRING {
        match self {
            Self::ASF => HSTRING::from("ASF"),
            Self::MP3 => HSTRING::from("MP3"),
            Self::MPEG4 => HSTRING::from("MPEG4"),
            Self::AVI => HSTRING::from("AVI"),
            Self::MPEG2 => HSTRING::from("MPEG2"),
            Self::WAVE => HSTRING::from("WAVE"),
            Self::AACADTS => HSTRING::from("AACADTS"),
            Self::ADTS => HSTRING::from("ADTS"),
            Self::GP3 => HSTRING::from("3GP"),
            Self::AMR => HSTRING::from("AMR"),
            Self::FLAC => HSTRING::from("FLAC"),
        }
    }
}

/// The `VideoEncoder` struct is used to encode video frames and save them to a specified file path.
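///
/// # Example
///
/// A minimal sketch that encodes a single black BGRA frame from a raw buffer; the resolution,
/// timestamp, and output path are illustrative, and audio is disabled for brevity.
///
/// ```no_run
/// use windows_capture::encoder::{
///     AudioSettingsBuilder, ContainerSettingsBuilder, VideoEncoder, VideoSettingsBuilder,
/// };
///
/// let mut encoder = VideoEncoder::new(
///     VideoSettingsBuilder::new(1280, 720),
///     AudioSettingsBuilder::new().disabled(true),
///     ContainerSettingsBuilder::new(),
///     "output.mp4",
/// )
/// .expect("failed to create encoder");
///
/// // One 1280x720 BGRA frame (4 bytes per pixel) at timestamp 0 (100-nanosecond units).
/// let frame = vec![0u8; 1280 * 720 * 4];
/// encoder.send_frame_buffer(&frame, 0).expect("failed to send frame");
///
/// encoder.finish().expect("failed to finish encoding");
/// ```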
pub struct VideoEncoder {
    first_timestamp: Option<TimeSpan>,
    frame_sender: mpsc::Sender<Option<(VideoEncoderSource, TimeSpan)>>,
    audio_sender: mpsc::Sender<Option<(AudioEncoderSource, TimeSpan)>>,
    sample_requested: i64,
    media_stream_source: MediaStreamSource,
    starting: i64,
    transcode_thread: Option<JoinHandle<Result<(), VideoEncoderError>>>,
    frame_notify: Arc<(Mutex<bool>, Condvar)>,
    audio_notify: Arc<(Mutex<bool>, Condvar)>,
    error_notify: Arc<AtomicBool>,
    is_video_disabled: bool,
    is_audio_disabled: bool,
}

impl VideoEncoder {
    /// Creates a new `VideoEncoder` instance that writes the encoded video to a file.
    ///
    /// # Arguments
    ///
    /// * `video_settings` - The settings for the video encoder.
    /// * `audio_settings` - The settings for the audio encoder.
    /// * `container_settings` - The settings for the output container.
    /// * `path` - The file path where the encoded video will be saved.
    ///
    /// # Returns
    ///
    /// Returns a `Result` containing the `VideoEncoder` instance if successful, or a
    /// `VideoEncoderError` if an error occurs.
    #[inline]
    pub fn new<P: AsRef<Path>>(
        video_settings: VideoSettingsBuilder,
        audio_settings: AudioSettingsBuilder,
        container_settings: ContainerSettingsBuilder,
        path: P,
    ) -> Result<Self, VideoEncoderError> {
        let path = path.as_ref();
        let media_encoding_profile = MediaEncodingProfile::new()?;

        let (video_encoding_properties, is_video_disabled) = video_settings.build()?;
        media_encoding_profile.SetVideo(&video_encoding_properties)?;
        let (audio_encoding_properties, is_audio_disabled) = audio_settings.build()?;
        media_encoding_profile.SetAudio(&audio_encoding_properties)?;
        let container_encoding_properties = container_settings.build()?;
        media_encoding_profile.SetContainer(&container_encoding_properties)?;

        let video_encoding_properties = VideoEncodingProperties::CreateUncompressed(
            &MediaEncodingSubtypes::Bgra8()?,
            video_encoding_properties.Width()?,
            video_encoding_properties.Height()?,
        )?;
        let video_stream_descriptor = VideoStreamDescriptor::Create(&video_encoding_properties)?;

        let audio_encoding_properties = AudioEncodingProperties::CreateAac(
            audio_encoding_properties.SampleRate()?,
            audio_encoding_properties.ChannelCount()?,
            audio_encoding_properties.Bitrate()?,
        )?;
        let audio_stream_descriptor = AudioStreamDescriptor::Create(&audio_encoding_properties)?;

        let media_stream_source = MediaStreamSource::CreateFromDescriptors(
            &video_stream_descriptor,
            &audio_stream_descriptor,
        )?;
        media_stream_source.SetBufferTime(TimeSpan::default())?;

        let starting = media_stream_source.Starting(&TypedEventHandler::<
            MediaStreamSource,
            MediaStreamSourceStartingEventArgs,
        >::new(move |_, stream_start| {
            let stream_start = stream_start
                .as_ref()
                .expect("MediaStreamSource Starting parameter was None. This should not happen.");

            stream_start.Request()?.SetActualStartPosition(TimeSpan { Duration: 0 })?;
            Ok(())
        }))?;

        let (frame_sender, frame_receiver) =
            mpsc::channel::<Option<(VideoEncoderSource, TimeSpan)>>();

        let (audio_sender, audio_receiver) =
            mpsc::channel::<Option<(AudioEncoderSource, TimeSpan)>>();

        let frame_notify = Arc::new((Mutex::new(false), Condvar::new()));
        let audio_notify = Arc::new((Mutex::new(false), Condvar::new()));

        let sample_requested = media_stream_source.SampleRequested(&TypedEventHandler::<
            MediaStreamSource,
            MediaStreamSourceSampleRequestedEventArgs,
        >::new({
            let frame_receiver = frame_receiver;
            let frame_notify = frame_notify.clone();

            let audio_receiver = audio_receiver;
            let audio_notify = audio_notify.clone();

            move |_, sample_requested| {
                let sample_requested = sample_requested.as_ref().expect(
                    "MediaStreamSource SampleRequested parameter was None. This should not happen.",
                );

                if sample_requested
                    .Request()?
                    .StreamDescriptor()?
                    .cast::<AudioStreamDescriptor>()
                    .is_ok()
                {
                    if is_audio_disabled {
                        sample_requested.Request()?.SetSample(None)?;

                        return Ok(());
                    }

                    let audio = match audio_receiver.recv() {
                        Ok(audio) => audio,
                        Err(e) => panic!("Failed to receive audio from the audio sender: {e}"),
                    };

                    match audio {
                        Some((source, timestamp)) => {
                            let sample = match source {
                                AudioEncoderSource::Buffer(buffer_data) => {
                                    let buffer = buffer_data.0;
                                    let buffer =
                                        unsafe { slice::from_raw_parts(buffer.0, buffer_data.1) };
                                    let buffer = CryptographicBuffer::CreateFromByteArray(buffer)?;
                                    MediaStreamSample::CreateFromBuffer(&buffer, timestamp)?
                                }
                            };

                            sample_requested.Request()?.SetSample(&sample)?;
                        }
                        None => {
                            sample_requested.Request()?.SetSample(None)?;
                        }
                    }

                    let (lock, cvar) = &*audio_notify;
                    *lock.lock() = true;
                    cvar.notify_one();
                } else {
                    if is_video_disabled {
                        sample_requested.Request()?.SetSample(None)?;

                        return Ok(());
                    }

                    let frame = match frame_receiver.recv() {
                        Ok(frame) => frame,
                        Err(e) => panic!("Failed to receive a frame from the frame sender: {e}"),
                    };

                    match frame {
                        Some((source, timestamp)) => {
                            let sample = match source {
                                VideoEncoderSource::DirectX(surface) => {
                                    MediaStreamSample::CreateFromDirect3D11Surface(
                                        &surface.0, timestamp,
                                    )?
                                }
                                VideoEncoderSource::Buffer(buffer_data) => {
                                    let buffer = buffer_data.0;
                                    let buffer =
                                        unsafe { slice::from_raw_parts(buffer.0, buffer_data.1) };
                                    let buffer = CryptographicBuffer::CreateFromByteArray(buffer)?;
                                    MediaStreamSample::CreateFromBuffer(&buffer, timestamp)?
                                }
                            };

                            sample_requested.Request()?.SetSample(&sample)?;
                        }
                        None => {
                            sample_requested.Request()?.SetSample(None)?;
                        }
                    }

                    let (lock, cvar) = &*frame_notify;
                    *lock.lock() = true;
                    cvar.notify_one();
                }

                Ok(())
            }
        }))?;

        let media_transcoder = MediaTranscoder::new()?;
        media_transcoder.SetHardwareAccelerationEnabled(true)?;

        File::create(path)?;
        // `fs::canonicalize` returns a `\\?\`-prefixed (extended-length) path on Windows; strip
        // the prefix so `StorageFile::GetFileFromPathAsync` accepts it.
        let path = fs::canonicalize(path).unwrap().to_string_lossy()[4..].to_string();
        let path = Path::new(&path);

        let path = &HSTRING::from(path.as_os_str().to_os_string());

        let file = StorageFile::GetFileFromPathAsync(path)?.get()?;
        let media_stream_output = file.OpenAsync(FileAccessMode::ReadWrite)?.get()?;

        let transcode = media_transcoder
            .PrepareMediaStreamSourceTranscodeAsync(
                &media_stream_source,
                &media_stream_output,
                &media_encoding_profile,
            )?
            .get()?;

        let error_notify = Arc::new(AtomicBool::new(false));
        let transcode_thread = thread::spawn({
            let error_notify = error_notify.clone();

            move || -> Result<(), VideoEncoderError> {
                let result = transcode.TranscodeAsync();

                if result.is_err() {
                    error_notify.store(true, atomic::Ordering::Relaxed);
                }

                result?.get()?;

                drop(media_transcoder);

                Ok(())
            }
        });

        Ok(Self {
            first_timestamp: None,
            frame_sender,
            audio_sender,
            sample_requested,
            media_stream_source,
            starting,
            transcode_thread: Some(transcode_thread),
            frame_notify,
            audio_notify,
            error_notify,
            is_video_disabled,
            is_audio_disabled,
        })
    }

    /// Creates a new `VideoEncoder` instance that writes the encoded video to a stream.
    ///
    /// # Arguments
    ///
    /// * `video_settings` - The settings for the video encoder.
    /// * `audio_settings` - The settings for the audio encoder.
    /// * `container_settings` - The settings for the output container.
    /// * `stream` - The stream where the encoded video will be written.
    ///
    /// # Returns
    ///
    /// Returns a `Result` containing the `VideoEncoder` instance if successful, or a
    /// `VideoEncoderError` if an error occurs.
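    ///
    /// # Example
    ///
    /// A minimal sketch that targets an in-memory stream; the settings are illustrative, and
    /// the stream is cast to `IRandomAccessStream` before being passed in.
    ///
    /// ```no_run
    /// use windows::Storage::Streams::InMemoryRandomAccessStream;
    /// use windows::core::Interface;
    /// use windows_capture::encoder::{
    ///     AudioSettingsBuilder, ContainerSettingsBuilder, VideoEncoder, VideoSettingsBuilder,
    /// };
    ///
    /// let stream = InMemoryRandomAccessStream::new().expect("failed to create stream");
    /// let encoder = VideoEncoder::new_from_stream(
    ///     VideoSettingsBuilder::new(1280, 720),
    ///     AudioSettingsBuilder::new().disabled(true),
    ///     ContainerSettingsBuilder::new(),
    ///     stream.cast().expect("failed to cast stream"),
    /// )
    /// .expect("failed to create encoder");
    /// ```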
    #[inline]
    pub fn new_from_stream(
        video_settings: VideoSettingsBuilder,
        audio_settings: AudioSettingsBuilder,
        container_settings: ContainerSettingsBuilder,
        stream: IRandomAccessStream,
    ) -> Result<Self, VideoEncoderError> {
        let media_encoding_profile = MediaEncodingProfile::new()?;

        let (video_encoding_properties, is_video_disabled) = video_settings.build()?;
        media_encoding_profile.SetVideo(&video_encoding_properties)?;
        let (audio_encoding_properties, is_audio_disabled) = audio_settings.build()?;
        media_encoding_profile.SetAudio(&audio_encoding_properties)?;
        let container_encoding_properties = container_settings.build()?;
        media_encoding_profile.SetContainer(&container_encoding_properties)?;

        let video_encoding_properties = VideoEncodingProperties::CreateUncompressed(
            &MediaEncodingSubtypes::Bgra8()?,
            video_encoding_properties.Width()?,
            video_encoding_properties.Height()?,
        )?;
        let video_stream_descriptor = VideoStreamDescriptor::Create(&video_encoding_properties)?;

        let audio_encoding_properties = AudioEncodingProperties::CreateAac(
            audio_encoding_properties.SampleRate()?,
            audio_encoding_properties.ChannelCount()?,
            audio_encoding_properties.Bitrate()?,
        )?;
        let audio_stream_descriptor = AudioStreamDescriptor::Create(&audio_encoding_properties)?;

        let media_stream_source = MediaStreamSource::CreateFromDescriptors(
            &video_stream_descriptor,
            &audio_stream_descriptor,
        )?;
        media_stream_source.SetBufferTime(TimeSpan::default())?;

        let starting = media_stream_source.Starting(&TypedEventHandler::<
            MediaStreamSource,
            MediaStreamSourceStartingEventArgs,
        >::new(move |_, stream_start| {
            let stream_start = stream_start
                .as_ref()
                .expect("MediaStreamSource Starting parameter was None. This should not happen.");

            stream_start.Request()?.SetActualStartPosition(TimeSpan { Duration: 0 })?;
            Ok(())
        }))?;

        let (frame_sender, frame_receiver) =
            mpsc::channel::<Option<(VideoEncoderSource, TimeSpan)>>();

        let (audio_sender, audio_receiver) =
            mpsc::channel::<Option<(AudioEncoderSource, TimeSpan)>>();

        let frame_notify = Arc::new((Mutex::new(false), Condvar::new()));
        let audio_notify = Arc::new((Mutex::new(false), Condvar::new()));

        let sample_requested = media_stream_source.SampleRequested(&TypedEventHandler::<
            MediaStreamSource,
            MediaStreamSourceSampleRequestedEventArgs,
        >::new({
            let frame_receiver = frame_receiver;
            let frame_notify = frame_notify.clone();

            let audio_receiver = audio_receiver;
            let audio_notify = audio_notify.clone();

            move |_, sample_requested| {
                let sample_requested = sample_requested.as_ref().expect(
                    "MediaStreamSource SampleRequested parameter was None. This should not happen.",
                );

                if sample_requested
                    .Request()?
                    .StreamDescriptor()?
                    .cast::<AudioStreamDescriptor>()
                    .is_ok()
                {
                    if is_audio_disabled {
                        sample_requested.Request()?.SetSample(None)?;

                        return Ok(());
                    }

                    let audio = match audio_receiver.recv() {
                        Ok(audio) => audio,
                        Err(e) => panic!("Failed to receive audio from the audio sender: {e}"),
                    };

                    match audio {
                        Some((source, timestamp)) => {
                            let sample = match source {
                                AudioEncoderSource::Buffer(buffer_data) => {
                                    let buffer = buffer_data.0;
                                    let buffer =
                                        unsafe { slice::from_raw_parts(buffer.0, buffer_data.1) };
                                    let buffer = CryptographicBuffer::CreateFromByteArray(buffer)?;
                                    MediaStreamSample::CreateFromBuffer(&buffer, timestamp)?
                                }
                            };

                            sample_requested.Request()?.SetSample(&sample)?;
                        }
                        None => {
                            sample_requested.Request()?.SetSample(None)?;
                        }
                    }

                    let (lock, cvar) = &*audio_notify;
                    *lock.lock() = true;
                    cvar.notify_one();
                } else {
                    if is_video_disabled {
                        sample_requested.Request()?.SetSample(None)?;

                        return Ok(());
                    }

                    let frame = match frame_receiver.recv() {
                        Ok(frame) => frame,
                        Err(e) => panic!("Failed to receive a frame from the frame sender: {e}"),
                    };

                    match frame {
                        Some((source, timestamp)) => {
                            let sample = match source {
                                VideoEncoderSource::DirectX(surface) => {
                                    MediaStreamSample::CreateFromDirect3D11Surface(
                                        &surface.0, timestamp,
                                    )?
                                }
                                VideoEncoderSource::Buffer(buffer_data) => {
                                    let buffer = buffer_data.0;
                                    let buffer =
                                        unsafe { slice::from_raw_parts(buffer.0, buffer_data.1) };
                                    let buffer = CryptographicBuffer::CreateFromByteArray(buffer)?;
                                    MediaStreamSample::CreateFromBuffer(&buffer, timestamp)?
                                }
                            };

                            sample_requested.Request()?.SetSample(&sample)?;
                        }
                        None => {
                            sample_requested.Request()?.SetSample(None)?;
                        }
                    }

                    let (lock, cvar) = &*frame_notify;
                    *lock.lock() = true;
                    cvar.notify_one();
                }

                Ok(())
            }
        }))?;

        let media_transcoder = MediaTranscoder::new()?;
        media_transcoder.SetHardwareAccelerationEnabled(true)?;

        let transcode = media_transcoder
            .PrepareMediaStreamSourceTranscodeAsync(
                &media_stream_source,
                &stream,
                &media_encoding_profile,
            )?
            .get()?;

        let error_notify = Arc::new(AtomicBool::new(false));
        let transcode_thread = thread::spawn({
            let error_notify = error_notify.clone();

            move || -> Result<(), VideoEncoderError> {
                let result = transcode.TranscodeAsync();

                if result.is_err() {
                    error_notify.store(true, atomic::Ordering::Relaxed);
                }

                result?.get()?;

                drop(media_transcoder);

                Ok(())
            }
        });

        Ok(Self {
            first_timestamp: None,
            frame_sender,
            audio_sender,
            sample_requested,
            media_stream_source,
            starting,
            transcode_thread: Some(transcode_thread),
            frame_notify,
            audio_notify,
            error_notify,
            is_video_disabled,
            is_audio_disabled,
        })
    }

    /// Sends a video frame to the video encoder for encoding.
    ///
    /// # Arguments
    ///
    /// * `frame` - A mutable reference to the `Frame` to be encoded.
    ///
    /// # Returns
    ///
    /// Returns `Ok(())` if the frame is successfully sent for encoding, or a `VideoEncoderError`
    /// if an error occurs.
    #[inline]
    pub fn send_frame(&mut self, frame: &mut Frame) -> Result<(), VideoEncoderError> {
        if self.is_video_disabled {
            return Err(VideoEncoderError::VideoDisabled);
        }

        let timestamp = match self.first_timestamp {
            Some(timestamp) => {
                TimeSpan { Duration: frame.timestamp().Duration - timestamp.Duration }
            }
            None => {
                let timestamp = frame.timestamp();
                self.first_timestamp = Some(timestamp);
                TimeSpan { Duration: 0 }
            }
        };

        self.frame_sender.send(Some((
            VideoEncoderSource::DirectX(SendDirectX::new(unsafe {
                frame.as_raw_surface().clone()
            })),
            timestamp,
        )))?;

        let (lock, cvar) = &*self.frame_notify;
        let mut processed = lock.lock();
        if !*processed {
            cvar.wait(&mut processed);
        }
        *processed = false;
        drop(processed);

        if self.error_notify.load(atomic::Ordering::Relaxed) {
            if let Some(transcode_thread) = self.transcode_thread.take() {
                transcode_thread.join().expect("Failed to join transcode thread")?;
            }
        }

        Ok(())
    }

    /// Sends a video frame with audio to the video encoder for encoding.
    ///
    /// # Arguments
    ///
    /// * `frame` - A mutable reference to the `Frame` to be encoded.
    /// * `audio_buffer` - A reference to the audio byte slice to be encoded.
    ///
    /// # Returns
    ///
    /// Returns `Ok(())` if the frame is successfully sent for encoding, or a `VideoEncoderError`
    /// if an error occurs.
    #[inline]
    pub fn send_frame_with_audio(
        &mut self,
        frame: &mut Frame,
        audio_buffer: &[u8],
    ) -> Result<(), VideoEncoderError> {
        if self.is_video_disabled {
            return Err(VideoEncoderError::VideoDisabled);
        }

        if self.is_audio_disabled {
            return Err(VideoEncoderError::AudioDisabled);
        }

        let timestamp = match self.first_timestamp {
            Some(timestamp) => {
                TimeSpan { Duration: frame.timestamp().Duration - timestamp.Duration }
            }
            None => {
                let timestamp = frame.timestamp();
                self.first_timestamp = Some(timestamp);
                TimeSpan { Duration: 0 }
            }
        };

        self.frame_sender.send(Some((
            VideoEncoderSource::DirectX(SendDirectX::new(unsafe {
                frame.as_raw_surface().clone()
            })),
            timestamp,
        )))?;

        let (lock, cvar) = &*self.frame_notify;
        let mut processed = lock.lock();
        if !*processed {
            cvar.wait(&mut processed);
        }
        *processed = false;
        drop(processed);

        if self.error_notify.load(atomic::Ordering::Relaxed) {
            if let Some(transcode_thread) = self.transcode_thread.take() {
                transcode_thread.join().expect("Failed to join transcode thread")?;
            }
        }

        self.audio_sender.send(Some((
            AudioEncoderSource::Buffer((
                SendDirectX::new(audio_buffer.as_ptr()),
                audio_buffer.len(),
            )),
            timestamp,
        )))?;

        let (lock, cvar) = &*self.audio_notify;
        let mut processed = lock.lock();
        if !*processed {
            cvar.wait(&mut processed);
        }
        *processed = false;
        drop(processed);

        if self.error_notify.load(atomic::Ordering::Relaxed) {
            if let Some(transcode_thread) = self.transcode_thread.take() {
                transcode_thread.join().expect("Failed to join transcode thread")?;
            }
        }

        Ok(())
    }

    /// Sends a video frame to the video encoder for encoding.
    ///
    /// # Arguments
    ///
    /// * `buffer` - A reference to the frame byte slice to be encoded. The Windows API expects this to be BGRA and bottom-to-top.
    /// * `timestamp` - The timestamp of the frame, in 100-nanosecond units.
    ///
    /// # Returns
    ///
    /// Returns `Ok(())` if the frame is successfully sent for encoding, or a `VideoEncoderError`
    /// if an error occurs.
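    ///
    /// # Example
    ///
    /// A minimal sketch of streaming raw BGRA frames at a fixed frame rate; the resolution and
    /// frame count are illustrative, and `encoder` is assumed to be a `VideoEncoder` created
    /// elsewhere for 1280x720 output.
    ///
    /// ```ignore
    /// // `encoder`: a `VideoEncoder` created elsewhere (see `VideoEncoder::new`).
    /// let frame = vec![0u8; 1280 * 720 * 4];
    ///
    /// // 30 fps => 1/30 s per frame => 333_333 units of 100 ns.
    /// for i in 0..30i64 {
    ///     encoder.send_frame_buffer(&frame, i * 333_333)?;
    /// }
    /// ```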
    #[inline]
    pub fn send_frame_buffer(
        &mut self,
        buffer: &[u8],
        timestamp: i64,
    ) -> Result<(), VideoEncoderError> {
        if self.is_video_disabled {
            return Err(VideoEncoderError::VideoDisabled);
        }

        let frame_timestamp = timestamp;
        let timestamp = match self.first_timestamp {
            Some(timestamp) => TimeSpan { Duration: frame_timestamp - timestamp.Duration },
            None => {
                let timestamp = frame_timestamp;
                self.first_timestamp = Some(TimeSpan { Duration: timestamp });
                TimeSpan { Duration: 0 }
            }
        };

        self.frame_sender.send(Some((
            VideoEncoderSource::Buffer((SendDirectX::new(buffer.as_ptr()), buffer.len())),
            timestamp,
        )))?;

        let (lock, cvar) = &*self.frame_notify;
        let mut processed = lock.lock();
        if !*processed {
            cvar.wait(&mut processed);
        }
        *processed = false;
        drop(processed);

        if self.error_notify.load(atomic::Ordering::Relaxed) {
            if let Some(transcode_thread) = self.transcode_thread.take() {
                transcode_thread.join().expect("Failed to join transcode thread")?;
            }
        }

        Ok(())
    }

    /// Sends an audio buffer to the video encoder for encoding.
    ///
    /// # Arguments
    ///
    /// * `buffer` - A reference to the audio byte slice to be encoded.
    /// * `timestamp` - The timestamp of the audio buffer, in 100-nanosecond units.
    ///
    /// # Returns
    ///
    /// Returns `Ok(())` if the audio buffer is successfully sent for encoding, or a
    /// `VideoEncoderError` if an error occurs.
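    ///
    /// # Example
    ///
    /// A minimal sketch of sending one second of silent 16-bit stereo PCM at 48 kHz; `encoder`
    /// is assumed to be a `VideoEncoder` created elsewhere with audio enabled.
    ///
    /// ```ignore
    /// // `encoder`: a `VideoEncoder` created elsewhere (see `VideoEncoder::new`).
    /// // 48_000 samples/s * 2 channels * 2 bytes per sample.
    /// let silence = vec![0u8; 48_000 * 2 * 2];
    /// encoder.send_audio_buffer(&silence, 0)?;
    /// ```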
    #[inline]
    pub fn send_audio_buffer(
        &mut self,
        buffer: &[u8],
        timestamp: i64,
    ) -> Result<(), VideoEncoderError> {
        if self.is_audio_disabled {
            return Err(VideoEncoderError::AudioDisabled);
        }

        let audio_timestamp = timestamp;
        let timestamp = match self.first_timestamp {
            Some(timestamp) => TimeSpan { Duration: audio_timestamp - timestamp.Duration },
            None => {
                let timestamp = audio_timestamp;
                self.first_timestamp = Some(TimeSpan { Duration: timestamp });
                TimeSpan { Duration: 0 }
            }
        };

        self.audio_sender.send(Some((
            AudioEncoderSource::Buffer((SendDirectX::new(buffer.as_ptr()), buffer.len())),
            timestamp,
        )))?;

        let (lock, cvar) = &*self.audio_notify;
        let mut processed = lock.lock();
        if !*processed {
            cvar.wait(&mut processed);
        }
        *processed = false;
        drop(processed);

        if self.error_notify.load(atomic::Ordering::Relaxed) {
            if let Some(transcode_thread) = self.transcode_thread.take() {
                transcode_thread.join().expect("Failed to join transcode thread")?;
            }
        }

        Ok(())
    }

    /// Finishes encoding the video and performs any necessary cleanup.
    ///
    /// # Returns
    ///
    /// Returns `Ok(())` if the encoding is successfully finished, or a `VideoEncoderError` if an
    /// error occurs.
    #[inline]
    pub fn finish(mut self) -> Result<(), VideoEncoderError> {
        self.frame_sender.send(None)?;
        self.audio_sender.send(None)?;

        if let Some(transcode_thread) = self.transcode_thread.take() {
            transcode_thread.join().expect("Failed to join transcode thread")?;
        }

        self.media_stream_source.RemoveStarting(self.starting)?;
        self.media_stream_source.RemoveSampleRequested(self.sample_requested)?;

        Ok(())
    }
}

impl Drop for VideoEncoder {
    #[inline]
    fn drop(&mut self) {
        let _ = self.frame_sender.send(None);

        if let Some(transcode_thread) = self.transcode_thread.take() {
            let _ = transcode_thread.join();
        }
    }
}

#[allow(clippy::non_send_fields_in_send_ty)]
unsafe impl Send for VideoEncoder {}