// hylarana_resample/lib.rs

1use std::{
2    marker::PhantomData,
3    ptr::null_mut,
4    sync::{
5        atomic::AtomicBool,
6        mpsc::{channel, Sender},
7        Arc,
8    },
9    thread,
10};
11
12use common::atomic::EasyAtomic;
13use ffmpeg::*;
14use thiserror::Error;
15
/// Sample formats the audio resampler can read and write.
#[derive(Debug, Clone, Copy)]
pub enum AudioSampleFormat {
    /// Signed 16-bit integer samples.
    I16,
    /// Signed 32-bit integer samples.
    I32,
    /// 32-bit floating-point samples.
    F32,
}
22
23impl Into<AVSampleFormat> for AudioSampleFormat {
24    fn into(self) -> AVSampleFormat {
25        match self {
26            Self::I16 => AVSampleFormat::AV_SAMPLE_FMT_S16,
27            Self::I32 => AVSampleFormat::AV_SAMPLE_FMT_S32,
28            Self::F32 => AVSampleFormat::AV_SAMPLE_FMT_FLT,
29        }
30    }
31}
32
/// Describes one side (input or output) of an audio conversion:
/// sample format, sampling rate and channel count.
#[derive(Debug, Clone, Copy)]
pub struct AudioSampleDescription {
    /// Per-sample storage format.
    pub sample_bits: AudioSampleFormat,
    /// Sampling rate in Hz.
    pub sample_rate: u32,
    /// Number of interleaved channels; only 1 (mono) and 2 (stereo)
    /// are supported by `channel_layout`.
    pub channels: u8,
}
39
40impl AudioSampleDescription {
41    fn channel_layout(&self) -> AVChannelLayout {
42        AVChannelLayout {
43            order: AVChannelOrder::AV_CHANNEL_ORDER_NATIVE,
44            nb_channels: self.channels as i32,
45            u: AVChannelLayout__bindgen_ty_1 {
46                mask: match self.channels {
47                    1 => AV_CH_LAYOUT_MONO,
48                    2 => AV_CH_LAYOUT_STEREO,
49                    _ => unimplemented!("unsupports audio channels={}", self.channels),
50                },
51            },
52            opaque: null_mut(),
53        }
54    }
55}
56
/// Consumer of converted audio buffers.
///
/// `output` receives the converted interleaved samples along with a
/// frame count; returning `false` stops the resampler's worker thread.
pub trait AudioResamplerOutput<T>: Send {
    fn output(&mut self, buffer: &[T], frames: u32) -> bool;
}
60
/// Errors produced by [`AudioResampler`].
#[derive(Debug, Error)]
pub enum AudioResamplerError {
    #[error("failed to send buffer to queue")]
    SendBufferError,
    #[error("failed to create swresample")]
    CreateSwresampleError,
    #[error("queue is closed")]
    QueueClosed,
}
70
/// Audio resampler, quickly resample input to a single channel count and
/// different sampling rates.
///
/// Note that due to the fast sampling, the quality may be reduced.
pub struct AudioResampler<I, O> {
    /// Pins the output sample type without storing a value of it.
    _p: PhantomData<O>,
    /// Feeds input buffers to the background conversion thread.
    tx: Sender<Vec<I>>,
    /// `true` while the conversion thread is alive; flipped to `false`
    /// when the thread exits for any reason.
    status: Arc<AtomicBool>,
}
80
81impl<I, O> AudioResampler<I, O>
82where
83    I: Copy + Send + 'static,
84    O: Copy + Default,
85{
86    pub fn new<T: AudioResamplerOutput<O> + 'static>(
87        input: AudioSampleDescription,
88        output: AudioSampleDescription,
89        mut sink: T,
90    ) -> Result<Self, AudioResamplerError> {
91        let (tx, rx) = channel::<Vec<I>>();
92
93        let status = Arc::new(AtomicBool::new(true));
94        let mut swresample = Swresample::new(&input, &output)
95            .ok_or_else(|| AudioResamplerError::CreateSwresampleError)?;
96
97        let status_ = status.clone();
98        thread::spawn(move || {
99            let mut output: Vec<O> =
100                vec![O::default(); output.sample_rate as usize * output.channels as usize];
101
102            while let Ok(buffer) = rx.recv() {
103                let frames = buffer.len() / input.channels as usize;
104                if swresample.convert(&buffer, &mut output, frames as i32) {
105                    if !sink.output(&output, frames as u32) {
106                        break;
107                    }
108                } else {
109                    break;
110                }
111            }
112
113            status_.update(false);
114        });
115
116        Ok(Self {
117            _p: PhantomData::default(),
118            status,
119            tx,
120        })
121    }
122
123    pub fn resample<'a>(&'a mut self, buffer: &'a [I]) -> Result<(), AudioResamplerError> {
124        if !self.status.get() {
125            return Err(AudioResamplerError::QueueClosed);
126        }
127
128        self.tx
129            .send(buffer.to_vec())
130            .map_err(|_| AudioResamplerError::SendBufferError)?;
131        Ok(())
132    }
133}
134
/// Thin RAII wrapper around an FFmpeg `SwrContext` pointer; freed in
/// `Drop`.
struct Swresample(*mut SwrContext);

// SAFETY: the context pointer is owned by this wrapper and, in this
// crate, only used from the single worker thread that owns it.
// NOTE(review): `Sync` additionally permits shared `&self` access from
// multiple threads, which libswresample does not guarantee — confirm the
// context is never actually used concurrently.
unsafe impl Send for Swresample {}
unsafe impl Sync for Swresample {}
139
140impl Swresample {
141    fn new(input: &AudioSampleDescription, output: &AudioSampleDescription) -> Option<Self> {
142        let mut ctx = null_mut();
143        if unsafe {
144            swr_alloc_set_opts2(
145                &mut ctx,
146                &output.channel_layout(),
147                output.sample_bits.into(),
148                output.sample_rate as i32,
149                &input.channel_layout(),
150                input.sample_bits.into(),
151                output.sample_rate as i32,
152                0,
153                null_mut(),
154            )
155        } != 0
156        {
157            return None;
158        }
159
160        if unsafe { swr_init(ctx) } != 0 {
161            return None;
162        }
163
164        Some(Self(ctx))
165    }
166
167    fn convert<I, O>(&mut self, input: &[I], output: &mut [O], frames: i32) -> bool {
168        unsafe {
169            swr_convert(
170                self.0,
171                [output.as_mut_ptr() as _].as_ptr(),
172                frames,
173                [input.as_ptr() as _].as_ptr(),
174                frames,
175            ) >= 0
176        }
177    }
178}
179
impl Drop for Swresample {
    fn drop(&mut self) {
        // SAFETY: `self.0` was allocated by `swr_alloc_set_opts2` and is
        // released exactly once here; `swr_free` also nulls the pointer.
        unsafe {
            swr_free(&mut self.0);
        }
    }
}
187
188#[cfg(target_os = "windows")]
189pub mod win32 {
190    use std::mem::ManuallyDrop;
191
192    use common::{
193        frame::VideoFormat,
194        win32::{
195            windows::{
196                core::{Error, Interface},
197                Win32::{
198                    Foundation::RECT,
199                    Graphics::{
200                        Direct3D11::{
201                            ID3D11Device, ID3D11DeviceContext, ID3D11Texture2D, ID3D11VideoContext,
202                            ID3D11VideoDevice, ID3D11VideoProcessor,
203                            ID3D11VideoProcessorEnumerator, ID3D11VideoProcessorInputView,
204                            ID3D11VideoProcessorOutputView, D3D11_BIND_RENDER_TARGET,
205                            D3D11_BIND_SHADER_RESOURCE, D3D11_CPU_ACCESS_READ,
206                            D3D11_CPU_ACCESS_WRITE, D3D11_MAPPED_SUBRESOURCE, D3D11_MAP_READ,
207                            D3D11_MAP_WRITE_DISCARD, D3D11_RESOURCE_MISC_SHARED,
208                            D3D11_TEXTURE2D_DESC, D3D11_USAGE_DEFAULT, D3D11_USAGE_DYNAMIC,
209                            D3D11_USAGE_STAGING, D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE,
210                            D3D11_VIDEO_PROCESSOR_COLOR_SPACE, D3D11_VIDEO_PROCESSOR_CONTENT_DESC,
211                            D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC,
212                            D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC, D3D11_VIDEO_PROCESSOR_STREAM,
213                            D3D11_VIDEO_USAGE_PLAYBACK_NORMAL, D3D11_VPIV_DIMENSION_TEXTURE2D,
214                            D3D11_VPOV_DIMENSION_TEXTURE2D,
215                        },
216                        Dxgi::Common::{
217                            DXGI_FORMAT, DXGI_FORMAT_B8G8R8A8_UNORM, DXGI_FORMAT_NV12,
218                            DXGI_FORMAT_R8G8B8A8_UNORM,
219                        },
220                    },
221                },
222            },
223            Direct3DDevice,
224        },
225        Size,
226    };
227
    /// Where a texture comes from: created internally from a
    /// format/size description, or supplied ready-made by the caller.
    #[derive(Clone)]
    pub enum Resource {
        /// Create an internal texture with this format and size.
        Default(VideoFormat, Size),
        /// Use this externally created texture as-is.
        Texture(ID3D11Texture2D),
    }
233
    /// Construction options for [`VideoResampler`].
    pub struct VideoResamplerOptions {
        /// Device/context pair the video processor runs on.
        pub direct3d: Direct3DDevice,
        /// Source texture, or a description of one to create.
        pub input: Resource,
        /// Destination texture, or a description of one to create.
        pub output: Resource,
    }
239
    /// Used to convert video frames using hardware accelerators, including
    /// color space conversion and scaling. Note that the output is fixed to
    /// NV12, but the input is optional and is RGBA by default. However, if
    /// you use the `process` method, you can let the external texture
    /// decide what format to use, because this method does not copy the
    /// texture.
    #[allow(unused)]
    pub struct VideoResampler {
        d3d_device: ID3D11Device,
        d3d_context: ID3D11DeviceContext,
        video_device: ID3D11VideoDevice,
        video_context: ID3D11VideoContext,
        /// GPU-side input — what the video processor actually reads.
        input_texture: ID3D11Texture2D,
        /// CPU-writable staging texture; created only when the input is
        /// a `Resource::Default` NV12/I420 description (see `new`).
        input_sw_texture: Option<ID3D11Texture2D>,
        /// GPU-side output written by `process`.
        output_texture: ID3D11Texture2D,
        /// CPU-readable staging copy of the output; created lazily by
        /// `get_output_buffer`.
        output_sw_texture: Option<ID3D11Texture2D>,
        video_enumerator: ID3D11VideoProcessorEnumerator,
        video_processor: ID3D11VideoProcessor,
        /// Default view over `input_texture`, used when `process` is
        /// called without an explicit view.
        input_view: ID3D11VideoProcessorInputView,
        output_view: ID3D11VideoProcessorOutputView,
    }
261
    // SAFETY: NOTE(review): these COM interfaces are wrapped as-is.
    // `ID3D11DeviceContext` is not thread-safe by itself — confirm all
    // cross-thread use of this type is externally synchronized.
    unsafe impl Send for VideoResampler {}
    unsafe impl Sync for VideoResampler {}
264
265    impl VideoResampler {
        /// Create `VideoResampler`, the default_device parameter is used to
        /// directly use the device when it has been created externally, so
        /// there is no need to copy across devices, which improves
        /// processing performance.
        ///
        /// # Errors
        ///
        /// Fails when any D3D11 resource (textures, the video processor,
        /// or its input/output views) cannot be created.
        pub fn new(options: VideoResamplerOptions) -> Result<Self, Error> {
            let (d3d_device, d3d_context) = (options.direct3d.device, options.direct3d.context);
            // Video device/context are obtained from the plain D3D11
            // device/context via QueryInterface (`cast`).
            let video_device = d3d_device.cast::<ID3D11VideoDevice>()?;
            let video_context = d3d_context.cast::<ID3D11VideoContext>()?;

            // Input texture: either caller-supplied, or a GPU-default
            // render target of the requested format/size.
            let input_texture = match options.input.clone() {
                Resource::Texture(texture) => texture,
                Resource::Default(format, size) => unsafe {
                    let mut desc = D3D11_TEXTURE2D_DESC::default();
                    desc.Width = size.width;
                    desc.Height = size.height;
                    desc.MipLevels = 1;
                    desc.ArraySize = 1;
                    desc.SampleDesc.Count = 1;
                    desc.SampleDesc.Quality = 0;
                    desc.Usage = D3D11_USAGE_DEFAULT;
                    desc.BindFlags = D3D11_BIND_RENDER_TARGET.0 as u32;
                    desc.CPUAccessFlags = 0;
                    desc.MiscFlags = 0;
                    desc.Format = video_fmt_to_dxgi_fmt(format);

                    let mut texture = None;
                    d3d_device.CreateTexture2D(&desc, None, Some(&mut texture))?;
                    texture.unwrap()
                },
            };

            // For NV12/I420 default inputs, also create a CPU-writable
            // (dynamic) staging texture used by
            // `update_input_from_buffer`'s Map/copy path.
            let input_sw_texture = match options.input {
                Resource::Default(format, size)
                    if format == VideoFormat::NV12 || format == VideoFormat::I420 =>
                {
                    let mut desc = D3D11_TEXTURE2D_DESC::default();
                    desc.Width = size.width;
                    desc.Height = size.height;
                    desc.MipLevels = 1;
                    desc.ArraySize = 1;
                    desc.SampleDesc.Count = 1;
                    desc.SampleDesc.Quality = 0;
                    desc.Usage = D3D11_USAGE_DYNAMIC;
                    desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE.0 as u32;
                    desc.BindFlags = D3D11_BIND_SHADER_RESOURCE.0 as u32;
                    desc.MiscFlags = 0;
                    desc.Format = video_fmt_to_dxgi_fmt(format);

                    let mut texture = None;
                    unsafe {
                        d3d_device.CreateTexture2D(&desc, None, Some(&mut texture))?;
                    }

                    Some(texture.unwrap())
                }
                _ => None,
            };

            // Output texture: shared (MiscFlags) so other devices/APIs
            // can open it.
            let output_texture = match options.output {
                Resource::Texture(texture) => texture,
                Resource::Default(format, size) => unsafe {
                    let mut desc = D3D11_TEXTURE2D_DESC::default();
                    desc.Width = size.width;
                    desc.Height = size.height;
                    desc.MipLevels = 1;
                    desc.ArraySize = 1;
                    desc.SampleDesc.Count = 1;
                    desc.SampleDesc.Quality = 0;
                    desc.Usage = D3D11_USAGE_DEFAULT;
                    desc.BindFlags = D3D11_BIND_RENDER_TARGET.0 as u32;
                    desc.CPUAccessFlags = 0;
                    desc.MiscFlags = D3D11_RESOURCE_MISC_SHARED.0 as u32;
                    desc.Format = video_fmt_to_dxgi_fmt(format);

                    let mut texture = None;
                    d3d_device.CreateTexture2D(&desc, None, Some(&mut texture))?;
                    texture.unwrap()
                },
            };

            // Read back the actual descriptors — sizes are needed below
            // for the processor content description and the blit rects.
            let mut input_desc = D3D11_TEXTURE2D_DESC::default();
            unsafe {
                input_texture.GetDesc(&mut input_desc);
            }

            let mut output_desc = D3D11_TEXTURE2D_DESC::default();
            unsafe {
                output_texture.GetDesc(&mut output_desc);
            }

            // Create the video processor for these input/output sizes.
            let (video_enumerator, video_processor) = unsafe {
                let mut desc = D3D11_VIDEO_PROCESSOR_CONTENT_DESC::default();
                desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
                desc.InputWidth = input_desc.Width;
                desc.InputHeight = input_desc.Height;
                desc.OutputWidth = output_desc.Width;
                desc.OutputHeight = output_desc.Height;
                desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;

                let enumerator = video_device.CreateVideoProcessorEnumerator(&desc)?;
                let processor = video_device.CreateVideoProcessor(&enumerator, 0)?;
                (enumerator, processor)
            };

            // Default input view over the internal input texture.
            let input_view = unsafe {
                let mut desc = D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC::default();
                desc.FourCC = 0;
                desc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D;
                desc.Anonymous.Texture2D.MipSlice = 0;

                let mut view = None;
                video_device.CreateVideoProcessorInputView(
                    &input_texture,
                    &video_enumerator,
                    &desc,
                    Some(&mut view),
                )?;

                view.unwrap()
            };

            let output_view = unsafe {
                let mut desc = D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC::default();
                desc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D;

                let mut view = None;
                video_device.CreateVideoProcessorOutputView(
                    &output_texture,
                    &video_enumerator,
                    &desc,
                    Some(&mut view),
                )?;

                view.unwrap()
            };

            // Source rect covers the whole input; dest rect covers the
            // whole output — scaling happens between the two.
            unsafe {
                video_context.VideoProcessorSetStreamSourceRect(
                    &video_processor,
                    0,
                    true,
                    Some(&RECT {
                        left: 0,
                        top: 0,
                        right: input_desc.Width as i32,
                        bottom: input_desc.Height as i32,
                    }),
                );
            }

            unsafe {
                video_context.VideoProcessorSetStreamDestRect(
                    &video_processor,
                    0,
                    true,
                    Some(&RECT {
                        left: 0,
                        top: 0,
                        right: output_desc.Width as i32,
                        bottom: output_desc.Height as i32,
                    }),
                );
            }

            unsafe {
                // NOTE(review): a default (zeroed) color space is set
                // here — confirm this matches the expected input range
                // (studio vs full) for the sources being converted.
                let color_space = D3D11_VIDEO_PROCESSOR_COLOR_SPACE::default();
                video_context.VideoProcessorSetStreamColorSpace(&video_processor, 0, &color_space);
            }

            Ok(Self {
                output_sw_texture: None,
                input_sw_texture,
                d3d_device,
                d3d_context,
                video_device,
                video_context,
                video_enumerator,
                video_processor,
                input_texture,
                output_texture,
                input_view,
                output_view,
            })
        }
447                output_view,
448            })
449        }
450
451        /// To update the internal texture, simply copy it to the internal
452        /// texture.
453        pub fn update_input(&mut self, texture: &ID3D11Texture2D) {
454            unsafe {
455                self.d3d_context.CopyResource(&self.input_texture, texture);
456            }
457        }
458
        /// Uploads a CPU-side frame into the internal input texture.
        ///
        /// `data` and `linesize` follow one-entry-per-plane layout.
        /// BGRA/RGBA upload plane 0 directly via `UpdateSubresource`;
        /// NV12 uses the same fast path when its planes are contiguous
        /// in memory, otherwise (and always for I420) the planes are
        /// written through the CPU-writable staging texture, with I420's
        /// U/V planes interleaved into NV12 order.
        ///
        /// NOTE(review): the staging path assumes the mapped RowPitch
        /// equals the source linesize and that the UV plane starts
        /// immediately after Y in the mapping — confirm for widths where
        /// the driver pads rows.
        pub fn update_input_from_buffer(
            &mut self,
            format: VideoFormat,
            data: &[&[u8]],
            linesize: &[u32],
        ) -> Result<(), Error> {
            match format {
                VideoFormat::BGRA | VideoFormat::RGBA => unsafe {
                    self.d3d_context.UpdateSubresource(
                        &self.input_texture,
                        0,
                        None,
                        data[0].as_ptr() as *const _,
                        linesize[0],
                        0,
                    );
                },
                // Although NV12 separates the two planes, usually memory is contiguous and
                // is treated uniformly here, but of course there are contingencies, and
                // this is not a good implementation here, but in most cases there will be
                // one less copy step.
                VideoFormat::NV12 => {
                    if is_single_allocation(&data[0..2]) {
                        unsafe {
                            self.d3d_context.UpdateSubresource(
                                &self.input_texture,
                                0,
                                None,
                                data[0].as_ptr() as *const _,
                                linesize[0],
                                0,
                            );
                        }
                    } else {
                        // NOTE(review): silently a no-op when no staging
                        // texture exists (input was not a NV12/I420
                        // `Resource::Default`) — confirm intended.
                        if let Some(texture) = &self.input_sw_texture {
                            let mut mappend = D3D11_MAPPED_SUBRESOURCE::default();
                            unsafe {
                                self.d3d_context.Map(
                                    texture,
                                    0,
                                    D3D11_MAP_WRITE_DISCARD,
                                    0,
                                    Some(&mut mappend),
                                )?;
                            }

                            // Copy the Y plane at the start of the mapping.
                            unsafe {
                                std::slice::from_raw_parts_mut(
                                    mappend.pData as *mut u8,
                                    data[0].len(),
                                )
                            }
                            .copy_from_slice(data[0]);

                            // Copy the interleaved UV plane right after Y.
                            unsafe {
                                std::slice::from_raw_parts_mut(
                                    mappend.pData.add(data[0].len()) as *mut u8,
                                    data[1].len(),
                                )
                            }
                            .copy_from_slice(data[1]);

                            unsafe {
                                self.d3d_context.Unmap(texture, 0);
                                self.d3d_context.CopyResource(&self.input_texture, texture);
                            }
                        }
                    }
                }
                VideoFormat::I420 => {
                    // NOTE(review): as above, a no-op when no staging
                    // texture exists.
                    if let Some(texture) = &self.input_sw_texture {
                        let mut mappend = D3D11_MAPPED_SUBRESOURCE::default();
                        unsafe {
                            self.d3d_context.Map(
                                texture,
                                0,
                                D3D11_MAP_WRITE_DISCARD,
                                0,
                                Some(&mut mappend),
                            )?;
                        }

                        // Y plane copies over unchanged.
                        unsafe {
                            std::slice::from_raw_parts_mut(mappend.pData as *mut u8, data[0].len())
                        }
                        .copy_from_slice(data[0]);

                        {
                            let buffer = unsafe {
                                std::slice::from_raw_parts_mut(
                                    mappend.pData.add(data[0].len()) as *mut u8,
                                    data[1].len() + data[2].len(),
                                )
                            };

                            // Interleave planar U (data[1]) and V
                            // (data[2]) into the NV12 UV plane.
                            let mut index = 0;
                            for i in 0..data[1].len() {
                                buffer[index] = data[1][i];
                                buffer[index + 1] = data[2][i];
                                index += 2;
                            }
                        }

                        unsafe {
                            self.d3d_context.Unmap(texture, 0);
                            self.d3d_context.CopyResource(&self.input_texture, texture);
                        }
                    }
                }
            };

            Ok(())
        }
576
        /// Creates a processor input view over an external texture
        /// (array slice `index`), so `process` can read the texture
        /// directly without copying it into the internal input texture —
        /// saving a copy step and improving performance.
        ///
        /// # Errors
        ///
        /// Fails when the view cannot be created for this texture.
        pub fn create_input_view(
            &mut self,
            texture: &ID3D11Texture2D,
            index: u32,
        ) -> Result<ID3D11VideoProcessorInputView, Error> {
            let input_view = unsafe {
                let mut desc = D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC::default();
                desc.FourCC = 0;
                desc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D;
                desc.Anonymous.Texture2D.MipSlice = 0;
                desc.Anonymous.Texture2D.ArraySlice = index;

                let mut view = None;
                self.video_device.CreateVideoProcessorInputView(
                    texture,
                    &self.video_enumerator,
                    &desc,
                    Some(&mut view),
                )?;

                view.unwrap()
            };

            Ok(input_view)
        }
605
        /// Borrows the GPU output texture that `process` writes into.
        pub fn get_output(&self) -> &ID3D11Texture2D {
            &self.output_texture
        }
609
610        pub fn get_output_buffer(&mut self) -> Result<TextureBuffer, Error> {
611            if self.output_sw_texture.is_none() {
612                unsafe {
613                    let mut desc = D3D11_TEXTURE2D_DESC::default();
614                    self.output_texture.GetDesc(&mut desc);
615
616                    desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ.0 as u32;
617                    desc.Usage = D3D11_USAGE_STAGING;
618                    desc.BindFlags = 0;
619                    desc.MiscFlags = 0;
620
621                    let mut texture = None;
622                    self.d3d_device
623                        .CreateTexture2D(&desc, None, Some(&mut texture))?;
624
625                    self.output_sw_texture = Some(texture.unwrap());
626                };
627            }
628
629            let texture = self.output_sw_texture.as_ref().unwrap();
630            unsafe {
631                self.d3d_context.CopyResource(texture, &self.output_texture);
632            }
633
634            Ok(TextureBuffer::new(&self.d3d_context, texture)?)
635        }
636
637        pub fn process(
638            &mut self,
639            input_view: Option<ID3D11VideoProcessorInputView>,
640        ) -> Result<(), Error> {
641            unsafe {
642                let mut streams = [D3D11_VIDEO_PROCESSOR_STREAM::default()];
643                streams[0].Enable = true.into();
644                streams[0].OutputIndex = 0;
645                streams[0].InputFrameOrField = 0;
646                streams[0].pInputSurface =
647                    ManuallyDrop::new(Some(input_view.unwrap_or_else(|| self.input_view.clone())));
648
649                self.video_context.VideoProcessorBlt(
650                    &self.video_processor,
651                    &self.output_view,
652                    0,
653                    &streams,
654                )?;
655
656                ManuallyDrop::drop(&mut streams[0].pInputSurface);
657            }
658
659            Ok(())
660        }
661    }
662
    /// A mapped, CPU-readable view of a staging texture; the mapping is
    /// released when this value is dropped.
    pub struct TextureBuffer<'a> {
        d3d_context: &'a ID3D11DeviceContext,
        texture: &'a ID3D11Texture2D,
        /// Result of `ID3D11DeviceContext::Map`: holds the data pointer
        /// and the row pitch.
        resource: D3D11_MAPPED_SUBRESOURCE,
    }

    // SAFETY: NOTE(review): the mapped pointer is only read and the
    // context reference is only used in `Drop`; confirm the same device
    // context is not used concurrently from other threads.
    unsafe impl Send for TextureBuffer<'_> {}
    unsafe impl Sync for TextureBuffer<'_> {}
671
672    impl<'a> TextureBuffer<'a> {
673        pub fn new(
674            d3d_context: &'a ID3D11DeviceContext,
675            texture: &'a ID3D11Texture2D,
676        ) -> Result<Self, Error> {
677            let mut resource = D3D11_MAPPED_SUBRESOURCE::default();
678            unsafe {
679                d3d_context.Map(texture, 0, D3D11_MAP_READ, 0, Some(&mut resource))?;
680            }
681
682            Ok(Self {
683                d3d_context,
684                resource,
685                texture,
686            })
687        }
688
689        /// Represents a pointer to texture data. Internally, the texture is
690        /// copied to the CPU first, and then the internal data is
691        /// mapped.
692        pub fn buffer(&self) -> *const u8 {
693            self.resource.pData as *const _
694        }
695
696        /// The stride of the texture data
697        pub fn stride(&self) -> u32 {
698            self.resource.RowPitch
699        }
700    }
701
    impl Drop for TextureBuffer<'_> {
        fn drop(&mut self) {
            // Release the CPU mapping created in `TextureBuffer::new`.
            unsafe {
                self.d3d_context.Unmap(self.texture, 0);
            }
        }
    }
709
710    fn is_single_allocation<T>(source: &[&[T]]) -> bool {
711        let mut size = 0;
712        let mut offset = 0;
713
714        for it in source {
715            if size > 0 {
716                if offset + size != it.as_ptr() as usize {
717                    return false;
718                }
719            }
720
721            size = it.len();
722            offset = it.as_ptr() as usize;
723        }
724
725        true
726    }
727
    /// Maps the crate's video format to the DXGI texture format.
    fn video_fmt_to_dxgi_fmt(format: VideoFormat) -> DXGI_FORMAT {
        match format {
            // I420 data is interleaved into NV12 layout by
            // `update_input_from_buffer`, so both map to an NV12 texture.
            VideoFormat::NV12 | VideoFormat::I420 => DXGI_FORMAT_NV12,
            VideoFormat::RGBA => DXGI_FORMAT_R8G8B8A8_UNORM,
            VideoFormat::BGRA => DXGI_FORMAT_B8G8R8A8_UNORM,
        }
    }
735}