1use crate::{
2    makepad_derive_widget::*,
3    makepad_draw::*,
4    makepad_platform::{event::video_decoding::*},
5    widget::*,
6    VideoColorFormat,
7};
8use std::{
9    collections::VecDeque,
10    sync::mpsc::channel,
11    sync::{Arc, Mutex},
12    thread,
13    time::{Duration, Instant},
14};
15
// Number of frames requested from the platform decoder per chunk.
const MAX_FRAMES_TO_DECODE: usize = 20;
// Refill threshold: once the frame buffer falls below ~1/3 of a chunk,
// more frames are fetched/decoded (see `is_buffer_running_low`).
const FRAME_BUFFER_LOW_WATER_MARK: usize = MAX_FRAMES_TO_DECODE / 3;
18
// Live-design registration: exposes `VideoBase` to the DSL, backed by the
// `Video` widget defined below.
live_design! {
    VideoBase = {{Video}} {}
}
27
// A video playback widget. Frames are decoded by the platform layer in
// chunks, deserialized on a background thread into a shared ring buffer,
// and presented by swapping pixel data into a texture on a timer tick.
#[derive(Live)]
pub struct Video {
    // Quad that displays the current video frame texture.
    #[live]
    draw_bg: DrawColor,
    #[walk]
    walk: Walk,
    #[live]
    layout: Layout,
    // NOTE(review): `scale` is not referenced in this file — presumably
    // consumed by the shader/DSL; confirm before removing.
    #[live]
    scale: f64,

    // Live dependency path of the video resource to decode.
    #[live]
    source: LiveDependency,
    // Created lazily on the first decoded frame (see `update_texture`).
    #[rust]
    texture: Option<Texture>,

    // Restart playback on end-of-stream instead of finishing.
    #[live(false)]
    is_looping: bool,
    // Pause while a finger is held down, resume on release.
    #[live(false)]
    hold_to_pause: bool,
    // Start playback as soon as the widget is created from the live doc.
    #[live(false)]
    autoplay: bool,
    #[rust]
    playback_state: PlaybackState,
    // When the current pause began; taken on resume.
    #[rust]
    pause_time: Option<Instant>,
    // Accumulated time spent paused across the session.
    #[rust]
    total_pause_duration: Duration,

    // Video metadata, populated from `VideoDecodingInitializedEvent`.
    #[rust]
    video_width: usize,
    #[rust]
    video_height: usize,
    // Total duration as reported by the decoder (units per platform event).
    #[rust]
    total_duration: u128,
    #[rust]
    original_frame_rate: usize,
    #[rust]
    color_format: VideoColorFormat,

    // FIFO of decoded frames shared with the deserialization thread.
    #[rust]
    frames_buffer: SharedRingBuffer,

    // True once a single preview frame has been uploaded to the texture.
    #[rust]
    is_current_texture_preview: bool,
    // Presentation timestamp (us) at which the next frame should be shown.
    #[rust]
    next_frame_ts: u128,
    // Microseconds between frames at the source frame rate.
    #[rust]
    frame_ts_interval: f64,
    // Wall-clock anchor for playback; None until the first frame is shown.
    #[rust]
    start_time: Option<Instant>,
    // Interval timer driving preview/playback advancement.
    #[rust]
    tick: Timer,

    // Receives serialized frame groups from the platform decoder.
    #[rust]
    decoding_receiver: ToUIReceiver<Vec<u8>>,
    #[rust]
    decoding_state: DecodingState,
    // Pool of reusable pixel buffers to avoid per-frame allocations.
    #[rust]
    vec_pool: SharedVecPool,
    // Set when a decoded chunk is ready to be fetched from the platform.
    #[rust]
    available_to_fetch: bool,

    // Unique id matching decoder events to this widget instance.
    #[rust]
    id: LiveId,
}
117
// Reference wrapper exposing the `Video` widget's playback API to callers.
#[derive(Clone, Default, PartialEq, WidgetRef)]
pub struct VideoRef(WidgetRef);
120
121impl VideoRef {
122    pub fn begin_decoding(&mut self, cx: &mut Cx) {
123        if let Some(mut inner) = self.borrow_mut() {
124            inner.initialize_decoding(cx);
125        }
126    }
127
128    pub fn show_preview(&mut self, cx: &mut Cx) {
130        if let Some(mut inner) = self.borrow_mut() {
131            inner.show_preview(cx);
132        }
133    }
134
135    pub fn begin_playback(&mut self, cx: &mut Cx) {
137        if let Some(mut inner) = self.borrow_mut() {
138            inner.begin_playback(cx);
139        }
140    }
141
142    pub fn pause_playback(&self) {
143        if let Some(mut inner) = self.borrow_mut() {
144            inner.pause_playback();
145        }
146    }
147
148    pub fn resume_playback(&self) {
149        if let Some(mut inner) = self.borrow_mut() {
150            inner.resume_playback();
151        }
152    }
153
154    pub fn end_playback(&mut self, cx: &mut Cx) {
156        if let Some(mut inner) = self.borrow_mut() {
157            inner.end_playback(cx);
158        }
159    }
160}
161
// Set wrapper over multiple `Video` widget references; no batch operations
// are defined yet.
#[derive(Clone, Default, WidgetSet)]
pub struct VideoSet(WidgetSet);

impl VideoSet {}
166
// Lifecycle of the platform-side decoder for this widget.
#[derive(Default, PartialEq)]
enum DecodingState {
    // No decoding requested yet.
    #[default]
    NotStarted,
    // `initialize_video_decoding` issued; waiting for the init event.
    Initializing,
    // Init event received; metadata is known.
    Initialized,
    // A chunk decode request is in flight.
    Decoding,
    // The last requested chunk finished decoding.
    ChunkFinished,
}
176
// User-visible playback lifecycle.
#[derive(Default, PartialEq, Debug)]
enum PlaybackState {
    #[default]
    NotStarted,
    // Showing a single still frame (see `maybe_show_preview`).
    Previewing,
    Playing,
    Paused,
    // Reached end-of-stream with looping disabled, or explicitly ended.
    Finished,
}
186
impl LiveHook for Video {
    // Registers the widget type with the runtime before live design expansion.
    fn before_live_design(cx: &mut Cx) {
        register_widget!(cx, Video);
    }

    // After this instance is created from the live document: assign the unique
    // id used to match decoder events to this instance, and start playback
    // immediately when `autoplay` is set.
    fn after_new_from_doc(&mut self, cx: &mut Cx) {
        self.id = LiveId::unique();
        if self.autoplay {
            self.begin_playback(cx);
        }
    }
}
199
// Actions emitted by the widget; currently none are produced.
#[derive(Clone, WidgetAction)]
pub enum VideoAction {
    None,
}
204
205impl Widget for Video {
206    fn redraw(&mut self, cx: &mut Cx) {
207        if self.texture.is_none() {
208            return;
209        }
210
211        self.draw_bg
212            .draw_vars
213            .set_texture(0, self.texture.as_ref().unwrap());
214
215        self.draw_bg.redraw(cx);
216    }
217
218    fn walk(&mut self, _cx: &mut Cx) -> Walk {
219        self.walk
220    }
221
222    fn draw_walk_widget(&mut self, cx: &mut Cx2d, walk: Walk) -> WidgetDraw {
223        self.draw_bg.draw_walk(cx, walk);
224        WidgetDraw::done()
225    }
226
227    fn handle_widget_event_with(
228        &mut self,
229        cx: &mut Cx,
230        event: &Event,
231        dispatch_action: &mut dyn FnMut(&mut Cx, WidgetActionItem),
232    ) {
233        let uid = self.widget_uid();
234        self.handle_event_with(cx, event, &mut |cx, action| {
235            dispatch_action(cx, WidgetActionItem::new(action.into(), uid));
236        });
237    }
238}
239
240impl Video {
241    pub fn handle_event_with(
242        &mut self,
243        cx: &mut Cx,
244        event: &Event,
245        _dispatch_action: &mut dyn FnMut(&mut Cx, VideoAction),
246    ) {
247        if let Event::VideoDecodingInitialized(event) = event {
248            if event.video_id == self.id {
249                self.handle_decoding_initialized(cx, event);
250            }
251        }
252
253        if let Event::VideoChunkDecoded(video_id) = event {
254            if *video_id == self.id {
255                self.decoding_state = DecodingState::ChunkFinished;
256                self.available_to_fetch = true;
257            }
258        }
259
260        if self.tick.is_event(event).is_some() {            
261            self.maybe_show_preview(cx);
262            self.maybe_advance_playback(cx);
263
264            if self.should_fetch() {
265                self.available_to_fetch = false;
266                cx.fetch_next_video_frames(self.id, MAX_FRAMES_TO_DECODE);
267            } else if self.should_request_decoding() {
268                let frames_to_decode = if self.playback_state == PlaybackState::Previewing {
269                    1
270                } else {
271                    MAX_FRAMES_TO_DECODE
272                };
273                cx.decode_next_video_chunk(self.id, frames_to_decode);
274                self.decoding_state = DecodingState::Decoding;
275            }
276        }
277
278        self.handle_gestures(cx, event);
279        self.handle_activity_events(event);
280        self.handle_errors(event);
281    }
282
283    fn initialize_decoding(&mut self, cx: &mut Cx) {
284        if self.decoding_state == DecodingState::NotStarted {
285            match cx.get_dependency(self.source.as_str()) {
286                Ok(data) => {
287                    cx.initialize_video_decoding(self.id, data, 100);
288                    self.decoding_state = DecodingState::Initializing;
289                }
290                Err(e) => {
291                    error!("initialize_decoding: resource not found {} {}", self.source.as_str(), e);
292                }
293            }
294        }
295    }
296
297    fn handle_decoding_initialized(&mut self, cx: &mut Cx, event: &VideoDecodingInitializedEvent) {
298        self.decoding_state = DecodingState::Initialized;
299        self.video_width = event.video_width as usize;
300        self.video_height = event.video_height as usize;
301        self.original_frame_rate = event.frame_rate;
302        self.total_duration = event.duration;
303        self.color_format = event.color_format;
304        self.frame_ts_interval = 1000000.0 / self.original_frame_rate as f64;
305
306        cx.decode_next_video_chunk(self.id, MAX_FRAMES_TO_DECODE + MAX_FRAMES_TO_DECODE / 2);
318        self.decoding_state = DecodingState::Decoding;
319
320        self.begin_buffering_thread(cx);
321        self.tick = cx.start_interval(8.0);
322    }
323
324    fn begin_buffering_thread(&mut self, cx: &mut Cx) {
325        let video_sender = self.decoding_receiver.sender();
326        cx.video_decoding_input(self.id, move |data| {
327            let _ = video_sender.send(data);
328        });
329
330        let frames_buffer = Arc::clone(&self.frames_buffer);
331        let vec_pool = Arc::clone(&self.vec_pool);
332
333        let video_width = self.video_width.clone();
334        let video_height = self.video_height.clone();
335        let color_format = self.color_format.clone();
336
337        let (_new_sender, new_receiver) = channel();
338        let old_receiver = std::mem::replace(&mut self.decoding_receiver.receiver, new_receiver);
339
340        thread::spawn(move || loop {
341            let frame_group = old_receiver.recv().unwrap();
342            deserialize_chunk(
343                Arc::clone(&frames_buffer),
344                Arc::clone(&vec_pool),
345                &frame_group,
346                video_width,
347                video_height,
348                color_format,
349            );
350        });
351    }
352
353    fn maybe_show_preview(&mut self, cx: &mut Cx) {
354        if self.playback_state == PlaybackState::Previewing {
355            if !self.is_current_texture_preview {
356                let current_frame = { self.frames_buffer.lock().unwrap().get() };
357                match current_frame {
358                    Some(current_frame) => {
359                        self.draw_bg.set_uniform(cx, id!(is_last_frame), &[0.0]);
360                        self.draw_bg.set_uniform(cx, id!(texture_available), &[1.0]);
361                        self.update_texture(cx, current_frame.pixel_data);
362                        self.is_current_texture_preview = true;
363                        self.redraw(cx);
364                    }
365                    None => {}
366                }
367            }
368        }
369    }
370
371    fn maybe_advance_playback(&mut self, cx: &mut Cx) {
372        if self.playback_state == PlaybackState::Playing {
373            let now = Instant::now();
374            let video_time_us = match self.start_time {
375                Some(start_time) => now.duration_since(start_time).as_micros(),
376                None => 0,
377            };
378
379            if video_time_us >= self.next_frame_ts || self.start_time.is_none() {
380                let maybe_current_frame = { self.frames_buffer.lock().unwrap().get() };
381
382                match maybe_current_frame {
383                    Some(current_frame) => {
384                        if self.start_time.is_none() {
385                            self.start_time = Some(now);
386                            self.draw_bg.set_uniform(cx, id!(is_last_frame), &[0.0]);
387                            self.draw_bg.set_uniform(cx, id!(texture_available), &[1.0]);
388                        }
389
390                        self.update_texture(cx, current_frame.pixel_data);
391                        self.redraw(cx);
392
393                        if current_frame.is_eos {
395                            self.next_frame_ts = 0;
396                            self.start_time = None;
397                            if !self.is_looping {
398                                self.draw_bg.set_uniform(cx, id!(is_last_frame), &[1.0]);
399                                self.playback_state = PlaybackState::Finished;
400                            }
401                        } else {
402                            self.next_frame_ts =
403                                current_frame.timestamp_us + self.frame_ts_interval.ceil() as u128;
404                        }
405                    }
406                    None => {}
408                }
409            }
410        }
411    }
412
413    fn update_texture(&mut self, cx: &mut Cx, pixel_data: Arc<Mutex<Vec<u32>>>) {
414        if self.texture.is_none() {
415            let texture = Texture::new(cx);
416            texture.set_desc(
417                cx,
418                TextureDesc {
419                    format: TextureFormat::ImageBGRA,
420                    width: Some(self.video_width),
421                    height: Some(self.video_height),
422                },
423            );
424            self.texture = Some(texture);
425        }
426
427        let texture = self.texture.as_mut().unwrap();
428
429        {
430            let mut data_locked = pixel_data.lock().unwrap();
431            texture.swap_image_u32(cx, &mut *data_locked);
432        }
433
434        self.vec_pool
435            .lock()
436            .unwrap()
437            .release(pixel_data.lock().unwrap().to_vec());
438    }
439
440    fn handle_gestures(&mut self, cx: &mut Cx, event: &Event) {
441        match event.hits(cx, self.draw_bg.area()) {
442            Hit::FingerDown(_fe) => {
443                if self.hold_to_pause {
444                    self.pause_playback();
445                }
446            }
447            Hit::FingerUp(_fe) => {
448                if self.hold_to_pause {
449                    self.resume_playback();
450                }
451            }
452            _ => (),
453        }
454    }
455
456    fn handle_activity_events(&mut self, event: &Event) {
457        match event {
458            Event::Pause => self.pause_playback(),
459            Event::Resume => self.resume_playback(),
460            _ => (),
461        }
462    }
463
464    fn handle_errors(&mut self, event: &Event) {
465        if let Event::VideoDecodingError(event) = event {
466            if event.video_id == self.id {
467                error!("Error decoding video with id {} : {}", self.id.0, event.error);
468            }
469        }
470    }
471
472    fn show_preview(&mut self, cx: &mut Cx) {
473        if self.playback_state != PlaybackState::Previewing {
474            if self.decoding_state == DecodingState::NotStarted {
475                self.initialize_decoding(cx);
476            }
477            self.playback_state = PlaybackState::Previewing;
478        }
479    }
480
481    fn begin_playback(&mut self, cx: &mut Cx) {
482        if self.decoding_state == DecodingState::NotStarted {
483            self.initialize_decoding(cx);
484        }
485        self.playback_state = PlaybackState::Playing;
486    }
487
488    fn pause_playback(&mut self) {
489        if self.playback_state != PlaybackState::Paused {
490            self.pause_time = Some(Instant::now());
491            self.playback_state = PlaybackState::Paused;
492       }
493    }
494
495    fn resume_playback(&mut self) {
496        if let Some(pause_time) = self.pause_time.take() {
497            let pause_duration = Instant::now().duration_since(pause_time);
498            self.total_pause_duration += pause_duration;
499            if let Some(start_time) = self.start_time.as_mut() {
500                *start_time += pause_duration;
501            }
502        }
503        self.playback_state = PlaybackState::Playing;
504    }
505
506    fn end_playback(&mut self, cx: &mut Cx) {
507        self.playback_state = PlaybackState::Finished;
508        self.start_time = None;
509        self.next_frame_ts = 0;
510        self.cleanup_decoding(cx);
511    }
512
513    fn should_fetch(&self) -> bool {
514        self.available_to_fetch && self.is_buffer_running_low()
515    }
516
517    fn should_request_decoding(&self) -> bool {
518        match self.decoding_state {
519            DecodingState::ChunkFinished => self.is_buffer_running_low(),
520            _ => false,
521        }
522    }
523
524    fn is_buffer_running_low(&self) -> bool {
525        self.frames_buffer.lock().unwrap().data.len() < FRAME_BUFFER_LOW_WATER_MARK
526    }
527
528    fn cleanup_decoding(&mut self, cx: &mut Cx) {
529        if self.decoding_state != DecodingState::NotStarted {
530            cx.cleanup_video_decoding(self.id);
531            self.frames_buffer.lock().unwrap().clear();
532            self.vec_pool.lock().unwrap().clear();
533            self.decoding_state = DecodingState::NotStarted;
534        }
535    }
536}
537
// Frame queue shared between the deserialization thread (producer) and the
// UI thread (consumer).
type SharedRingBuffer = Arc<Mutex<RingBuffer>>;
// Despite the name, this is a simple FIFO over a `VecDeque`, plus a running
// count of pushes since the last clear.
#[derive(Clone)]
struct RingBuffer {
    data: VecDeque<VideoFrame>,
    // Index of the most recently pushed frame; None until the first push.
    last_added_index: Option<usize>,
}
544
545impl RingBuffer {
546    fn get(&mut self) -> Option<VideoFrame> {
547        self.data.pop_front()
548    }
549
550    fn push(&mut self, frame: VideoFrame) {
551        self.data.push_back(frame);
552
553        match self.last_added_index {
554            None => {
555                self.last_added_index = Some(0);
556            }
557            Some(index) => {
558                self.last_added_index = Some(index + 1);
559            }
560        }
561    }
562
563    fn clear(&mut self) {
564        self.data.clear();
565        self.last_added_index = None;
566    }
567}
568
569impl Default for RingBuffer {
570    fn default() -> Self {
571        Self {
572            data: VecDeque::new(),
573            last_added_index: None,
574        }
575    }
576}
577
// One decoded frame ready for presentation.
#[derive(Clone, Default)]
struct VideoFrame {
    // Packed pixels (one u32 per pixel); shared so the buffer can be swapped
    // into the texture and recycled afterwards.
    pixel_data: Arc<Mutex<Vec<u32>>>,
    // Presentation timestamp in microseconds (see `maybe_advance_playback`).
    timestamp_us: u128,
    // True for the last frame of the stream.
    is_eos: bool,
}
584
// Pool of reusable pixel buffers shared between the deserialization thread
// and the UI thread, avoiding a fresh allocation per frame.
type SharedVecPool = Arc<Mutex<VecPool>>;
#[derive(Default, Clone)]
pub struct VecPool {
    pool: Vec<Vec<u32>>,
}

impl VecPool {
    /// Returns a buffer of exactly `capacity` elements, reusing a pooled
    /// buffer when available. Newly exposed elements are zeroed; reused
    /// elements may contain stale data (callers overwrite every pixel).
    ///
    /// Fix: the previous version only resized when the pooled buffer's
    /// *capacity* was too small, so a buffer with enough capacity but a
    /// different length was returned at the wrong length (too short or too
    /// long). `resize` now runs unconditionally, which both grows and
    /// truncates as needed.
    pub fn acquire(&mut self, capacity: usize) -> Vec<u32> {
        match self.pool.pop() {
            Some(mut vec) => {
                vec.resize(capacity, 0);
                vec
            }
            None => vec![0u32; capacity],
        }
    }

    /// Returns a buffer to the pool for later reuse.
    pub fn release(&mut self, vec: Vec<u32>) {
        self.pool.push(vec);
    }

    /// Drops all pooled buffers.
    pub fn clear(&mut self) {
        self.pool.clear();
    }
}
612
613fn deserialize_chunk(
614    frames_buffer: SharedRingBuffer,
615    vec_pool: SharedVecPool,
616    frame_group: &[u8],
617    video_width: usize,
618    video_height: usize,
619    color_format: VideoColorFormat,
620) {
621    let mut cursor = 0;
622
623    while cursor < frame_group.len() {
625        let timestamp =
627            u64::from_be_bytes(frame_group[cursor..cursor + 8].try_into().unwrap()) as u128;
628        cursor += 8;
629        let y_stride =
630            u32::from_be_bytes(frame_group[cursor..cursor + 4].try_into().unwrap()) as usize;
631        cursor += 4;
632        let u_stride =
633            u32::from_be_bytes(frame_group[cursor..cursor + 4].try_into().unwrap()) as usize;
634        cursor += 4;
635        let v_stride =
636            u32::from_be_bytes(frame_group[cursor..cursor + 4].try_into().unwrap()) as usize;
637        cursor += 4;
638        let is_eos = u8::from_be_bytes(frame_group[cursor..cursor + 1].try_into().unwrap()) != 0;
639        cursor += 1;
640        let frame_length =
641            u32::from_be_bytes(frame_group[cursor..cursor + 4].try_into().unwrap()) as usize;
642        cursor += 4;
643
644        let frame_data_end = cursor + frame_length;
645        let pixel_data = &frame_group[cursor..frame_data_end];
646
647        let mut pixel_data_u32 = vec_pool
648            .lock()
649            .unwrap()
650            .acquire(video_width as usize * video_height as usize);
651
652        match color_format {
653            VideoColorFormat::YUV420Planar => planar_to_u32(
654                pixel_data,
655                video_width,
656                video_height,
657                y_stride,
658                u_stride,
659                v_stride,
660                &mut pixel_data_u32,
661            ),
662            VideoColorFormat::YUV420SemiPlanar => semi_planar_to_u32(
663                pixel_data,
664                video_width,
665                video_height,
666                y_stride,
667                u_stride,
668                &mut pixel_data_u32,
669            ),
670            VideoColorFormat::YUV420Flexible => todo!(),
671            VideoColorFormat::Unknown => todo!(),
672        };
673
674        frames_buffer.lock().unwrap().push(VideoFrame {
675            pixel_data: Arc::new(Mutex::new(pixel_data_u32)),
676            timestamp_us: timestamp,
677            is_eos,
678        });
679
680        cursor = frame_data_end;
681    }
682}
683
/// Packs YUV420 planar data (separate Y, U and V planes, chroma subsampled
/// 2x2) into one u32 per pixel laid out as `V << 16 | U << 8 | Y`.
/// `packed_data` must hold at least `width * height` elements.
fn planar_to_u32(
    data: &[u8],
    width: usize,
    height: usize,
    y_stride: usize,
    u_stride: usize,
    v_stride: usize,
    packed_data: &mut [u32],
) {
    // Plane offsets: Y is full resolution; U and V have half the rows.
    let u_plane = y_stride * height;
    let v_plane = u_plane + u_stride * (height / 2);

    for row in 0..height {
        let y_row = row * y_stride;
        let u_row = u_plane + (row / 2) * u_stride;
        let v_row = v_plane + (row / 2) * v_stride;
        let out_row = &mut packed_data[row * width..(row + 1) * width];

        for (col, out) in out_row.iter_mut().enumerate() {
            let luma = data[y_row + col] as u32;
            let cb = data[u_row + col / 2] as u32;
            let cr = data[v_row + col / 2] as u32;
            *out = (cr << 16) | (cb << 8) | luma;
        }
    }
}
716
/// Packs YUV420 semi-planar data (full-resolution Y plane followed by an
/// interleaved UV plane with half the rows) into one u32 per pixel laid out
/// as `V << 16 | U << 8 | Y`. `packed_data` must hold at least
/// `width * height` elements.
fn semi_planar_to_u32(
    data: &[u8],
    width: usize,
    height: usize,
    y_stride: usize,
    uv_stride: usize,
    packed_data: &mut [u32],
) {
    let uv_plane = y_stride * height;

    for row in 0..height {
        let y_row = row * y_stride;
        let uv_row = uv_plane + (row / 2) * uv_stride;
        let out_row = &mut packed_data[row * width..(row + 1) * width];

        for (col, out) in out_row.iter_mut().enumerate() {
            let luma = data[y_row + col] as u32;
            // U and V are interleaved: two bytes per 2x2 pixel block.
            let uv_idx = uv_row + (col / 2) * 2;
            let cb = data[uv_idx] as u32;
            let cr = data[uv_idx + 1] as u32;
            *out = (cr << 16) | (cb << 8) | luma;
        }
    }
}