blinc_media 0.5.1

Cross-platform audio/video for Blinc — royalty-free codecs only
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
//! Video decoding — RGBA frames from H.264 streams
//!
//! Desktop: OpenH264 (royalty-free, Cisco covers patents)
//! Mobile: platform decoders via native bridge
//!
//! # Example
//!
//! ```ignore
//! use blinc_media::video::VideoDecoder;
//!
//! let mut decoder = VideoDecoder::new();
//! if let Some(frame) = decoder.decode_nal(h264_packet) {
//!     canvas_render(frame.as_rgba(), frame.width, frame.height);
//! }
//! ```

use crate::frame::Frame;

/// Video playback state
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Hash)]
pub enum VideoState {
    /// No source loaded, or playback stopped/reset.
    #[default]
    Idle,
    /// Actively decoding and presenting frames.
    Playing,
    /// Suspended; position is retained.
    Paused,
    /// End of stream reached.
    Ended,
}

/// Re-export Frame as VideoFrame for convenience
/// (video-specific call sites can name the type without importing `frame`).
pub type VideoFrame = Frame;

/// Video decoder — extracts RGBA frames from H.264 NAL units
pub struct VideoDecoder {
    /// OpenH264 decoder instance; `None` if codec initialization failed.
    /// Desktop-only — mobile/web targets delegate to platform decoders.
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    decoder: Option<openh264::decoder::Decoder>,
    /// Flips from `Idle` to `Playing` on the first successfully decoded frame.
    state: VideoState,
}

impl VideoDecoder {
    /// Construct a decoder.
    ///
    /// Desktop: initializes OpenH264 (failure is tolerated — `decoder`
    /// stays `None` and `decode_nal` returns `None`).
    /// Mobile/web: stateless stub; decoding happens in platform code.
    pub fn new() -> Self {
        #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
        {
            use openh264::decoder::{DecoderConfig, Flush};
            // NoFlush: frames are held until explicitly flushed, which lets
            // the caller drain them via flush_remaining() at end of stream.
            let config = DecoderConfig::new().flush_after_decode(Flush::NoFlush);
            let api = openh264::OpenH264API::from_source();
            let decoder = openh264::decoder::Decoder::with_api_config(api, config).ok();
            Self {
                decoder,
                state: VideoState::Idle,
            }
        }

        #[cfg(any(target_os = "android", target_os = "ios", target_arch = "wasm32"))]
        {
            Self {
                state: VideoState::Idle,
            }
        }
    }

    /// Decode a single H.264 NAL unit and return an RGBA frame if available
    ///
    /// Returns `None` when the decoder is unavailable, the NAL produced no
    /// output picture yet (buffered), or decoding failed (logged as warning).
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    pub fn decode_nal(&mut self, nal_data: &[u8]) -> Option<Frame> {
        let decoder = self.decoder.as_mut()?;
        match decoder.decode(nal_data) {
            Ok(Some(yuv)) => {
                // Luma dimensions reconstructed by doubling the chroma plane —
                // assumes 4:2:0 subsampling (the only layout OpenH264 emits);
                // NOTE(review): verify against yuv.dimensions() if available.
                let (uv_w, uv_h) = yuv.dimensions_uv();
                let w = uv_w * 2;
                let h = uv_h * 2;
                let mut rgba = vec![0u8; w * h * 4];
                yuv.write_rgba8(&mut rgba);
                self.state = VideoState::Playing;
                Some(Frame::from_rgba(rgba, w as u32, h as u32))
            }
            Ok(None) => None,
            Err(e) => {
                tracing::warn!("decode error: {e:?}");
                None
            }
        }
    }

    /// Mobile/web stub — local NAL decoding is not supported on these targets.
    #[cfg(any(target_os = "android", target_os = "ios", target_arch = "wasm32"))]
    pub fn decode_nal(&mut self, _nal_data: &[u8]) -> Option<Frame> {
        tracing::warn!("Use native_stream for mobile video decoding");
        None
    }

    /// Current decode state.
    pub fn state(&self) -> VideoState {
        self.state
    }
}

impl Default for VideoDecoder {
    fn default() -> Self {
        Self::new()
    }
}

/// Video player with playback controls
///
/// Wraps a decoder and provides play/pause/seek/stop.
/// Frames are delivered to a callback or polled via `current_frame()`.
///
/// Desktop: decodes locally via OpenH264
/// Mobile: delegates to platform player via native bridge
///
/// Cloning shares the same playback state — all clones see the same
/// frames, position, and play/pause state via the inner `Arc<Mutex>`.
#[derive(Clone)]
pub struct VideoPlayer {
    /// Shared mutable playback state; every clone points at the same inner.
    state: std::sync::Arc<std::sync::Mutex<VideoPlayerInner>>,
    /// Unique per-player id; keys the reactive signals and (on wasm) the
    /// backing web video element.
    #[allow(dead_code)]
    player_id: u64,
    /// Reactive: true while state is `Playing`.
    pub playing_signal: blinc_core::State<bool>,
    /// Reactive: playback position in milliseconds.
    pub position_signal: blinc_core::State<u64>,
    /// Reactive: media duration in milliseconds (0 until known).
    pub duration_signal: blinc_core::State<u64>,
    /// Reactive: volume in [0.0, 1.0].
    pub volume_signal: blinc_core::State<f32>,
}

/// Mutex-protected state shared by all clones of a [`VideoPlayer`]
/// and its background decode thread.
struct VideoPlayerInner {
    /// Current playback state; the decode thread polls this each sample.
    playback_state: VideoState,
    /// Volume in [0.0, 1.0].
    volume: f32,
    /// Most recently decoded frame, shared cheaply via `Arc`.
    current_frame: Option<std::sync::Arc<Frame>>,
    /// Source path/URL as passed to `load` (informational).
    source: Option<String>,
    /// Current position in milliseconds.
    position_ms: u64,
    /// Media duration in milliseconds (0 until parsed/known).
    duration_ms: u64,
    /// Decode-session counter: bumped to invalidate a running decode
    /// thread (e.g. on seek); the thread exits when its snapshot differs.
    #[cfg_attr(target_arch = "wasm32", allow(dead_code))]
    generation: u64,
    /// Incremented each time a new decoded frame is stored.
    /// Consumers compare against their last-seen value to skip
    /// redundant GPU uploads when the frame hasn't changed.
    frame_generation: u64,
    /// Full MP4 bytes retained so seek/replay can restart decoding.
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    source_bytes: Option<std::sync::Arc<Vec<u8>>>,
}

/// Monotonic source of per-player ids (signal keys, wasm element ids).
static PLAYER_ID_COUNTER: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);

impl VideoPlayer {
    /// Create a player with fresh reactive signals and, if a scheduler is
    /// available, a per-frame tick callback that mirrors playback progress
    /// into the signals (and on wasm, pulls frames from the web video element).
    pub fn new() -> Self {
        // Unique id keys the signals below and the wasm-side video element.
        let id = PLAYER_ID_COUNTER.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let playing_signal = blinc_core::use_state_keyed(&format!("vp:{id}:playing"), || false);
        let position_signal = blinc_core::use_state_keyed(&format!("vp:{id}:position"), || 0u64);
        let duration_signal = blinc_core::use_state_keyed(&format!("vp:{id}:duration"), || 0u64);
        let volume_signal = blinc_core::use_state_keyed(&format!("vp:{id}:volume"), || 1.0f32);

        let state = std::sync::Arc::new(std::sync::Mutex::new(VideoPlayerInner {
            playback_state: VideoState::Idle,
            volume: 1.0,
            current_frame: None,
            source: None,
            position_ms: 0,
            duration_ms: 0,
            generation: 0,
            frame_generation: 0,
            #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
            source_bytes: None,
        }));

        let state_for_tick = state.clone();
        let pos_for_tick = position_signal.clone();
        #[cfg(target_arch = "wasm32")]
        let dur_for_tick = duration_signal.clone();
        #[cfg(target_arch = "wasm32")]
        let playing_for_tick = playing_signal.clone();
        let _tick_id = id;
        if let Some(handle) = blinc_animation::try_get_scheduler() {
            let redraw_handle = handle.clone();
            handle.register_tick_callback(move |_dt| {
                // wasm: poll the browser <video> element for position/duration,
                // capture new frames, and detect end-of-stream.
                #[cfg(target_arch = "wasm32")]
                {
                    let mut inner = state_for_tick.lock().unwrap();
                    if inner.playback_state == VideoState::Playing {
                        let new_pos = crate::web_video::position_ms(_tick_id);
                        let new_dur = crate::web_video::duration_ms(_tick_id);
                        let pos_changed = inner.position_ms != new_pos;
                        let dur_changed = inner.duration_ms != new_dur;
                        inner.position_ms = new_pos;
                        inner.duration_ms = new_dur;
                        // Release the lock before calling into web_video —
                        // capture_frame re-locks below.
                        drop(inner);

                        // Only capture frame when video has advanced —
                        // getImageData is expensive (~8MB copy for 1080p)
                        if pos_changed {
                            if let Some(frame) = crate::web_video::capture_frame(_tick_id) {
                                let mut inner = state_for_tick.lock().unwrap();
                                inner.current_frame = Some(std::sync::Arc::new(frame));
                                inner.frame_generation += 1;
                            }
                            pos_for_tick.set(new_pos);
                            redraw_handle.request_redraw();
                        }
                        if dur_changed {
                            dur_for_tick.set(new_dur);
                        }

                        if crate::web_video::is_ended(_tick_id) {
                            state_for_tick.lock().unwrap().playback_state = VideoState::Ended;
                            playing_for_tick.set(false);
                        }
                    }
                }

                // Native: the decode thread owns position; just mirror it into
                // the signal and keep the UI redrawing while playing.
                #[cfg(not(target_arch = "wasm32"))]
                {
                    let inner = state_for_tick.lock().unwrap();
                    if inner.playback_state == VideoState::Playing {
                        let pos = inner.position_ms;
                        drop(inner);
                        pos_for_tick.set(pos);
                        redraw_handle.request_redraw();
                    }
                }
            });
        }

        // wasm: materialize the backing video element up-front.
        #[cfg(target_arch = "wasm32")]
        crate::web_video::create(id);

        Self {
            state,
            player_id: id,
            playing_signal,
            position_signal,
            duration_signal,
            volume_signal,
        }
    }

    /// Mirror position/duration/volume from the inner state into the
    /// reactive signals (lock is released before any signal fires).
    fn sync_signals(&self) {
        let (pos, dur, vol) = {
            let inner = self.state.lock().unwrap();
            (inner.position_ms, inner.duration_ms, inner.volume)
        };
        self.position_signal.set(pos);
        self.duration_signal.set(dur);
        self.volume_signal.set(vol);
    }

    /// Load a video source
    ///
    /// Records the path and resets state to `Idle` at position 0.
    /// On mobile, also forwards the path to the platform player.
    /// NOTE(review): on desktop this only records metadata — actual bytes
    /// come via `load_file`/`load_bytes`; confirm intended call order.
    pub fn load(&self, path: &str) {
        {
            let mut inner = self.state.lock().unwrap();
            inner.source = Some(path.to_string());
            inner.playback_state = VideoState::Idle;
            inner.position_ms = 0;
        }
        self.sync_signals();

        #[cfg(any(target_os = "android", target_os = "ios"))]
        {
            let _ = blinc_core::native_bridge::native_call::<(), _>(
                "video",
                "load",
                vec![blinc_core::native_bridge::NativeValue::String(
                    path.to_string(),
                )],
            );
        }
    }

    /// Start or resume playback
    ///
    /// If the video has ended, this restarts from the beginning via
    /// `replay()` instead of resuming.
    pub fn play(&self) {
        {
            let mut inner = self.state.lock().unwrap();
            if inner.playback_state == VideoState::Ended {
                // Drop the lock first — replay() re-locks the same mutex.
                drop(inner);
                self.replay();
                return;
            }
            inner.playback_state = VideoState::Playing;
        }
        self.playing_signal.set(true);
        // Kick the scheduler so the tick callback starts advancing frames.
        if let Some(handle) = blinc_animation::try_get_scheduler() {
            handle.request_redraw();
        }

        #[cfg(any(target_os = "android", target_os = "ios"))]
        {
            let _ = blinc_core::native_bridge::native_call::<(), _>("video", "play", ());
        }
        #[cfg(target_arch = "wasm32")]
        crate::web_video::play(self.player_id);
    }

    /// Pause playback
    ///
    /// Position and current frame are retained; the desktop decode thread
    /// idles (sleep-polls) until state returns to `Playing`.
    pub fn pause(&self) {
        self.state.lock().unwrap().playback_state = VideoState::Paused;
        self.playing_signal.set(false);
        // One more redraw so the UI reflects the paused state immediately.
        if let Some(handle) = blinc_animation::try_get_scheduler() {
            handle.request_redraw();
        }

        #[cfg(any(target_os = "android", target_os = "ios"))]
        {
            let _ = blinc_core::native_bridge::native_call::<(), _>("video", "pause", ());
        }
        #[cfg(target_arch = "wasm32")]
        crate::web_video::pause(self.player_id);
    }

    /// Stop playback and reset position
    ///
    /// Clears the current frame; the desktop decode thread exits when it
    /// observes the `Idle` state.
    pub fn stop(&self) {
        {
            let mut inner = self.state.lock().unwrap();
            inner.playback_state = VideoState::Idle;
            inner.position_ms = 0;
            inner.current_frame = None;
        }
        self.sync_signals();

        #[cfg(any(target_os = "android", target_os = "ios"))]
        {
            let _ = blinc_core::native_bridge::native_call::<(), _>("video", "stop", ());
        }
        // Web has no explicit stop — pause the element instead.
        #[cfg(target_arch = "wasm32")]
        crate::web_video::pause(self.player_id);
    }

    /// Seek to a position in milliseconds.
    ///
    /// Invalidates the running decode thread (generation bump), then spawns
    /// a fresh one that decodes from the nearest keyframe at or before the
    /// target position.
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    pub fn seek(&self, position_ms: u64) {
        let bytes = {
            let mut inner = self.state.lock().unwrap();
            inner.playback_state = VideoState::Idle;
            inner.position_ms = position_ms;
            // Any thread holding an older generation snapshot will exit.
            inner.generation += 1;
            inner.source_bytes.clone()
        };
        self.position_signal.set(position_ms);
        // Brief grace period so the old thread can observe the bump before
        // the new thread flips state back to Playing.
        // NOTE(review): this blocks the caller for 15ms — a join/handshake
        // would be deterministic; confirm this is acceptable on the UI thread.
        std::thread::sleep(std::time::Duration::from_millis(15));
        if let Some(bytes) = bytes {
            self.start_decode_thread_from(bytes, position_ms);
        }
    }

    /// Seek to a position in milliseconds (web: delegates to the element).
    #[cfg(target_arch = "wasm32")]
    pub fn seek(&self, position_ms: u64) {
        self.state.lock().unwrap().position_ms = position_ms;
        self.position_signal.set(position_ms);
        crate::web_video::seek(self.player_id, position_ms);
    }

    /// Seek to a position in milliseconds (mobile: via native bridge).
    #[cfg(any(target_os = "android", target_os = "ios"))]
    pub fn seek(&self, position_ms: u64) {
        self.state.lock().unwrap().position_ms = position_ms;
        self.sync_signals();
        let _ = blinc_core::native_bridge::native_call::<(), _>(
            "video",
            "seek",
            vec![blinc_core::native_bridge::NativeValue::Int64(
                position_ms as i64,
            )],
        );
    }

    /// Set volume (0.0 to 1.0)
    ///
    /// Out-of-range values are clamped before being stored; the raw
    /// (unclamped) value is what's forwarded to the platform backends.
    pub fn set_volume(&self, volume: f32) {
        self.state.lock().unwrap().volume = volume.clamp(0.0, 1.0);
        self.sync_signals();

        #[cfg(any(target_os = "android", target_os = "ios"))]
        {
            let _ = blinc_core::native_bridge::native_call::<(), _>(
                "video",
                "set_volume",
                vec![blinc_core::native_bridge::NativeValue::Float32(volume)],
            );
        }
        #[cfg(target_arch = "wasm32")]
        crate::web_video::set_volume(self.player_id, volume);
    }

    /// Get the current decoded frame (cheap Arc clone)
    pub fn current_frame(&self) -> Option<std::sync::Arc<Frame>> {
        let inner = self.state.lock().unwrap();
        inner.current_frame.as_ref().map(std::sync::Arc::clone)
    }

    /// Get the current frame generation counter.
    /// Incremented each time a new decoded frame is stored.
    /// Consumers can compare against their last-seen value to skip
    /// redundant work (e.g. GPU texture uploads) when the frame
    /// hasn't changed.
    pub fn frame_generation(&self) -> u64 {
        let inner = self.state.lock().unwrap();
        inner.frame_generation
    }

    /// Push a decoded frame (called by decoder thread or native bridge)
    pub fn push_frame(&self, frame: Frame) {
        {
            let mut inner = self.state.lock().unwrap();
            inner.current_frame = Some(std::sync::Arc::new(frame));
            inner.frame_generation += 1;
        }
        self.sync_signals();
    }

    /// Get playback state
    pub fn playback_state(&self) -> VideoState {
        let inner = self.state.lock().unwrap();
        inner.playback_state
    }

    /// Get current position in milliseconds
    pub fn position_ms(&self) -> u64 {
        let inner = self.state.lock().unwrap();
        inner.position_ms
    }

    /// Get duration in milliseconds
    pub fn duration_ms(&self) -> u64 {
        let inner = self.state.lock().unwrap();
        inner.duration_ms
    }

    /// Get volume
    pub fn volume(&self) -> f32 {
        let inner = self.state.lock().unwrap();
        inner.volume
    }

    /// Check if playing
    pub fn is_playing(&self) -> bool {
        matches!(self.playback_state(), VideoState::Playing)
    }

    /// Load and play an MP4 file from disk.
    ///
    /// # Panics
    /// Panics if the file cannot be read.
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    pub fn load_file(&self, path: &str) {
        match std::fs::read(path) {
            Ok(bytes) => self.load_bytes(bytes),
            Err(e) => panic!("failed to read video: {path}: {e}"),
        }
    }

    /// Replay from the beginning using stored source bytes.
    ///
    /// Resets position/frame state, invalidates any still-running decode
    /// thread by bumping the generation counter (mirroring `seek`), then
    /// spawns a fresh decode thread from the start of the stream.
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    pub fn replay(&self) {
        let bytes = {
            let mut inner = self.state.lock().unwrap();
            inner.playback_state = VideoState::Idle;
            inner.position_ms = 0;
            inner.current_frame = None;
            // Invalidate any in-flight decode thread. Without this bump a
            // paused/still-decoding thread would resume as soon as the new
            // thread flips the state back to Playing, leaving two threads
            // racing to push frames (seek() already does this).
            inner.generation += 1;
            inner.source_bytes.clone()
        };
        self.sync_signals();
        // Grace period so the old thread can observe the invalidation.
        std::thread::sleep(std::time::Duration::from_millis(10));
        if let Some(bytes) = bytes {
            self.start_decode_thread(bytes);
        }
    }

    /// Replay from the beginning (web: seek the element to 0 and play).
    #[cfg(target_arch = "wasm32")]
    pub fn replay(&self) {
        {
            let mut inner = self.state.lock().unwrap();
            inner.playback_state = VideoState::Idle;
            inner.position_ms = 0;
            inner.current_frame = None;
        }
        crate::web_video::seek(self.player_id, 0);
        self.play();
    }

    /// Replay from the beginning (mobile: delegated to the platform player).
    #[cfg(any(target_os = "android", target_os = "ios"))]
    pub fn replay(&self) {
        let _ = blinc_core::native_bridge::native_call::<(), _>("video", "replay", ());
    }

    /// Load and play an MP4 from in-memory bytes.
    ///
    /// Retains the bytes so `seek`/`replay` can restart decoding, invalidates
    /// any decode thread left over from a previous load, and starts a new one.
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    pub fn load_bytes(&self, bytes: Vec<u8>) {
        let bytes = std::sync::Arc::new(bytes);
        {
            let mut inner = self.state.lock().unwrap();
            // Invalidate any decode thread still running for a previous
            // source; otherwise it would keep pushing frames alongside the
            // new thread once playback_state flips back to Playing
            // (seek() applies the same generation bump).
            inner.generation += 1;
            inner.source_bytes = Some(std::sync::Arc::clone(&bytes));
        }
        self.start_decode_thread(bytes);
    }

    /// Load an MP4 from in-memory bytes (web: hand the buffer to the element).
    #[cfg(target_arch = "wasm32")]
    pub fn load_bytes(&self, bytes: Vec<u8>) {
        crate::web_video::load_bytes(self.player_id, &bytes);
    }

    /// Spawn a decode thread starting from the beginning of the stream.
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    fn start_decode_thread(&self, bytes: std::sync::Arc<Vec<u8>>) {
        self.start_decode_thread_from(bytes, 0);
    }

    /// Parse the MP4 container, then spawn a background thread that decodes
    /// the H.264 track with OpenH264 and publishes RGBA frames into the
    /// shared player state, paced against a wall clock.
    ///
    /// `start_ms` selects the seek target; decoding begins at the nearest
    /// keyframe at or before it, with earlier samples decoded but not shown.
    ///
    /// # Panics
    /// Panics if the MP4 header cannot be parsed, there is no H.264 track,
    /// or the track lacks an AVC1 configuration box.
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    fn start_decode_thread_from(&self, bytes: std::sync::Arc<Vec<u8>>, start_ms: u64) {
        let file_size = bytes.len() as u64;

        let mp4 = mp4::Mp4Reader::read_header(std::io::Cursor::new(bytes.as_ref()), file_size)
            .unwrap_or_else(|e| panic!("failed to parse MP4: {e}"));

        let video_track_id = mp4
            .tracks()
            .values()
            .find(|t| t.media_type().ok() == Some(mp4::MediaType::H264))
            .map(|t| t.track_id())
            .unwrap_or_else(|| panic!("no H.264 video track found"));

        let track = &mp4.tracks()[&video_track_id];
        let sample_count = track.sample_count();
        let duration_ms = track.duration().as_millis() as u64;

        // Snapshot the generation under the same lock that flips the state
        // to Playing; the spawned thread exits if another seek/replay bumps it.
        let my_generation;
        {
            let mut inner = self.state.lock().unwrap();
            inner.duration_ms = duration_ms;
            inner.playback_state = VideoState::Playing;
            my_generation = inner.generation;
        }
        self.playing_signal.set(true);
        self.sync_signals();

        // Build the bitstream converter (MP4 length-prefixed → Annex B)
        // following the openh264 crate's official Mp4BitstreamConverter pattern
        let avc1 = track
            .trak
            .mdia
            .minf
            .stbl
            .stsd
            .avc1
            .as_ref()
            .expect("no AVC1 config in video track");
        let avcc = &avc1.avcc;
        // Size in bytes of each NALU's length prefix (1, 2, or 4).
        let length_size = avcc.length_size_minus_one + 1;
        let sps_list: Vec<Vec<u8>> = avcc
            .sequence_parameter_sets
            .iter()
            .map(|n| n.bytes.clone())
            .collect();
        let pps_list: Vec<Vec<u8>> = avcc
            .picture_parameter_sets
            .iter()
            .map(|n| n.bytes.clone())
            .collect();

        let state = self.state.clone();
        let playing_sig = self.playing_signal.clone();
        let position_sig = self.position_signal.clone();

        std::thread::spawn(move || {
            let mut decoder = VideoDecoder::new();

            tracing::info!(
                "video: decode start — {} SPS, {} PPS, {} samples, {}ms, length_size={}",
                sps_list.len(),
                pps_list.len(),
                sample_count,
                duration_ms,
                length_size,
            );

            // Re-parse inside the thread: Mp4Reader is not shared with the
            // caller, and read_sample needs mutable access.
            let mut mp4 = match mp4::Mp4Reader::read_header(
                std::io::Cursor::new(bytes.as_ref()),
                file_size,
            ) {
                Ok(r) => r,
                Err(e) => {
                    tracing::error!("failed to re-parse MP4: {e}");
                    return;
                }
            };

            // Average frame period, assuming uniform sample durations;
            // fall back to ~30fps when the track reports no samples.
            let frame_duration = if sample_count > 0 {
                std::time::Duration::from_millis(duration_ms / sample_count as u64)
            } else {
                std::time::Duration::from_millis(33)
            };

            let mut decoded_count: u32 = 0;
            let mut playback_clock: Option<std::time::Instant> = None;

            // Calculate starting sample from seek position, snap to nearest keyframe
            let target_sample = if start_ms > 0 && duration_ms > 0 {
                let s = ((start_ms as f64 / duration_ms as f64) * sample_count as f64) as u32;
                s.min(sample_count).max(1)
            } else {
                1
            };

            // Find nearest sync sample (keyframe) at or before target
            let track_ref = &mp4.tracks()[&video_track_id];
            let start_sample = if let Some(ref stss) = track_ref.trak.mdia.minf.stbl.stss {
                let mut best = 1u32;
                for &sync_id in &stss.entries {
                    if sync_id <= target_sample {
                        best = sync_id;
                    } else {
                        break;
                    }
                }
                best
            } else {
                // No sync-sample table → every sample is treated as seekable.
                target_sample
            };

            tracing::info!(
                "video: seek to {}ms → target sample {}, keyframe {}",
                start_ms,
                target_sample,
                start_sample
            );
            // Reused Annex B output buffer (cleared per sample).
            let mut annex_b = Vec::new();

            // Feed SPS/PPS to decoder so it can decode from any position
            for sps in &sps_list {
                let mut buf = vec![0u8, 0, 0, 1];
                buf.extend_from_slice(sps);
                decoder.decode_nal(&buf);
            }
            for pps in &pps_list {
                let mut buf = vec![0u8, 0, 0, 1];
                buf.extend_from_slice(pps);
                decoder.decode_nal(&buf);
            }

            // Bitstream converter state (mirrors openh264 example)
            let mut new_idr = true;
            let mut sps_seen = false;
            let mut pps_seen = false;

            for sample_idx in start_sample..=sample_count {
                // Pause/cancel gate: spin here while Paused; exit on
                // Idle/Ended or when a newer decode generation supersedes us.
                loop {
                    let inner = state.lock().unwrap();
                    if inner.generation != my_generation {
                        return;
                    }
                    let ps = inner.playback_state;
                    drop(inner);
                    match ps {
                        VideoState::Playing => break,
                        VideoState::Paused => {
                            std::thread::sleep(std::time::Duration::from_millis(50));
                            continue;
                        }
                        VideoState::Idle | VideoState::Ended => return,
                    }
                }

                let sample = match mp4.read_sample(video_track_id, sample_idx) {
                    Ok(Some(s)) => s,
                    Ok(None) => break,
                    Err(e) => {
                        tracing::warn!("failed to read sample {sample_idx}: {e}");
                        continue;
                    }
                };

                // Convert MP4 length-prefixed NALUs → Annex B with SPS/PPS injection
                annex_b.clear();
                let mut stream = sample.bytes.as_ref();

                while !stream.is_empty() {
                    if stream.len() < length_size as usize {
                        break;
                    }
                    // Big-endian length prefix of the next NALU.
                    let mut nal_size: u32 = 0;
                    for &byte in stream.iter().take(length_size as usize) {
                        nal_size = (nal_size << 8) | byte as u32;
                    }
                    stream = &stream[length_size as usize..];

                    if nal_size == 0 || nal_size as usize > stream.len() {
                        break;
                    }

                    let nal_data = &stream[..nal_size as usize];
                    // Low 5 bits of the first byte = NAL unit type.
                    let nal_type = nal_data[0] & 0x1F;
                    stream = &stream[nal_size as usize..];

                    match nal_type {
                        7 => sps_seen = true, // SPS
                        8 => pps_seen = true, // PPS
                        5 => {
                            // IDR slice — inject SPS/PPS if not already in-stream
                            if !new_idr && nal_data.len() > 1 && nal_data[1] & 0x80 != 0 {
                                new_idr = true;
                            }
                            if new_idr && !sps_seen && !pps_seen {
                                new_idr = false;
                                for sps in &sps_list {
                                    annex_b.extend_from_slice(&[0, 0, 1]);
                                    annex_b.extend_from_slice(sps);
                                }
                                for pps in &pps_list {
                                    annex_b.extend_from_slice(&[0, 0, 1]);
                                    annex_b.extend_from_slice(pps);
                                }
                            }
                            if new_idr && sps_seen && !pps_seen {
                                for pps in &pps_list {
                                    annex_b.extend_from_slice(&[0, 0, 1]);
                                    annex_b.extend_from_slice(pps);
                                }
                            }
                        }
                        _ => {}
                    }

                    annex_b.extend_from_slice(&[0, 0, 1]);
                    annex_b.extend_from_slice(nal_data);

                    if !new_idr && nal_type == 1 {
                        // Non-IDR slice — reset for next IDR
                        new_idr = true;
                        sps_seen = false;
                        pps_seen = false;
                    }
                }

                if let Some(frame) = decoder.decode_nal(&annex_b) {
                    decoded_count += 1;
                    // Skip display for samples before the seek target (decoder warmup)
                    if sample_idx >= target_sample {
                        let mut inner = state.lock().unwrap();
                        inner.current_frame = Some(std::sync::Arc::new(frame));
                        inner.frame_generation += 1;
                        // Position interpolated from sample index, assuming
                        // uniform sample durations across the track.
                        inner.position_ms = (sample_idx as u64 * duration_ms) / sample_count as u64;
                    }
                }

                // Wall-clock pacing: sleep only for remaining time after decode
                if sample_idx >= target_sample {
                    let clock = playback_clock.get_or_insert_with(std::time::Instant::now);
                    let frames_since_start = (sample_idx - target_sample) + 1;
                    let target_time = *clock + frame_duration * frames_since_start;
                    let now = std::time::Instant::now();
                    if target_time > now {
                        std::thread::sleep(target_time - now);
                    }
                }
            }

            // Flush remaining buffered frames
            if let Some(ref mut raw_decoder) = decoder.decoder {
                if let Ok(remaining) = raw_decoder.flush_remaining() {
                    for yuv in remaining {
                        let (uv_w, uv_h) = yuv.dimensions_uv();
                        let w = uv_w * 2;
                        let h = uv_h * 2;
                        let mut rgba = vec![0u8; w * h * 4];
                        yuv.write_rgba8(&mut rgba);
                        decoded_count += 1;
                        let mut inner = state.lock().unwrap();
                        inner.current_frame = Some(std::sync::Arc::new(Frame::from_rgba(
                            rgba, w as u32, h as u32,
                        )));
                        inner.frame_generation += 1;
                        inner.position_ms = duration_ms;
                    }
                }
            }

            tracing::info!("video: decode complete — {decoded_count}/{sample_count} frames");

            state.lock().unwrap().playback_state = VideoState::Ended;
            playing_sig.set(false);
            position_sig.set(duration_ms);
        });
    }
}

impl Default for VideoPlayer {
    fn default() -> Self {
        Self::new()
    }
}

/// Generic `Player` trait adapter — every method forwards to the
/// inherent `VideoPlayer` implementation of the same name.
impl crate::player::Player for VideoPlayer {
    fn play(&self) {
        VideoPlayer::play(self);
    }
    fn pause(&self) {
        VideoPlayer::pause(self);
    }
    fn stop(&self) {
        VideoPlayer::stop(self);
    }
    fn seek(&self, position_ms: u64) {
        VideoPlayer::seek(self, position_ms);
    }
    fn position_ms(&self) -> u64 {
        VideoPlayer::position_ms(self)
    }
    fn duration_ms(&self) -> u64 {
        VideoPlayer::duration_ms(self)
    }
    fn volume(&self) -> f32 {
        VideoPlayer::volume(self)
    }
    fn set_volume(&self, volume: f32) {
        VideoPlayer::set_volume(self, volume);
    }
    fn is_playing(&self) -> bool {
        VideoPlayer::is_playing(self)
    }
}