
mp4_edit/chunk_offset_builder.rs

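//! Rebuild chunk offset tables for MP4 tracks from their sample-to-chunk
//! (`stsc`) and sample-size (`stsz`) atoms, interleaving chunks either
//! round-robin or in an order derived from existing chunk offsets.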
use std::collections::VecDeque;

use crate::atom::{SampleSizeAtom, SampleToChunkAtom};

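/// Information about a single chunk: the track it belongs to, its 1-based chunk
/// number, its total size in bytes, and the 0-based indices and sizes of the
/// samples it contains.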
#[derive(Debug)]
pub struct ChunkInfo {
    pub track_index: usize,
    pub chunk_number: u32,
    pub chunk_size: u64,
    pub sample_indices: Vec<usize>,
    pub sample_sizes: Vec<u32>,
}

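/// A single chunk offset together with the index of the track it belongs to.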
#[derive(Debug)]
pub struct ChunkOffset {
    pub track_index: usize,
    pub offset: u64,
}

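/// Per-track view over the `stsc` and `stsz` atoms used to derive chunk sizes
/// and chunk-to-sample mappings.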
#[derive(Clone)]
pub struct ChunkOffsetBuilderTrack<'a> {
    stsc: &'a SampleToChunkAtom,
    stsz: &'a SampleSizeAtom,
}

impl<'a> ChunkOffsetBuilderTrack<'a> {
    /// Build chunk information including sizes and sample mappings
    pub fn build_chunk_info(&self, track_index: usize) -> impl Iterator<Item = ChunkInfo> + 'a {
        // Pair each stsc entry with the following one (None for the last) so that
        // each entry knows where its run of chunks ends.
        self.stsc
            .entries
            .iter()
            .zip(
                self.stsc
                    .entries
                    .iter()
                    .skip(1)
                    .map(Some)
                    .chain(std::iter::once(None)),
            )
            .scan(
                (track_index, 0u32),
                |(track_index, sample_index), (entry, next_entry)| {
                    let next_first_chunk = if let Some(next_entry) = next_entry {
                        next_entry.first_chunk
                    } else {
                        // The last entry covers all remaining samples.
                        let remaining_samples =
                            self.stsz.sample_count.saturating_sub(*sample_index);
                        entry.first_chunk + remaining_samples.div_ceil(entry.samples_per_chunk)
                    };

                    let start_sample_index = *sample_index;
                    *sample_index +=
                        (next_first_chunk - entry.first_chunk) * entry.samples_per_chunk;

                    // Process all chunks for this entry
                    let track_index = *track_index;
                    Some((entry.first_chunk..next_first_chunk).scan(
                        (track_index, start_sample_index),
                        |(track_index, sample_index), chunk_num| {
                            // Collect the sample indices and sizes for this chunk and sum its size.
                            let (sample_indices, sample_sizes, chunk_size) = self
                                .stsz
                                .sample_sizes()
                                .enumerate()
                                .skip(*sample_index as usize)
                                .take(entry.samples_per_chunk as usize)
                                .fold(
                                    (
                                        Vec::with_capacity(entry.samples_per_chunk as usize),
                                        Vec::with_capacity(entry.samples_per_chunk as usize),
                                        0u64,
                                    ),
                                    |(mut sample_indices, mut sample_sizes, mut chunk_size),
                                     (i, size)| {
                                        sample_indices.push(i);
                                        sample_sizes.push(*size);
                                        chunk_size += u64::from(*size);
                                        (sample_indices, sample_sizes, chunk_size)
                                    },
                                );
                            *sample_index += entry.samples_per_chunk;

                            Some(ChunkInfo {
                                track_index: *track_index,
                                chunk_number: chunk_num,
                                chunk_size,
                                sample_indices,
                                sample_sizes,
                            })
                        },
                    ))
                },
            )
            .flatten()
    }
}
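/// Metadata produced while building chunk offsets, currently just the total
/// size in bytes of all chunks that were laid out.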
#[derive(Debug, Clone)]
#[non_exhaustive]
pub struct BuildMetadata {
    pub total_size: u64,
}

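/// Builds chunk offsets for a set of tracks from their `stsc` and `stsz` atoms,
/// either round-robin interleaved or following an existing interleaving order.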
pub struct ChunkOffsetBuilder<'a> {
    tracks: Vec<ChunkOffsetBuilderTrack<'a>>,
}

impl Default for ChunkOffsetBuilder<'_> {
    fn default() -> Self {
        Self::new()
    }
}

impl<'a> ChunkOffsetBuilder<'a> {
    pub fn new() -> Self {
        Self { tracks: Vec::new() }
    }

    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            tracks: Vec::with_capacity(capacity),
        }
    }

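    /// Register a track by its `stsc` and `stsz` atoms. The track's index in the
    /// builder's output is its position in the order of `add_track` calls.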
    pub fn add_track(&mut self, stsc: &'a SampleToChunkAtom, stsz: &'a SampleSizeAtom) {
        self.tracks.push(ChunkOffsetBuilderTrack { stsc, stsz });
    }

    /// Build interleaved chunk information including sizes and sample mappings
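    ///
    /// Chunks are emitted round-robin across the registered tracks: one chunk
    /// from each track in turn, skipping tracks whose chunks are exhausted.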
    pub fn build_chunk_info(&self) -> impl Iterator<Item = ChunkInfo> + 'a {
        let mut iters = self
            .tracks
            .clone()
            .into_iter()
            .enumerate()
            .map(|(track_index, track)| track.build_chunk_info(track_index))
            .collect::<VecDeque<_>>();

        // round-robin chunks from each track
        std::iter::from_fn(move || {
            while let Some(mut it) = iters.pop_front() {
                if let Some(item) = it.next() {
                    iters.push_back(it);
                    return Some(item);
                }
            }
            None
        })
    }

    /// Build interleaved chunk offsets for each track given a starting offset
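    ///
    /// Returns one `Vec` of offsets per registered track (in the order the
    /// tracks were added), along with metadata containing the total size of all
    /// chunks. A minimal sketch of the call pattern, assuming `stsc` and `stsz`
    /// atoms constructed as in the tests below (not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut builder = ChunkOffsetBuilder::with_capacity(1);
    /// builder.add_track(&stsc, &stsz);
    /// let (offsets, meta) = builder.build_chunk_offsets(0);
    /// assert_eq!(offsets.len(), 1); // one offset table per registered track
    /// ```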
    pub fn build_chunk_offsets(&self, start_offset: u64) -> (Vec<Vec<u64>>, BuildMetadata) {
        let tracks: Vec<Vec<u64>> = (0..self.tracks.len()).map(|_| Vec::new()).collect();

        let (_, chunk_offsets, total_size) = self.build_chunk_info().fold(
            (start_offset, tracks, 0),
            |(mut current_offset, mut tracks, size), chunk| {
                let chunk_offset = current_offset;
                current_offset += chunk.chunk_size;
                tracks[chunk.track_index].push(chunk_offset);
                (current_offset, tracks, size + chunk.chunk_size)
            },
        );

        (chunk_offsets, BuildMetadata { total_size })
    }

    /// Build interleaved chunk offsets, preserving the interleaving order implied by the original chunk offsets
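    ///
    /// Each slice in `original_chunk_offsets` is expected to hold at least one
    /// offset per chunk of the corresponding track; indexing panics otherwise.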
    pub fn build_chunk_offsets_ordered(
        &self,
        original_chunk_offsets: Vec<&[u64]>,
        start_offset: u64,
    ) -> (Vec<Vec<u64>>, BuildMetadata) {
        struct ChunkOffsetSortable {
            original_offset: u64,
            track_index: usize,
            chunk_size: u64,
        }

        // Allocate intermediate and output `Vec`s
        let (total_chunks, mut tracks): (usize, Vec<Vec<u64>>) =
            original_chunk_offsets.iter().fold(
                (0, Vec::with_capacity(original_chunk_offsets.len())),
                |(mut total, mut tracks), offsets| {
                    total += offsets.len();
                    tracks.push(Vec::with_capacity(offsets.len()));
                    (total, tracks)
                },
            );
        let mut all_chunks = Vec::with_capacity(total_chunks);

        // Collect chunk order information
        for (track_index, track) in self.tracks.iter().enumerate() {
            for (chunk_idx, chunk) in track.build_chunk_info(track_index).enumerate() {
                let original_offset = original_chunk_offsets[track_index][chunk_idx];
                all_chunks.push(ChunkOffsetSortable {
                    original_offset,
                    track_index: chunk.track_index,
                    chunk_size: chunk.chunk_size,
                });
            }
        }

        // Sort by original offset to maintain interleaving order
        all_chunks.sort_unstable_by_key(|chunk| chunk.original_offset);

        // Calculate new offsets maintaining the original interleaving order
        let mut total_size = 0;
        let mut current_offset = start_offset;
        for chunk in all_chunks {
            tracks[chunk.track_index].push(current_offset);
            current_offset += chunk.chunk_size;
            total_size += chunk.chunk_size;
        }

        (tracks, BuildMetadata { total_size })
    }
}

#[cfg(test)]
mod tests {
    use crate::atom::stsc::SampleToChunkEntry;

    use super::*;

    #[test]
    fn test_chunk_offset_calculation() {
        let stsc_entries = vec![
            SampleToChunkEntry {
                first_chunk: 1,
                samples_per_chunk: 2,
                sample_description_index: 1,
            },
            SampleToChunkEntry {
                first_chunk: 3,
                samples_per_chunk: 3,
                sample_description_index: 1,
            },
        ];

        let stsc = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries.into(),
        };

        let stsz = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 7,
            entry_sizes: vec![100, 200, 150, 250, 300, 400, 500].into(),
        };

        let mut builder = ChunkOffsetBuilder::with_capacity(1);
        builder.add_track(&stsc, &stsz);
        let (offsets, meta) = builder.build_chunk_offsets(0);
        let offsets = &offsets[0];

        // Expected:
        // Chunk 1: samples 0,1 (100+200=300 bytes) -> offset 0
        // Chunk 2: samples 2,3 (150+250=400 bytes) -> offset 300
        // Chunk 3: samples 4,5,6 (300+400+500=1200 bytes) -> offset 700

        assert_eq!(offsets.len(), 3);
        assert_eq!(offsets[0], 0); // Chunk 1 starts at 0
        assert_eq!(offsets[1], 300); // Chunk 2 starts at 300
        assert_eq!(offsets[2], 700); // Chunk 3 starts at 700
        assert_eq!(
            meta.total_size,
            stsz.entry_sizes.iter().map(|s| *s as u64).sum::<u64>()
        );
    }

    #[test]
    fn test_chunk_info_generation() {
        let stsc_entries = vec![
            SampleToChunkEntry {
                first_chunk: 1,
                samples_per_chunk: 2,
                sample_description_index: 1,
            },
            SampleToChunkEntry {
                first_chunk: 3,
                samples_per_chunk: 1,
                sample_description_index: 1,
            },
        ];

        let stsc = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries.into(),
        };

        let stsz = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 5,
            entry_sizes: vec![100, 200, 300, 400, 500].into(),
        };

        let mut builder = ChunkOffsetBuilder::with_capacity(1);
        builder.add_track(&stsc, &stsz);
        let chunk_info = builder.build_chunk_info().collect::<Vec<_>>();

        assert_eq!(chunk_info.len(), 3);

        // Chunk 1: 2 samples (0, 1)
        assert_eq!(chunk_info[0].chunk_number, 1);
        assert_eq!(chunk_info[0].chunk_size, 300); // 100 + 200
        assert_eq!(chunk_info[0].sample_indices, vec![0, 1]);

        // Chunk 2: 2 samples (2, 3)
        assert_eq!(chunk_info[1].chunk_number, 2);
        assert_eq!(chunk_info[1].chunk_size, 700); // 300 + 400
        assert_eq!(chunk_info[1].sample_indices, vec![2, 3]);

        // Chunk 3: 1 sample (4)
        assert_eq!(chunk_info[2].chunk_number, 3);
        assert_eq!(chunk_info[2].chunk_size, 500); // 500
        assert_eq!(chunk_info[2].sample_indices, vec![4]);
    }

    #[test]
    fn test_edge_case_empty_samples() {
        let stsc_entries = vec![SampleToChunkEntry {
            first_chunk: 1,
            samples_per_chunk: 1,
            sample_description_index: 1,
        }];

        let stsc = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries.into(),
        };

        let stsz = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 0,
            entry_sizes: vec![].into(),
        };

        let mut builder = ChunkOffsetBuilder::with_capacity(1);
        builder.add_track(&stsc, &stsz);
        let chunk_info = builder.build_chunk_info().collect::<Vec<_>>();

        assert_eq!(chunk_info.len(), 0);
    }

    #[test]
    fn test_track_interleaving() {
        // Track 1: 2 chunks with 2 samples each
        let stsc_entries_1 = vec![SampleToChunkEntry {
            first_chunk: 1,
            samples_per_chunk: 2,
            sample_description_index: 1,
        }];

        let stsc_1 = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries_1.into(),
        };

        let stsz_1 = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 4,
            entry_sizes: vec![100, 200, 150, 250].into(),
        };

        // Track 2: 3 chunks with 1 sample each
        let stsc_entries_2 = vec![SampleToChunkEntry {
            first_chunk: 1,
            samples_per_chunk: 1,
            sample_description_index: 1,
        }];

        let stsc_2 = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries_2.into(),
        };

        let stsz_2 = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 3,
            entry_sizes: vec![300, 400, 500].into(),
        };

        let mut builder = ChunkOffsetBuilder::with_capacity(2);
        builder.add_track(&stsc_1, &stsz_1);
        builder.add_track(&stsc_2, &stsz_2);

        let chunk_info = builder.build_chunk_info().collect::<Vec<_>>();

        // Expected interleaving: T1C1, T2C1, T1C2, T2C2, T2C3
        assert_eq!(chunk_info.len(), 5);

        // Track 1, Chunk 1: samples 0,1 (track_index=0)
        assert_eq!(chunk_info[0].track_index, 0);
        assert_eq!(chunk_info[0].chunk_number, 1);
        assert_eq!(chunk_info[0].chunk_size, 300); // 100 + 200
        assert_eq!(chunk_info[0].sample_indices, vec![0, 1]);

        // Track 2, Chunk 1: sample 0 (track_index=1)
        assert_eq!(chunk_info[1].track_index, 1);
        assert_eq!(chunk_info[1].chunk_number, 1);
        assert_eq!(chunk_info[1].chunk_size, 300); // 300
        assert_eq!(chunk_info[1].sample_indices, vec![0]);

        // Track 1, Chunk 2: samples 2,3 (track_index=0)
        assert_eq!(chunk_info[2].track_index, 0);
        assert_eq!(chunk_info[2].chunk_number, 2);
        assert_eq!(chunk_info[2].chunk_size, 400); // 150 + 250
        assert_eq!(chunk_info[2].sample_indices, vec![2, 3]);

        // Track 2, Chunk 2: sample 1 (track_index=1)
        assert_eq!(chunk_info[3].track_index, 1);
        assert_eq!(chunk_info[3].chunk_number, 2);
        assert_eq!(chunk_info[3].chunk_size, 400); // 400
        assert_eq!(chunk_info[3].sample_indices, vec![1]);

        // Track 2, Chunk 3: sample 2 (track_index=1)
        assert_eq!(chunk_info[4].track_index, 1);
        assert_eq!(chunk_info[4].chunk_number, 3);
        assert_eq!(chunk_info[4].chunk_size, 500); // 500
        assert_eq!(chunk_info[4].sample_indices, vec![2]);

        // Test chunk offsets are calculated correctly with interleaving
        let (offsets, meta) = builder.build_chunk_offsets(0);

        // Track 1 offsets: [0, 600] (T1C1 at 0, T1C2 at 0+300+300=600)
        assert_eq!(offsets[0], vec![0, 600]);

        // Track 2 offsets: [300, 1000, 1400] (T2C1 at 300, T2C2 at 300+300+400=1000, T2C3 at 1000+400=1400)
        assert_eq!(offsets[1], vec![300, 1000, 1400]);

        assert_eq!(
            meta.total_size,
            stsz_1
                .entry_sizes
                .iter()
                .cloned()
                .chain(stsz_2.entry_sizes.iter().cloned())
                .map(|s| s as u64)
                .sum::<u64>()
        );
    }

    #[test]
    fn test_build_chunk_offsets_ordered() {
        // Track 1: 2 chunks
        let stsc_entries_1 = vec![SampleToChunkEntry {
            first_chunk: 1,
            samples_per_chunk: 2,
            sample_description_index: 1,
        }];

        let stsc_1 = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries_1.into(),
        };

        let stsz_1 = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 4,
            entry_sizes: vec![100, 200, 150, 250].into(),
        };

        // Track 2: 2 chunks
        let stsc_entries_2 = vec![SampleToChunkEntry {
            first_chunk: 1,
            samples_per_chunk: 1,
            sample_description_index: 1,
        }];

        let stsc_2 = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries_2.into(),
        };

        let stsz_2 = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 2,
            entry_sizes: vec![300, 400].into(),
        };

        let mut builder = ChunkOffsetBuilder::with_capacity(2);
        builder.add_track(&stsc_1, &stsz_1);
        builder.add_track(&stsc_2, &stsz_2);

        // Original chunk offsets in a specific order:
        // Track 1: chunks at offsets [1000, 2000]
        // Track 2: chunks at offsets [500, 1500]
        // This means the order should be: T2C1 (500), T1C1 (1000), T2C2 (1500), T1C2 (2000)
        let original_offsets_track_1 = vec![1000u64, 2000u64];
        let original_offsets_track_2 = vec![500u64, 1500u64];
        let original_offsets = vec![
            original_offsets_track_1.as_slice(),
            original_offsets_track_2.as_slice(),
        ];

        let (new_offsets, meta) = builder.build_chunk_offsets_ordered(original_offsets, 0);

        // With the original ordering (T2C1, T1C1, T2C2, T1C2) and chunk sizes:
        // T2C1: 300 bytes -> offset 0
        // T1C1: 300 bytes -> offset 300
        // T2C2: 400 bytes -> offset 600
        // T1C2: 400 bytes -> offset 1000

        // Track 1 chunks should be at offsets [300, 1000]
        assert_eq!(new_offsets[0], vec![300, 1000]);

        // Track 2 chunks should be at offsets [0, 600]
        assert_eq!(new_offsets[1], vec![0, 600]);

        assert_eq!(
            meta.total_size,
            stsz_1
                .entry_sizes
                .iter()
                .cloned()
                .chain(stsz_2.entry_sizes.iter().cloned())
                .map(|s| s as u64)
                .sum::<u64>()
        );
    }

    #[test]
    fn test_build_chunk_offsets_ordered_single_track() {
        let stsc_entries = vec![SampleToChunkEntry {
            first_chunk: 1,
            samples_per_chunk: 3,
            sample_description_index: 1,
        }];

        let stsc = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries.into(),
        };

        let stsz = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 6,
            entry_sizes: vec![100, 200, 300, 150, 250, 350].into(),
        };

        let mut builder = ChunkOffsetBuilder::with_capacity(1);
        builder.add_track(&stsc, &stsz);

        // Single track with 2 chunks at original offsets [5000, 10000]
        let original_offsets_track_1 = vec![5000u64, 10000u64];
        let original_offsets = vec![original_offsets_track_1.as_slice()];

        let (new_offsets, meta) = builder.build_chunk_offsets_ordered(original_offsets, 0);

        // Chunk 1: samples 0,1,2 -> sizes 100+200+300 = 600
        // Chunk 2: samples 3,4,5 -> sizes 150+250+350 = 750
        // Sequential placement: Chunk 1 at 0, Chunk 2 at 600
        assert_eq!(new_offsets[0], vec![0, 600]);

        assert_eq!(
            meta.total_size,
            stsz.entry_sizes
                .iter()
                .cloned()
                .map(|s| s as u64)
                .sum::<u64>()
        );
    }

    #[test]
    fn test_build_chunk_offsets_ordered_non_zero_start() {
        let stsc_entries = vec![SampleToChunkEntry {
            first_chunk: 1,
            samples_per_chunk: 1,
            sample_description_index: 1,
        }];

        let stsc = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries.into(),
        };

        let stsz = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 3,
            entry_sizes: vec![100, 200, 300].into(),
        };

        let mut builder = ChunkOffsetBuilder::with_capacity(1);
        builder.add_track(&stsc, &stsz);

        let original_offsets_track_1 = vec![1000u64, 2000u64, 3000u64];
        let original_offsets = vec![original_offsets_track_1.as_slice()];
        let start_offset = 50000u64;

        let (new_offsets, meta) =
            builder.build_chunk_offsets_ordered(original_offsets, start_offset);

        // Starting at 50000, chunks of sizes 100, 200, 300
        assert_eq!(new_offsets[0], vec![50000, 50100, 50300]);

        assert_eq!(
            meta.total_size,
            stsz.entry_sizes
                .iter()
                .cloned()
                .map(|s| s as u64)
                .sum::<u64>()
        );
    }

    #[test]
    fn test_build_chunk_offsets_ordered_interleaving() {
        // Track 1: 3 chunks with different sample counts
        let stsc_entries_1 = vec![
            SampleToChunkEntry {
                first_chunk: 1,
                samples_per_chunk: 1,
                sample_description_index: 1,
            },
            SampleToChunkEntry {
                first_chunk: 2,
                samples_per_chunk: 2,
                sample_description_index: 1,
            },
        ];

        let stsc_1 = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries_1.into(),
        };

        let stsz_1 = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 5,
            entry_sizes: vec![100, 150, 200, 250, 300].into(),
        };

        // Track 2: 2 chunks
        let stsc_entries_2 = vec![SampleToChunkEntry {
            first_chunk: 1,
            samples_per_chunk: 2,
            sample_description_index: 1,
        }];

        let stsc_2 = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries_2.into(),
        };

        let stsz_2 = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 4,
            entry_sizes: vec![80, 120, 160, 240].into(),
        };

        let mut builder = ChunkOffsetBuilder::with_capacity(2);
        builder.add_track(&stsc_1, &stsz_1);
        builder.add_track(&stsc_2, &stsz_2);

        // Original offsets creating interleaving:
        // Track 1: [100, 300, 700] (chunk 1: sample 0, chunk 2: samples 1-2, chunk 3: samples 3-4)
        // Track 2: [200, 600] (chunk 1: samples 0-1, chunk 2: samples 2-3)
        // Order should be: T1C1 (100), T2C1 (200), T1C2 (300), T2C2 (600), T1C3 (700)
        let original_offsets_track_1 = vec![100u64, 300u64, 700u64];
        let original_offsets_track_2 = vec![200u64, 600u64];
        let original_offsets = vec![
            original_offsets_track_1.as_slice(),
            original_offsets_track_2.as_slice(),
        ];

        let (new_offsets, meta) = builder.build_chunk_offsets_ordered(original_offsets, 0);

        // Chunk sizes:
        // T1C1: 100 bytes (sample 0)
        // T2C1: 200 bytes (samples 0-1: 80+120)
        // T1C2: 350 bytes (samples 1-2: 150+200)
        // T2C2: 400 bytes (samples 2-3: 160+240)
        // T1C3: 550 bytes (samples 3-4: 250+300)
        //
        // Sequential placement:
        // T1C1 at 0, T2C1 at 100, T1C2 at 300, T2C2 at 650, T1C3 at 1050

        assert_eq!(new_offsets[0], vec![0, 300, 1050]);
        assert_eq!(new_offsets[1], vec![100, 650]);

        assert_eq!(
            meta.total_size,
            stsz_1
                .entry_sizes
                .iter()
                .cloned()
                .chain(stsz_2.entry_sizes.iter().cloned())
                .map(|s| s as u64)
                .sum::<u64>()
        );
    }

    #[test]
    fn test_build_chunk_offsets_ordered_different_chunk_sizes() {
        // Track 1: Large chunks
        let stsc_entries_1 = vec![SampleToChunkEntry {
            first_chunk: 1,
            samples_per_chunk: 4,
            sample_description_index: 1,
        }];

        let stsc_1 = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries_1.into(),
        };

        let stsz_1 = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 8,
            entry_sizes: vec![1000; 8].into(), // All samples are 1000 bytes
        };

        // Track 2: Small chunks
        let stsc_entries_2 = vec![SampleToChunkEntry {
            first_chunk: 1,
            samples_per_chunk: 1,
            sample_description_index: 1,
        }];

        let stsc_2 = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries_2.into(),
        };

        let stsz_2 = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 4,
            entry_sizes: vec![50; 4].into(), // All samples are 50 bytes
        };

        let mut builder = ChunkOffsetBuilder::with_capacity(2);
        builder.add_track(&stsc_1, &stsz_1);
        builder.add_track(&stsc_2, &stsz_2);

        // Interleave small chunks between large chunks
        // T1: [1000, 10000] (chunks of 4000 bytes each)
        // T2: [2000, 3000, 4000, 5000] (chunks of 50 bytes each)
        let original_offsets_track_1 = vec![1000u64, 10000u64];
        let original_offsets_track_2 = vec![2000u64, 3000u64, 4000u64, 5000u64];
        let original_offsets = vec![
            original_offsets_track_1.as_slice(),
            original_offsets_track_2.as_slice(),
        ];

        let (new_offsets, meta) = builder.build_chunk_offsets_ordered(original_offsets, 0);

        // Order: T1C1 (1000), T2C1 (2000), T2C2 (3000), T2C3 (4000), T2C4 (5000), T1C2 (10000)
        // Sizes: 4000, 50, 50, 50, 50, 4000
        // Offsets: 0, 4000, 4050, 4100, 4150, 4200
        assert_eq!(new_offsets[0], vec![0, 4200]);
        assert_eq!(new_offsets[1], vec![4000, 4050, 4100, 4150]);

        assert_eq!(
            meta.total_size,
            stsz_1
                .entry_sizes
                .iter()
                .cloned()
                .chain(stsz_2.entry_sizes.iter().cloned())
                .map(|s| s as u64)
                .sum::<u64>()
        );
    }

    #[test]
    fn test_build_chunk_offsets_ordered_empty_track_handling() {
        // Track 1: Has chunks
        let stsc_entries_1 = vec![SampleToChunkEntry {
            first_chunk: 1,
            samples_per_chunk: 2,
            sample_description_index: 1,
        }];

        let stsc_1 = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries_1.into(),
        };

        let stsz_1 = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 4,
            entry_sizes: vec![100, 200, 300, 400].into(),
        };

        // Track 2: Empty (no samples)
        let stsc_entries_2 = vec![SampleToChunkEntry {
            first_chunk: 1,
            samples_per_chunk: 1,
            sample_description_index: 1,
        }];

        let stsc_2 = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries_2.into(),
        };

        let stsz_2 = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 0,
            entry_sizes: vec![].into(),
        };

        let mut builder = ChunkOffsetBuilder::with_capacity(2);
        builder.add_track(&stsc_1, &stsz_1);
        builder.add_track(&stsc_2, &stsz_2);

        let original_offsets_track_1 = vec![1000u64, 2000u64];
        let original_offsets_track_2: Vec<u64> = vec![];
        let original_offsets = vec![
            original_offsets_track_1.as_slice(),
            original_offsets_track_2.as_slice(), // Empty track has no chunks
        ];

        let (new_offsets, meta) = builder.build_chunk_offsets_ordered(original_offsets, 0);

        // Only track 1 has chunks: chunk 1 (300 bytes), chunk 2 (700 bytes)
        assert_eq!(new_offsets[0], vec![0, 300]);
        assert_eq!(new_offsets[1], vec![]); // Empty track remains empty

        assert_eq!(
            meta.total_size,
            stsz_1
                .entry_sizes
                .iter()
                .cloned()
                .chain(stsz_2.entry_sizes.iter().cloned())
                .map(|s| s as u64)
                .sum::<u64>()
        );
    }

    #[test]
    fn test_build_chunk_offsets_ordered_non_round_robin_interleaving() {
        // Track 1: 5 chunks
        let stsc_entries_1 = vec![SampleToChunkEntry {
            first_chunk: 1,
            samples_per_chunk: 1,
            sample_description_index: 1,
        }];

        let stsc_1 = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries_1.into(),
        };

        let stsz_1 = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 5,
            entry_sizes: vec![100, 150, 200, 250, 300].into(),
        };

        // Track 2: 4 chunks
        let stsc_entries_2 = vec![SampleToChunkEntry {
            first_chunk: 1,
            samples_per_chunk: 2,
            sample_description_index: 1,
        }];

        let stsc_2 = SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: stsc_entries_2.into(),
        };

        let stsz_2 = SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: 8,
            entry_sizes: vec![80, 120, 90, 110, 70, 130, 60, 140].into(),
        };

        let mut builder = ChunkOffsetBuilder::with_capacity(2);
        builder.add_track(&stsc_1, &stsz_1);
        builder.add_track(&stsc_2, &stsz_2);

        // Interleaving order is taken from the original offsets rather than round-robin scheduling:
        // T1 chunks: [100, 300, 500, 800, 1000]
        // T2 chunks: [200, 400, 600, 900]
        // Sorted by original offset, the expected order is:
        // T1C1(100), T2C1(200), T1C2(300), T2C2(400), T1C3(500), T2C3(600), T1C4(800), T2C4(900), T1C5(1000)
        let original_offsets_track_1 = vec![100u64, 300u64, 500u64, 800u64, 1000u64];
        let original_offsets_track_2 = vec![200u64, 400u64, 600u64, 900u64];
        let original_offsets = vec![
            original_offsets_track_1.as_slice(),
            original_offsets_track_2.as_slice(),
        ];

        let (new_offsets, meta) = builder.build_chunk_offsets_ordered(original_offsets, 0);

        // Calculate expected chunk sizes:
        // T1: [100, 150, 200, 250, 300] (individual samples)
        // T2: [(80+120)=200, (90+110)=200, (70+130)=200, (60+140)=200] (2 samples per chunk)
        //
        // Sequential placement based on original ordering:
        // T1C1(100) at 0, T2C1(200) at 100, T1C2(150) at 300, T2C2(200) at 450,
        // T1C3(200) at 650, T2C3(200) at 850, T1C4(250) at 1050, T2C4(200) at 1300, T1C5(300) at 1500

        assert_eq!(new_offsets[0], vec![0, 300, 650, 1050, 1500]); // Track 1 chunks
        assert_eq!(new_offsets[1], vec![100, 450, 850, 1300]); // Track 2 chunks

        assert_eq!(
            meta.total_size,
            stsz_1
                .entry_sizes
                .iter()
                .cloned()
                .chain(stsz_2.entry_sizes.iter().cloned())
                .map(|s| s as u64)
                .sum::<u64>()
        );
    }
}