use std::collections::VecDeque;
use crate::atom::{SampleSizeAtom, SampleToChunkAtom};
/// Per-chunk data for one track, derived from its `stsc`/`stsz` atoms by
/// [`ChunkOffsetBuilderTrack::build_chunk_info`].
#[derive(Debug)]
pub struct ChunkInfo {
    /// Index of the owning track (its registration order in the builder).
    pub track_index: usize,
    /// Chunk number using the `stsc` `first_chunk` numbering (1-based).
    pub chunk_number: u32,
    /// Total size in bytes of all samples contained in this chunk.
    pub chunk_size: u64,
    /// Global 0-based positions (into the track's full sample list) of the
    /// samples in this chunk.
    pub sample_indices: Vec<usize>,
    /// Size in bytes of each sample in this chunk; parallel to
    /// `sample_indices`.
    pub sample_sizes: Vec<u32>,
}
/// An absolute file offset associated with one chunk of a track.
///
/// NOTE(review): not referenced anywhere in this file — presumably consumed
/// elsewhere in the crate; confirm before removing.
#[derive(Debug)]
pub struct ChunkOffset {
    /// Index of the track the chunk belongs to.
    pub track_index: usize,
    /// Byte offset of the chunk.
    pub offset: u64,
}
/// Borrowed view of the two sample-layout atoms of a single track:
/// sample-to-chunk (`stsc`) and sample-size (`stsz`). Cheap to clone (two
/// references).
#[derive(Clone)]
pub struct ChunkOffsetBuilderTrack<'a> {
    stsc: &'a SampleToChunkAtom,
    stsz: &'a SampleSizeAtom,
}
impl<'a> ChunkOffsetBuilderTrack<'a> {
    /// Yields one [`ChunkInfo`] per chunk described by this track's `stsc`
    /// atom, tagging each with `track_index`.
    ///
    /// Each non-final `stsc` entry covers the chunks up to (exclusive) the
    /// next entry's `first_chunk`; the final entry covers however many chunks
    /// are needed to hold the remaining `stsz` samples. The last emitted
    /// chunk may contain fewer than `samples_per_chunk` samples if `stsz`
    /// runs out.
    pub fn build_chunk_info(&self, track_index: usize) -> impl Iterator<Item = ChunkInfo> + 'a {
        let stsc = self.stsc;
        let stsz = self.stsz;

        // Fix: the previous implementation built the iterator from non-`move`
        // closures capturing `&self`, so the `impl Iterator + 'a` hidden type
        // captured the elided `&self` lifetime (not in the bounds) and the
        // method could not be called through a temporary. Materializing into a
        // Vec makes the returned iterator own its data outright.
        let mut chunks = Vec::new();
        // Global 0-based index of the next unconsumed sample.
        let mut next_sample: u32 = 0;
        // Single pass over the sample sizes. Previously every chunk re-scanned
        // the size iterator from the start via `skip`, which was
        // O(samples * chunks).
        let mut sizes = stsz.sample_sizes().enumerate();

        for (entry_idx, entry) in stsc.entries.iter().enumerate() {
            let next_first_chunk = match stsc.entries.get(entry_idx + 1) {
                Some(next) => next.first_chunk,
                None => {
                    let remaining = stsz.sample_count.saturating_sub(next_sample);
                    if entry.samples_per_chunk == 0 {
                        // Guard against division by zero on a malformed atom;
                        // a zero-samples-per-chunk entry contributes no chunks.
                        entry.first_chunk
                    } else {
                        // Saturate rather than overflow on hostile chunk
                        // numbers.
                        entry
                            .first_chunk
                            .saturating_add(remaining.div_ceil(entry.samples_per_chunk))
                    }
                }
            };

            for chunk_number in entry.first_chunk..next_first_chunk {
                let per_chunk = entry.samples_per_chunk as usize;
                let mut sample_indices = Vec::with_capacity(per_chunk);
                let mut sample_sizes = Vec::with_capacity(per_chunk);
                let mut chunk_size = 0u64;
                // Consume up to `samples_per_chunk` sizes; stop early if the
                // size table is exhausted (truncated `stsz`).
                for _ in 0..per_chunk {
                    let Some((i, size)) = sizes.next() else { break };
                    sample_indices.push(i);
                    sample_sizes.push(*size);
                    chunk_size += u64::from(*size);
                }
                next_sample = next_sample.saturating_add(entry.samples_per_chunk);
                chunks.push(ChunkInfo {
                    track_index,
                    chunk_number,
                    chunk_size,
                    sample_indices,
                    sample_sizes,
                });
            }
        }

        chunks.into_iter()
    }
}
/// Summary data produced alongside a rebuilt set of chunk offsets.
#[derive(Debug, Clone)]
#[non_exhaustive]
pub struct BuildMetadata {
    /// Sum of all chunk sizes laid out, i.e. the total payload bytes covered
    /// by the produced offsets.
    pub total_size: u64,
}
/// Computes new chunk offsets for a set of tracks from their `stsc`/`stsz`
/// atoms, either interleaved round-robin or following the original file's
/// chunk ordering.
pub struct ChunkOffsetBuilder<'a> {
    // One entry per registered track; the track's index is its push order.
    tracks: Vec<ChunkOffsetBuilderTrack<'a>>,
}
impl Default for ChunkOffsetBuilder<'_> {
    /// An empty builder with no tracks, identical to
    /// [`ChunkOffsetBuilder::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl<'a> ChunkOffsetBuilder<'a> {
    /// Creates an empty builder with no tracks.
    pub fn new() -> Self {
        Self { tracks: Vec::new() }
    }

    /// Creates an empty builder pre-sized to hold `capacity` tracks without
    /// reallocating.
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            tracks: Vec::with_capacity(capacity),
        }
    }

    /// Registers a track; the track's index is its registration order.
    pub fn add_track(&mut self, stsc: &'a SampleToChunkAtom, stsz: &'a SampleSizeAtom) {
        self.tracks.push(ChunkOffsetBuilderTrack { stsc, stsz });
    }

    /// Interleaves the chunks of all tracks round-robin: one chunk from each
    /// track in turn, dropping a track's iterator once it is exhausted.
    pub fn build_chunk_info(&self) -> impl Iterator<Item = ChunkInfo> + 'a {
        let mut pending = self
            .tracks
            .clone()
            .into_iter()
            .enumerate()
            .map(|(index, track)| track.build_chunk_info(index))
            .collect::<VecDeque<_>>();
        std::iter::from_fn(move || loop {
            // `?` ends the stream once every per-track iterator is gone.
            let mut front = pending.pop_front()?;
            if let Some(chunk) = front.next() {
                pending.push_back(front);
                return Some(chunk);
            }
            // Exhausted iterator: drop it and try the next track.
        })
    }

    /// Lays all chunks out contiguously starting at `start_offset`, in the
    /// round-robin order produced by [`Self::build_chunk_info`]. Returns one
    /// offset vector per track plus metadata with the total payload size.
    pub fn build_chunk_offsets(&self, start_offset: u64) -> (Vec<Vec<u64>>, BuildMetadata) {
        let mut chunk_offsets: Vec<Vec<u64>> = (0..self.tracks.len()).map(|_| Vec::new()).collect();
        let mut cursor = start_offset;
        let mut total_size = 0;
        for chunk in self.build_chunk_info() {
            chunk_offsets[chunk.track_index].push(cursor);
            cursor += chunk.chunk_size;
            total_size += chunk.chunk_size;
        }
        (chunk_offsets, BuildMetadata { total_size })
    }

    /// Like [`Self::build_chunk_offsets`], but preserves the relative order
    /// the chunks had in the source file: chunks are sorted by their original
    /// offsets (`original_chunk_offsets[track][chunk]`) before new contiguous
    /// offsets are assigned from `start_offset`.
    ///
    /// Panics if any track has fewer original offsets than it has chunks.
    pub fn build_chunk_offsets_ordered(
        &self,
        original_chunk_offsets: Vec<&[u64]>,
        start_offset: u64,
    ) -> (Vec<Vec<u64>>, BuildMetadata) {
        // Sort key + payload for one chunk of one track.
        struct SortableChunk {
            original_offset: u64,
            track_index: usize,
            chunk_size: u64,
        }

        let mut tracks: Vec<Vec<u64>> = Vec::with_capacity(original_chunk_offsets.len());
        let mut total_chunks = 0;
        for offsets in &original_chunk_offsets {
            total_chunks += offsets.len();
            tracks.push(Vec::with_capacity(offsets.len()));
        }

        let mut all_chunks = Vec::with_capacity(total_chunks);
        for (track_index, track) in self.tracks.iter().enumerate() {
            for (chunk_idx, chunk) in track.build_chunk_info(track_index).enumerate() {
                all_chunks.push(SortableChunk {
                    original_offset: original_chunk_offsets[track_index][chunk_idx],
                    track_index: chunk.track_index,
                    chunk_size: chunk.chunk_size,
                });
            }
        }
        all_chunks.sort_unstable_by_key(|chunk| chunk.original_offset);

        let mut cursor = start_offset;
        let mut total_size = 0;
        for chunk in all_chunks {
            tracks[chunk.track_index].push(cursor);
            cursor += chunk.chunk_size;
            total_size += chunk.chunk_size;
        }
        (tracks, BuildMetadata { total_size })
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::atom::stsc::SampleToChunkEntry;

    /// Shorthand for an entry with `sample_description_index` fixed to 1
    /// (all tests use description index 1).
    fn entry(first_chunk: u32, samples_per_chunk: u32) -> SampleToChunkEntry {
        SampleToChunkEntry {
            first_chunk,
            samples_per_chunk,
            sample_description_index: 1,
        }
    }

    /// Builds a version-0 `stsc` atom from the given entries.
    fn stsc(entries: Vec<SampleToChunkEntry>) -> SampleToChunkAtom {
        SampleToChunkAtom {
            version: 0,
            flags: [0, 0, 0],
            entries: entries.into(),
        }
    }

    /// Builds a version-0 `stsz` atom with per-sample sizes; `sample_count`
    /// is derived from the size table length.
    fn stsz(sizes: Vec<u32>) -> SampleSizeAtom {
        SampleSizeAtom {
            version: 0,
            flags: [0u8; 3],
            sample_size: 0,
            sample_count: sizes.len() as u32,
            entry_sizes: sizes.into(),
        }
    }

    /// Sum of all sample sizes — the expected `BuildMetadata::total_size`
    /// contribution of one track.
    fn total_size(stsz: &SampleSizeAtom) -> u64 {
        stsz.entry_sizes.iter().map(|&s| u64::from(s)).sum()
    }

    #[test]
    fn test_chunk_offset_calculation() {
        let stsc = stsc(vec![entry(1, 2), entry(3, 3)]);
        let stsz = stsz(vec![100, 200, 150, 250, 300, 400, 500]);
        let mut builder = ChunkOffsetBuilder::with_capacity(1);
        builder.add_track(&stsc, &stsz);

        let (offsets, meta) = builder.build_chunk_offsets(0);
        let offsets = &offsets[0];
        assert_eq!(offsets.len(), 3);
        assert_eq!(offsets[0], 0);
        assert_eq!(offsets[1], 300);
        assert_eq!(offsets[2], 700);
        assert_eq!(meta.total_size, total_size(&stsz));
    }

    #[test]
    fn test_chunk_info_generation() {
        let stsc = stsc(vec![entry(1, 2), entry(3, 1)]);
        let stsz = stsz(vec![100, 200, 300, 400, 500]);
        let mut builder = ChunkOffsetBuilder::with_capacity(1);
        builder.add_track(&stsc, &stsz);

        let chunk_info = builder.build_chunk_info().collect::<Vec<_>>();
        assert_eq!(chunk_info.len(), 3);
        assert_eq!(chunk_info[0].chunk_number, 1);
        assert_eq!(chunk_info[0].chunk_size, 300);
        assert_eq!(chunk_info[0].sample_indices, vec![0, 1]);
        assert_eq!(chunk_info[1].chunk_number, 2);
        assert_eq!(chunk_info[1].chunk_size, 700);
        assert_eq!(chunk_info[1].sample_indices, vec![2, 3]);
        assert_eq!(chunk_info[2].chunk_number, 3);
        assert_eq!(chunk_info[2].chunk_size, 500);
        assert_eq!(chunk_info[2].sample_indices, vec![4]);
    }

    #[test]
    fn test_edge_case_empty_samples() {
        // A track whose stsc declares a layout but whose stsz has no samples
        // must yield no chunks at all.
        let stsc = stsc(vec![entry(1, 1)]);
        let stsz = stsz(vec![]);
        let mut builder = ChunkOffsetBuilder::with_capacity(1);
        builder.add_track(&stsc, &stsz);

        let chunk_info = builder.build_chunk_info().collect::<Vec<_>>();
        assert_eq!(chunk_info.len(), 0);
    }

    #[test]
    fn test_track_interleaving() {
        let stsc_1 = stsc(vec![entry(1, 2)]);
        let stsz_1 = stsz(vec![100, 200, 150, 250]);
        let stsc_2 = stsc(vec![entry(1, 1)]);
        let stsz_2 = stsz(vec![300, 400, 500]);
        let mut builder = ChunkOffsetBuilder::with_capacity(2);
        builder.add_track(&stsc_1, &stsz_1);
        builder.add_track(&stsc_2, &stsz_2);

        // Round-robin: t0 chunk1, t1 chunk1, t0 chunk2, then t1 alone.
        let chunk_info = builder.build_chunk_info().collect::<Vec<_>>();
        assert_eq!(chunk_info.len(), 5);
        assert_eq!(chunk_info[0].track_index, 0);
        assert_eq!(chunk_info[0].chunk_number, 1);
        assert_eq!(chunk_info[0].chunk_size, 300);
        assert_eq!(chunk_info[0].sample_indices, vec![0, 1]);
        assert_eq!(chunk_info[1].track_index, 1);
        assert_eq!(chunk_info[1].chunk_number, 1);
        assert_eq!(chunk_info[1].chunk_size, 300);
        assert_eq!(chunk_info[1].sample_indices, vec![0]);
        assert_eq!(chunk_info[2].track_index, 0);
        assert_eq!(chunk_info[2].chunk_number, 2);
        assert_eq!(chunk_info[2].chunk_size, 400);
        assert_eq!(chunk_info[2].sample_indices, vec![2, 3]);
        assert_eq!(chunk_info[3].track_index, 1);
        assert_eq!(chunk_info[3].chunk_number, 2);
        assert_eq!(chunk_info[3].chunk_size, 400);
        assert_eq!(chunk_info[3].sample_indices, vec![1]);
        assert_eq!(chunk_info[4].track_index, 1);
        assert_eq!(chunk_info[4].chunk_number, 3);
        assert_eq!(chunk_info[4].chunk_size, 500);
        assert_eq!(chunk_info[4].sample_indices, vec![2]);

        let (offsets, meta) = builder.build_chunk_offsets(0);
        assert_eq!(offsets[0], vec![0, 600]);
        assert_eq!(offsets[1], vec![300, 1000, 1400]);
        assert_eq!(meta.total_size, total_size(&stsz_1) + total_size(&stsz_2));
    }

    #[test]
    fn test_build_chunk_offsets_ordered() {
        let stsc_1 = stsc(vec![entry(1, 2)]);
        let stsz_1 = stsz(vec![100, 200, 150, 250]);
        let stsc_2 = stsc(vec![entry(1, 1)]);
        let stsz_2 = stsz(vec![300, 400]);
        let mut builder = ChunkOffsetBuilder::with_capacity(2);
        builder.add_track(&stsc_1, &stsz_1);
        builder.add_track(&stsc_2, &stsz_2);

        // Original file order by offset: t1c1 (500), t0c1 (1000), t1c2 (1500),
        // t0c2 (2000).
        let originals_1 = vec![1000u64, 2000u64];
        let originals_2 = vec![500u64, 1500u64];
        let originals: Vec<&[u64]> = vec![originals_1.as_slice(), originals_2.as_slice()];

        let (new_offsets, meta) = builder.build_chunk_offsets_ordered(originals, 0);
        assert_eq!(new_offsets[0], vec![300, 1000]);
        assert_eq!(new_offsets[1], vec![0, 600]);
        assert_eq!(meta.total_size, total_size(&stsz_1) + total_size(&stsz_2));
    }

    #[test]
    fn test_build_chunk_offsets_ordered_single_track() {
        let stsc = stsc(vec![entry(1, 3)]);
        let stsz = stsz(vec![100, 200, 300, 150, 250, 350]);
        let mut builder = ChunkOffsetBuilder::with_capacity(1);
        builder.add_track(&stsc, &stsz);

        let originals_1 = vec![5000u64, 10000u64];
        let originals: Vec<&[u64]> = vec![originals_1.as_slice()];

        let (new_offsets, meta) = builder.build_chunk_offsets_ordered(originals, 0);
        assert_eq!(new_offsets[0], vec![0, 600]);
        assert_eq!(meta.total_size, total_size(&stsz));
    }

    #[test]
    fn test_build_chunk_offsets_ordered_non_zero_start() {
        let stsc = stsc(vec![entry(1, 1)]);
        let stsz = stsz(vec![100, 200, 300]);
        let mut builder = ChunkOffsetBuilder::with_capacity(1);
        builder.add_track(&stsc, &stsz);

        let originals_1 = vec![1000u64, 2000u64, 3000u64];
        let originals: Vec<&[u64]> = vec![originals_1.as_slice()];
        let start_offset = 50000u64;

        let (new_offsets, meta) = builder.build_chunk_offsets_ordered(originals, start_offset);
        assert_eq!(new_offsets[0], vec![50000, 50100, 50300]);
        assert_eq!(meta.total_size, total_size(&stsz));
    }

    #[test]
    fn test_build_chunk_offsets_ordered_interleaving() {
        let stsc_1 = stsc(vec![entry(1, 1), entry(2, 2)]);
        let stsz_1 = stsz(vec![100, 150, 200, 250, 300]);
        let stsc_2 = stsc(vec![entry(1, 2)]);
        let stsz_2 = stsz(vec![80, 120, 160, 240]);
        let mut builder = ChunkOffsetBuilder::with_capacity(2);
        builder.add_track(&stsc_1, &stsz_1);
        builder.add_track(&stsc_2, &stsz_2);

        let originals_1 = vec![100u64, 300u64, 700u64];
        let originals_2 = vec![200u64, 600u64];
        let originals: Vec<&[u64]> = vec![originals_1.as_slice(), originals_2.as_slice()];

        let (new_offsets, meta) = builder.build_chunk_offsets_ordered(originals, 0);
        assert_eq!(new_offsets[0], vec![0, 300, 1050]);
        assert_eq!(new_offsets[1], vec![100, 650]);
        assert_eq!(meta.total_size, total_size(&stsz_1) + total_size(&stsz_2));
    }

    #[test]
    fn test_build_chunk_offsets_ordered_different_chunk_sizes() {
        let stsc_1 = stsc(vec![entry(1, 4)]);
        let stsz_1 = stsz(vec![1000; 8]);
        let stsc_2 = stsc(vec![entry(1, 1)]);
        let stsz_2 = stsz(vec![50; 4]);
        let mut builder = ChunkOffsetBuilder::with_capacity(2);
        builder.add_track(&stsc_1, &stsz_1);
        builder.add_track(&stsc_2, &stsz_2);

        let originals_1 = vec![1000u64, 10000u64];
        let originals_2 = vec![2000u64, 3000u64, 4000u64, 5000u64];
        let originals: Vec<&[u64]> = vec![originals_1.as_slice(), originals_2.as_slice()];

        let (new_offsets, meta) = builder.build_chunk_offsets_ordered(originals, 0);
        assert_eq!(new_offsets[0], vec![0, 4200]);
        assert_eq!(new_offsets[1], vec![4000, 4050, 4100, 4150]);
        assert_eq!(meta.total_size, total_size(&stsz_1) + total_size(&stsz_2));
    }

    #[test]
    fn test_build_chunk_offsets_ordered_empty_track_handling() {
        let stsc_1 = stsc(vec![entry(1, 2)]);
        let stsz_1 = stsz(vec![100, 200, 300, 400]);
        let stsc_2 = stsc(vec![entry(1, 1)]);
        let stsz_2 = stsz(vec![]);
        let mut builder = ChunkOffsetBuilder::with_capacity(2);
        builder.add_track(&stsc_1, &stsz_1);
        builder.add_track(&stsc_2, &stsz_2);

        let originals_1 = vec![1000u64, 2000u64];
        let originals_2: Vec<u64> = vec![];
        let originals: Vec<&[u64]> = vec![originals_1.as_slice(), originals_2.as_slice()];

        let (new_offsets, meta) = builder.build_chunk_offsets_ordered(originals, 0);
        assert_eq!(new_offsets[0], vec![0, 300]);
        assert_eq!(new_offsets[1], vec![]);
        assert_eq!(meta.total_size, total_size(&stsz_1) + total_size(&stsz_2));
    }

    #[test]
    fn test_build_chunk_offsets_ordered_non_round_robin_interleaving() {
        let stsc_1 = stsc(vec![entry(1, 1)]);
        let stsz_1 = stsz(vec![100, 150, 200, 250, 300]);
        let stsc_2 = stsc(vec![entry(1, 2)]);
        let stsz_2 = stsz(vec![80, 120, 90, 110, 70, 130, 60, 140]);
        let mut builder = ChunkOffsetBuilder::with_capacity(2);
        builder.add_track(&stsc_1, &stsz_1);
        builder.add_track(&stsc_2, &stsz_2);

        // Original offsets alternate unevenly between the tracks; the new
        // layout must follow the merged offset order, not round-robin.
        let originals_1 = vec![100u64, 300u64, 500u64, 800u64, 1000u64];
        let originals_2 = vec![200u64, 400u64, 600u64, 900u64];
        let originals: Vec<&[u64]> = vec![originals_1.as_slice(), originals_2.as_slice()];

        let (new_offsets, meta) = builder.build_chunk_offsets_ordered(originals, 0);
        assert_eq!(new_offsets[0], vec![0, 300, 650, 1050, 1500]);
        assert_eq!(new_offsets[1], vec![100, 450, 850, 1300]);
        assert_eq!(meta.total_size, total_size(&stsz_1) + total_size(&stsz_2));
    }
}