//! `wedeo_core/frame.rs` — decoded audio/video frame types, modeled on FFmpeg's `AVFrame`.
1use bitflags::bitflags;
2
3use crate::buffer::Buffer;
4use crate::channel_layout::ChannelLayout;
5use crate::chroma_location::ChromaLocation;
6use crate::color_primaries::ColorPrimaries;
7use crate::color_trc::ColorTransferCharacteristic;
8use crate::frame_side_data::{FrameSideData, FrameSideDataType};
9use crate::metadata::Metadata;
10use crate::pixel_format::PixelFormat;
11use crate::rational::Rational;
12use crate::sample_format::SampleFormat;
13use crate::timestamp::NOPTS_VALUE;
14
/// Maximum number of data planes in a video frame
/// (e.g. Y, U, V, A for a planar YUV format with alpha).
pub const VIDEO_MAX_PLANES: usize = 4;

/// Maximum number of data planes in an audio frame
/// (one plane per channel for planar sample formats — presumably caps
/// channel count for planar layouts; confirm against the allocator).
pub const AUDIO_MAX_PLANES: usize = 8;
20
/// Picture type, matching FFmpeg's `AVPictureType`.
///
/// Defaults to [`PictureType::None`] (unknown/undefined), mirroring
/// FFmpeg's `AV_PICTURE_TYPE_NONE`.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Hash)]
#[repr(u8)]
pub enum PictureType {
    /// Undefined / unknown.
    #[default]
    None = 0,
    /// Intra-coded picture.
    I = 1,
    /// Predicted picture.
    P = 2,
    /// Bi-directionally predicted picture.
    B = 3,
    /// S(GMC)-VOP (MPEG-4).
    S = 4,
    /// Switching intra picture.
    Si = 5,
    /// Switching predicted picture.
    Sp = 6,
    /// BI type.
    Bi = 7,
}
34
/// Color range, matching FFmpeg's `AVColorRange`.
///
/// Defaults to [`ColorRange::Unspecified`], mirroring FFmpeg's
/// `AVCOL_RANGE_UNSPECIFIED`.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Hash)]
#[repr(u32)]
pub enum ColorRange {
    /// Range is not known or not specified.
    #[default]
    Unspecified = 0,
    /// The normal 219*2^(n-8) "MPEG" (limited/studio) YUV ranges.
    Mpeg = 1,
    /// The normal 2^n-1 "JPEG" (full) YUV ranges.
    Jpeg = 2,
}
45
/// Color space, matching FFmpeg's `AVColorSpace`.
///
/// Defaults to [`ColorSpace::Unspecified`], mirroring FFmpeg's
/// `AVCOL_SPC_UNSPECIFIED`. Discriminant 3 is intentionally skipped:
/// it is `AVCOL_SPC_RESERVED` in FFmpeg and carries no meaning here.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Hash)]
#[repr(u32)]
pub enum ColorSpace {
    /// Identity (RGB / sRGB / XYZ).
    Rgb = 0,
    /// ITU-R BT.709.
    Bt709 = 1,
    /// Color space is not known or not specified.
    #[default]
    Unspecified = 2,
    /// US FCC Title 47.
    Fcc = 4,
    /// ITU-R BT.470BG (PAL/SECAM).
    Bt470bg = 5,
    /// SMPTE 170M (NTSC).
    Smpte170m = 6,
    /// SMPTE 240M.
    Smpte240m = 7,
    /// YCgCo.
    Ycgco = 8,
    /// ITU-R BT.2020 non-constant luminance.
    Bt2020Ncl = 9,
    /// ITU-R BT.2020 constant luminance.
    Bt2020Cl = 10,
}
61
bitflags! {
    /// Frame flags, matching FFmpeg's AV_FRAME_FLAG_*.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    pub struct FrameFlags: u32 {
        /// The frame data may be corrupted (e.g. due to decoding errors).
        const CORRUPT    = 1 << 0;
        /// The frame is a keyframe.
        const KEY        = 1 << 1;
        /// The frame should be discarded by the caller.
        const DISCARD    = 1 << 2;
        /// The content of the picture is interlaced.
        const INTERLACED = 1 << 3;
        /// For interlaced content: the top field is displayed first.
        const TOP_FIRST  = 1 << 4;
    }
}
73
/// A plane of frame data (pointer into a Buffer).
///
/// A plane never owns its bytes directly; it references a region of a
/// shared [`Buffer`] via `offset`, so multiple planes may share one buffer.
#[derive(Debug, Clone)]
pub struct FramePlane {
    /// The backing buffer holding this plane's bytes.
    pub buffer: Buffer,
    /// Offset into the buffer where this plane's data starts.
    pub offset: usize,
    /// Stride (bytes per row for video, total bytes for audio plane).
    pub linesize: usize,
}
83
/// Video-specific frame data.
#[derive(Debug, Clone)]
pub struct VideoFrameData {
    /// One entry per pixel-format plane; empty until data is attached
    /// (constructors create frames with no planes).
    pub planes: Vec<FramePlane>,
    /// Frame width in pixels (presumably pre-crop dimensions, since the
    /// crop_* fields below describe pixels to discard — confirm).
    pub width: u32,
    /// Frame height in pixels.
    pub height: u32,
    /// Pixel format describing plane count and layout.
    pub format: PixelFormat,
    /// Coding type of this picture (I/P/B/…).
    pub picture_type: PictureType,
    /// Limited ("MPEG") vs full ("JPEG") YUV range.
    pub color_range: ColorRange,
    /// YUV↔RGB matrix coefficients.
    pub color_space: ColorSpace,
    /// Chromaticity coordinates of the source primaries.
    pub color_primaries: ColorPrimaries,
    /// Transfer characteristic (gamma/EOTF).
    pub color_trc: ColorTransferCharacteristic,
    /// Position of chroma samples relative to luma.
    pub chroma_location: ChromaLocation,
    /// Pixel aspect ratio; 0/1 conventionally means "unknown".
    pub sample_aspect_ratio: Rational,
    /// Crop rectangle — number of pixels to discard from each edge.
    pub crop_top: u32,
    pub crop_bottom: u32,
    pub crop_left: u32,
    pub crop_right: u32,
}
104
/// Audio-specific frame data.
#[derive(Debug, Clone)]
pub struct AudioFrameData {
    /// Data planes — presumably one per channel for planar sample formats
    /// and a single interleaved plane otherwise (confirm against the
    /// allocator); empty until data is attached.
    pub planes: Vec<FramePlane>,
    /// Number of audio samples per channel in this frame.
    pub nb_samples: u32,
    /// Sample format describing bit depth and planar/interleaved layout.
    pub format: SampleFormat,
    /// Sample rate in Hz.
    pub sample_rate: u32,
    /// Channel count and speaker layout.
    pub channel_layout: ChannelLayout,
}
114
/// Frame data — either video or audio.
/// Using an enum prevents accessing width on audio frames (compile-time safety).
#[derive(Debug, Clone)]
pub enum FrameData {
    /// Video payload (planes, dimensions, colorimetry).
    Video(VideoFrameData),
    /// Audio payload (planes, sample count, format, layout).
    Audio(AudioFrameData),
}
122
/// Decoded frame, matching FFmpeg's AVFrame concept.
///
/// Timestamp fields use `NOPTS_VALUE` to mean "unknown" (see the
/// constructors in the `impl` block).
#[derive(Debug, Clone)]
pub struct Frame {
    /// The media payload — video or audio, never both.
    pub data: FrameData,
    /// Presentation timestamp in time_base units.
    pub pts: i64,
    /// Decompression timestamp, copied from the packet that triggered output.
    pub pkt_dts: i64,
    /// Best-effort timestamp estimated by the framework.
    pub best_effort_timestamp: i64,
    /// Duration in time_base units.
    pub duration: i64,
    /// Time base for pts/duration.
    pub time_base: Rational,
    /// Flag bits (keyframe, corrupt, interlacing, …).
    pub flags: FrameFlags,
    /// Extra display-duration hint; matches FFmpeg's AVFrame.repeat_pict
    /// (presumably the number of extra times the picture/fields should be
    /// shown — confirm against the display code that consumes it).
    pub repeat_pict: i32,
    /// Metadata key-value pairs.
    pub metadata: Metadata,
    /// Side data associated with this frame.
    pub side_data: Vec<FrameSideData>,
}
146
147impl Frame {
148    /// Create a new video frame.
149    pub fn new_video(width: u32, height: u32, format: PixelFormat) -> Self {
150        Self {
151            data: FrameData::Video(VideoFrameData {
152                planes: Vec::new(),
153                width,
154                height,
155                format,
156                picture_type: PictureType::None,
157                color_range: ColorRange::Unspecified,
158                color_space: ColorSpace::Unspecified,
159                color_primaries: ColorPrimaries::Unspecified,
160                color_trc: ColorTransferCharacteristic::Unspecified,
161                chroma_location: ChromaLocation::Unspecified,
162                sample_aspect_ratio: Rational::new(0, 1),
163                crop_top: 0,
164                crop_bottom: 0,
165                crop_left: 0,
166                crop_right: 0,
167            }),
168            pts: NOPTS_VALUE,
169            pkt_dts: NOPTS_VALUE,
170            best_effort_timestamp: NOPTS_VALUE,
171            duration: 0,
172            time_base: Rational::new(0, 1),
173            flags: FrameFlags::empty(),
174            repeat_pict: 0,
175            metadata: Metadata::new(),
176            side_data: Vec::new(),
177        }
178    }
179
180    /// Create a new audio frame.
181    pub fn new_audio(
182        nb_samples: u32,
183        format: SampleFormat,
184        sample_rate: u32,
185        channel_layout: ChannelLayout,
186    ) -> Self {
187        Self {
188            data: FrameData::Audio(AudioFrameData {
189                planes: Vec::new(),
190                nb_samples,
191                format,
192                sample_rate,
193                channel_layout,
194            }),
195            pts: NOPTS_VALUE,
196            pkt_dts: NOPTS_VALUE,
197            best_effort_timestamp: NOPTS_VALUE,
198            duration: 0,
199            time_base: Rational::new(0, 1),
200            flags: FrameFlags::empty(),
201            repeat_pict: 0,
202            metadata: Metadata::new(),
203            side_data: Vec::new(),
204        }
205    }
206
207    /// Returns true if this is a video frame.
208    pub fn is_video(&self) -> bool {
209        matches!(self.data, FrameData::Video(_))
210    }
211
212    /// Returns true if this is an audio frame.
213    pub fn is_audio(&self) -> bool {
214        matches!(self.data, FrameData::Audio(_))
215    }
216
217    /// Get video data, if this is a video frame.
218    pub fn video(&self) -> Option<&VideoFrameData> {
219        match &self.data {
220            FrameData::Video(v) => Some(v),
221            _ => None,
222        }
223    }
224
225    /// Get mutable video data, if this is a video frame.
226    pub fn video_mut(&mut self) -> Option<&mut VideoFrameData> {
227        match &mut self.data {
228            FrameData::Video(v) => Some(v),
229            _ => None,
230        }
231    }
232
233    /// Get audio data, if this is an audio frame.
234    pub fn audio(&self) -> Option<&AudioFrameData> {
235        match &self.data {
236            FrameData::Audio(a) => Some(a),
237            _ => None,
238        }
239    }
240
241    /// Get mutable audio data, if this is an audio frame.
242    pub fn audio_mut(&mut self) -> Option<&mut AudioFrameData> {
243        match &mut self.data {
244            FrameData::Audio(a) => Some(a),
245            _ => None,
246        }
247    }
248
249    /// Get side data of a specific type.
250    pub fn get_side_data(&self, data_type: FrameSideDataType) -> Option<&FrameSideData> {
251        self.side_data.iter().find(|sd| sd.data_type == data_type)
252    }
253
254    /// Add side data to the frame.
255    ///
256    /// Does NOT replace existing data of the same type — multiple entries of
257    /// the same type are allowed, matching FFmpeg's `av_frame_new_side_data`.
258    /// Use `remove_side_data` first if you need unique-per-type behavior.
259    pub fn add_side_data(&mut self, side_data: FrameSideData) {
260        self.side_data.push(side_data);
261    }
262
263    /// Remove side data of a specific type.
264    pub fn remove_side_data(&mut self, data_type: FrameSideDataType) {
265        self.side_data.retain(|sd| sd.data_type != data_type);
266    }
267}