// ffmpeg_sidecar/event.rs

//! Any event that occurs during the execution of an FFmpeg command.

/// Any event that occurs during the execution of an FFmpeg command,
/// including log messages, parsed metadata, progress updates, and output.
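///
/// # Example
///
/// A minimal sketch of consuming these events, assuming the crate's
/// `FfmpegCommand` builder (from `ffmpeg_sidecar::command`) and its blocking
/// `iter()` over events; the `testsrc` and `rawvideo` presets are used here
/// purely for illustration:
///
/// ```no_run
/// use ffmpeg_sidecar::command::FfmpegCommand;
/// use ffmpeg_sidecar::event::FfmpegEvent;
///
/// FfmpegCommand::new()
///   .testsrc()   // synthetic test video source
///   .rawvideo()  // raw frames on stdout
///   .spawn()
///   .unwrap()
///   .iter()
///   .unwrap()
///   .for_each(|event| match event {
///     FfmpegEvent::OutputFrame(frame) => {
///       println!("frame {}: {}x{}", frame.frame_num, frame.width, frame.height);
///     }
///     FfmpegEvent::Progress(progress) => eprintln!("speed: {}x", progress.speed),
///     FfmpegEvent::Log(_level, msg) => eprintln!("[ffmpeg] {}", msg),
///     _ => {}
///   });
/// ```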
#[derive(Debug, Clone, PartialEq)]
pub enum FfmpegEvent {
  ParsedVersion(FfmpegVersion),
  ParsedConfiguration(FfmpegConfiguration),
  ParsedStreamMapping(String),
  ParsedInput(FfmpegInput),
  ParsedOutput(FfmpegOutput),
  ParsedInputStream(Stream),
  ParsedOutputStream(Stream),
  ParsedDuration(FfmpegDuration),
  Log(LogLevel, String),
  LogEOF,
  /// An error that didn't originate from the FFmpeg logs
  Error(String),
  Progress(FfmpegProgress),
  OutputFrame(OutputVideoFrame),
  /// A chunk of data that may not correspond to a complete frame.
  /// For example, it may contain encoded h264.
  /// These chunks will need to be handled manually, or piped directly to
  /// another FFmpeg instance.
  OutputChunk(Vec<u8>),
  Done,
}

/// The internal log level designated by FFmpeg on each message.
#[derive(Debug, Clone, PartialEq)]
pub enum LogLevel {
  Info,
  Warning,
  Error,
  Fatal,
  Unknown,
}

/// Metadata about an input, parsed from the FFmpeg log output.
#[derive(Debug, Clone, PartialEq)]
pub struct FfmpegInput {
  /// The index of this input among all inputs.
  pub index: u32,
  /// The duration of the input in seconds, if available.
  pub duration: Option<f64>,
  /// The stderr line that this input was parsed from.
  pub raw_log_message: String,
}

/// The duration of an input, parsed from the FFmpeg log output.
#[derive(Debug, Clone, PartialEq)]
pub struct FfmpegDuration {
  /// The index of the input that this duration belongs to.
  pub input_index: u32,
  /// The duration in seconds.
  pub duration: f64,
  /// The stderr line that this duration was parsed from.
  pub raw_log_message: String,
}

/// Metadata about an output, parsed from the FFmpeg log output.
#[derive(Debug, Clone, PartialEq)]
pub struct FfmpegOutput {
  /// The output destination, e.g. a file path, URL, or `pipe:1` for stdout.
  pub to: String,
  /// The index of this output among all outputs.
  pub index: u32,
  /// The stderr line that this output was parsed from.
  pub raw_log_message: String,
}

impl FfmpegOutput {
  /// Detects one of several identifiers which indicate output to stdout.
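  ///
  /// # Example
  ///
  /// A minimal doctest sketch, assuming this module is exposed as
  /// `ffmpeg_sidecar::event` (all fields are public, so the value can be
  /// constructed by hand):
  ///
  /// ```
  /// use ffmpeg_sidecar::event::FfmpegOutput;
  ///
  /// let output = FfmpegOutput {
  ///   to: "pipe:1".to_string(),
  ///   index: 0,
  ///   raw_log_message: String::new(),
  /// };
  /// assert!(output.is_stdout());
  /// ```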
  pub fn is_stdout(&self) -> bool {
    ["pipe", "pipe:", "pipe:1"].contains(&self.to.as_str())
  }
}

/// Represents metadata about a stream.
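///
/// # Example
///
/// A minimal sketch of inspecting a parsed stream, assuming this module is
/// exposed as `ffmpeg_sidecar::event` and that `stream` came from a
/// `ParsedInputStream` or `ParsedOutputStream` event:
///
/// ```no_run
/// # use ffmpeg_sidecar::event::Stream;
/// # fn inspect(stream: &Stream) {
/// if let Some(video) = stream.video_data() {
///   println!("{}x{} @ {} fps ({})", video.width, video.height, video.fps, video.pix_fmt);
/// } else if let Some(audio) = stream.audio_data() {
///   println!("{} Hz, {} channels", audio.sample_rate, audio.channels);
/// } else if stream.is_subtitle() {
///   println!("subtitle stream ({})", stream.language);
/// }
/// # }
/// ```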
#[derive(Debug, Clone, PartialEq)]
pub struct Stream {
  /// Corresponds to stream `-f` parameter, e.g. `rawvideo`, `h264`, `opus` or `srt`.
  pub format: String,
  /// The language of the stream as a three letter code such as `eng`, `ger` or `jpn`.
  pub language: String,
  /// The index of the input or output that this stream belongs to.
  pub parent_index: u32,
  /// The index of the stream within its parent input or output.
  pub stream_index: u32,
  /// The stderr line that this stream was parsed from.
  pub raw_log_message: String,
  /// Data that is specific to a certain stream type.
  pub type_specific_data: StreamTypeSpecificData,
}

impl Stream {
  /// Returns `true` if this is an audio stream.
  pub fn is_audio(&self) -> bool {
    matches!(self.type_specific_data, StreamTypeSpecificData::Audio(_))
  }
  /// Returns `true` if this is a subtitle stream.
  pub fn is_subtitle(&self) -> bool {
    matches!(self.type_specific_data, StreamTypeSpecificData::Subtitle())
  }
  /// Returns `true` if this is a video stream.
  pub fn is_video(&self) -> bool {
    matches!(self.type_specific_data, StreamTypeSpecificData::Video(_))
  }
  /// Returns `true` if this stream is neither audio, video, nor subtitle.
  pub fn is_other(&self) -> bool {
    matches!(self.type_specific_data, StreamTypeSpecificData::Other())
  }

  /// Returns the audio-specific metadata, if this is an audio stream.
  pub fn audio_data(&self) -> Option<&AudioStream> {
    match &self.type_specific_data {
      StreamTypeSpecificData::Audio(audio_stream) => Some(audio_stream),
      _ => None,
    }
  }
  /// Returns the video-specific metadata, if this is a video stream.
  pub fn video_data(&self) -> Option<&VideoStream> {
    match &self.type_specific_data {
      StreamTypeSpecificData::Video(video_stream) => Some(video_stream),
      _ => None,
    }
  }
}

/// Represents metadata that is specific to a stream, e.g. fields that are only found in audio
/// streams or that are only found in video streams, etc. Storing this in an enum allows functions
/// to accept the generic `Stream` type regardless of its actual type (audio, video, ...).
#[derive(Debug, Clone, PartialEq)]
pub enum StreamTypeSpecificData {
  Audio(AudioStream),
  Video(VideoStream),
  Subtitle(),
  Other(),
}

/// Represents metadata that is specific to audio streams.
#[derive(Debug, Clone, PartialEq)]
pub struct AudioStream {
  /// The sample rate of the audio stream, e.g. 48000 (Hz)
  pub sample_rate: u32,
  /// The channel layout of the audio stream, e.g. `stereo`, `5.1` or `7.1`
  pub channels: String,
}

/// Represents metadata that is specific to video streams.
#[derive(Debug, Clone, PartialEq)]
pub struct VideoStream {
  /// Corresponds to stream `-pix_fmt` parameter, e.g. `rgb24`
  pub pix_fmt: String,
  /// Width in pixels
  pub width: u32,
  /// Height in pixels
  pub height: u32,
  /// Framerate in frames per second
  pub fps: f32,
}

/// The version of the FFmpeg binary, parsed from its log output.
#[derive(Debug, Clone, PartialEq)]
pub struct FfmpegVersion {
  /// The version string, e.g. `6.0`.
  pub version: String,
  /// The stderr line that this version was parsed from.
  pub raw_log_message: String,
}

/// The configuration flags that the FFmpeg binary was built with, parsed from its log output.
#[derive(Debug, Clone, PartialEq)]
pub struct FfmpegConfiguration {
  /// The configuration flags, e.g. `--enable-gpl`.
  pub configuration: Vec<String>,
  /// The stderr line that this configuration was parsed from.
  pub raw_log_message: String,
}

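/// A progress update, parsed from the periodic status lines FFmpeg prints to stderr.
///
/// # Example
///
/// A minimal sketch of turning the raw `time` string into seconds; the
/// `parse_time_seconds` helper below is hypothetical and not part of this crate:
///
/// ```
/// // Hypothetical helper: convert a timestamp like `00:03:29.04` into seconds.
/// fn parse_time_seconds(time: &str) -> Option<f64> {
///   let mut parts = time.split(':');
///   let hours: f64 = parts.next()?.parse().ok()?;
///   let minutes: f64 = parts.next()?.parse().ok()?;
///   let seconds: f64 = parts.next()?.parse().ok()?;
///   Some(hours * 3600.0 + minutes * 60.0 + seconds)
/// }
///
/// let secs = parse_time_seconds("00:03:29.04").unwrap();
/// assert!((secs - 209.04).abs() < 1e-6);
/// ```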
#[derive(Debug, Clone, PartialEq)]
pub struct FfmpegProgress {
  /// Index of the current output frame
  pub frame: u32,

  /// Frames processed per second (the processing rate, not the video frame rate)
  pub fps: f32,

  /// Quality factor (if applicable)
  pub q: f32,

  /// Current total size of the output in kilobytes
  pub size_kb: u32,

  /// The raw time string in a format like `00:03:29.04`
  pub time: String,

  /// Bitrate in kilo**bits** per second
  pub bitrate_kbps: f32,

  /// Processing speed as a ratio of the input duration
  ///
  /// - 1x is realtime
  /// - 2x means 2 seconds of input are processed in 1 second of wall clock time
  pub speed: f32,

  /// The line that this progress was parsed from
  pub raw_log_message: String,
}

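/// A raw video frame decoded from an FFmpeg output.
///
/// # Example
///
/// A minimal sketch of interpreting `data`, assuming this module is exposed as
/// `ffmpeg_sidecar::event` and that the output was requested as raw video with
/// `-pix_fmt rgb24` (3 bytes per pixel, row-major layout):
///
/// ```no_run
/// # use ffmpeg_sidecar::event::OutputVideoFrame;
/// # fn handle(frame: &OutputVideoFrame) {
/// if frame.pix_fmt == "rgb24" {
///   assert_eq!(frame.data.len(), (frame.width * frame.height * 3) as usize);
///   // The pixel at (x, y) starts at byte index ((y * frame.width + x) * 3).
/// }
/// # }
/// ```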
#[derive(Clone, PartialEq)]
pub struct OutputVideoFrame {
  /// The width of this video frame in pixels
  pub width: u32,
  /// The height of this video frame in pixels
  pub height: u32,
  /// The pixel format of the video frame, corresponding to the chosen
  /// `-pix_fmt` FFmpeg parameter.
  pub pix_fmt: String,
  /// The index of the FFmpeg output stream that emitted this frame.
  /// In a typical case, there is only one output stream and this will be 0.
  pub output_index: u32,
  /// Raw image frame data. The layout of the pixels in memory depends on
  /// `width`, `height`, and `pix_fmt`.
  pub data: Vec<u8>,
  /// Index of the current frame, starting at 0 and monotonically increasing by 1
  pub frame_num: u32,
  /// Output frame timestamp in seconds
  pub timestamp: f32,
}

impl std::fmt::Debug for OutputVideoFrame {
  /// Omit the raw `data` field from the debug output, printing only the
  /// frame's dimensions, pixel format, and output index
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("OutputVideoFrame")
      .field("width", &self.width)
      .field("height", &self.height)
      .field("pix_fmt", &self.pix_fmt)
      .field("output_index", &self.output_index)
      .finish()
  }
}

// TODO: fix the Debug output for OutputChunk as well