// async_ffmpeg_sidecar/event.rs

/// Any event that occurs during the execution of an FFmpeg command,
/// including log messages, parsed metadata, progress updates, and output.
#[derive(Debug, Clone, PartialEq)]
pub enum FfmpegEvent {
  ParsedVersion(FfmpegVersion),
  ParsedConfiguration(FfmpegConfiguration),
  ParsedStreamMapping(String),
  ParsedInput(FfmpegInput),
  ParsedOutput(FfmpegOutput),
  ParsedInputStream(FfmpegStream),
  ParsedOutputStream(FfmpegStream),
  ParsedDuration(FfmpegDuration),
  Log(LogLevel, String),
  LogEOF,
  /// An error that didn't originate from the ffmpeg logs
  Error(String),
  Progress(FfmpegProgress),
  // Not parsing output frames for now
  // OutputFrame(OutputVideoFrame),
  /// A chunk of data that may not correspond to a complete frame.
  /// For example, it may contain encoded h264.
  /// These chunks will need to be handled manually, or piped directly to
  /// another ffmpeg instance.
  OutputChunk(Vec<u8>),
  Done,
}
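
// Illustrative sketch (not part of the crate's public API): consumers
// typically match on each `FfmpegEvent` as it arrives from the sidecar's
// event stream. The `handle_event` function below is hypothetical and only
// uses types defined in this module.
#[allow(dead_code)]
fn handle_event(event: FfmpegEvent) {
  match event {
    FfmpegEvent::Progress(progress) => {
      println!("frame {} at {}x speed", progress.frame, progress.speed);
    }
    FfmpegEvent::Log(LogLevel::Error | LogLevel::Fatal, message) => {
      eprintln!("ffmpeg error: {message}");
    }
    FfmpegEvent::OutputChunk(bytes) => {
      // Chunks may not align to frame boundaries; buffer them or pipe them
      // onward to another process.
      println!("received {} bytes of encoded output", bytes.len());
    }
    FfmpegEvent::Done => println!("ffmpeg finished"),
    _ => {}
  }
}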

/// The internal log level designated by FFmpeg on each message.
#[derive(Debug, Clone, PartialEq)]
pub enum LogLevel {
  Info,
  Warning,
  Error,
  Fatal,
  Unknown,
}

#[derive(Debug, Clone, PartialEq)]
pub struct FfmpegInput {
  pub index: u32,
  pub duration: Option<f64>,
  pub raw_log_message: String,
}

#[derive(Debug, Clone, PartialEq)]
pub struct FfmpegDuration {
  pub input_index: u32,
  pub duration: f64,
  pub raw_log_message: String,
}

#[derive(Debug, Clone, PartialEq)]
pub struct FfmpegOutput {
  pub to: String,
  pub index: u32,
  pub raw_log_message: String,
}

impl FfmpegOutput {
  /// Detects one of several identifiers which indicate output to stdout
  pub fn is_stdout(&self) -> bool {
    ["pipe", "pipe:1", "pipe:"].contains(&self.to.as_str())
  }
}
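
// Minimal sketch of `is_stdout` in use; the field values below are made up
// for illustration, assuming `to` holds the output target as printed by
// ffmpeg (a file path, or a `pipe` identifier for stdout).
#[cfg(test)]
mod ffmpeg_output_tests {
  use super::*;

  #[test]
  fn detects_stdout_outputs() {
    let stdout_output = FfmpegOutput {
      to: "pipe:".to_string(),
      index: 0,
      raw_log_message: "Output #0, rawvideo, to 'pipe:':".to_string(),
    };
    let file_output = FfmpegOutput {
      to: "output.mp4".to_string(),
      index: 1,
      raw_log_message: "Output #1, mp4, to 'output.mp4':".to_string(),
    };
    assert!(stdout_output.is_stdout());
    assert!(!file_output.is_stdout());
  }
}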

/// Represents metadata about a stream.
#[derive(Debug, Clone, PartialEq)]
pub struct FfmpegStream {
  /// Corresponds to the stream `-f` parameter, e.g. `rawvideo`, `h264`, `opus` or `srt`.
  pub format: String,
  /// The language of the stream as a three-letter code such as `eng`, `ger` or `jpn`.
  pub language: String,
  /// The index of the input or output that this stream belongs to.
  pub parent_index: u32,
  /// The index of the stream inside the input.
  pub stream_index: u32,
  /// The stderr line that this stream was parsed from.
  pub raw_log_message: String,
  /// Data that is specific to a certain stream type.
  pub type_specific_data: StreamTypeSpecificData,
}

impl FfmpegStream {
  pub fn is_audio(&self) -> bool {
    matches!(self.type_specific_data, StreamTypeSpecificData::Audio(_))
  }

  pub fn is_subtitle(&self) -> bool {
    matches!(self.type_specific_data, StreamTypeSpecificData::Subtitle)
  }

  pub fn is_video(&self) -> bool {
    matches!(self.type_specific_data, StreamTypeSpecificData::Video(_))
  }

  pub fn is_other(&self) -> bool {
    matches!(self.type_specific_data, StreamTypeSpecificData::Other)
  }

  pub fn audio_data(&self) -> Option<&AudioStream> {
    match &self.type_specific_data {
      StreamTypeSpecificData::Audio(audio_stream) => Some(audio_stream),
      _ => None,
    }
  }

  pub fn video_data(&self) -> Option<&VideoStream> {
    match &self.type_specific_data {
      StreamTypeSpecificData::Video(video_stream) => Some(video_stream),
      _ => None,
    }
  }
}
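
// Illustrative sketch: constructing a stream by hand and inspecting its
// type-specific data. The field values are made up; in practice they come
// from the ffmpeg stderr parser.
#[cfg(test)]
mod ffmpeg_stream_tests {
  use super::*;

  #[test]
  fn audio_stream_accessors() {
    let stream = FfmpegStream {
      format: "opus".to_string(),
      language: "eng".to_string(),
      parent_index: 0,
      stream_index: 1,
      raw_log_message: "Stream #0:1(eng): Audio: opus, 48000 Hz, stereo".to_string(),
      type_specific_data: StreamTypeSpecificData::Audio(AudioStream {
        sample_rate: 48000,
        channels: "stereo".to_string(),
      }),
    };
    assert!(stream.is_audio());
    assert!(!stream.is_video() && !stream.is_subtitle() && !stream.is_other());
    assert_eq!(stream.audio_data().map(|a| a.sample_rate), Some(48000));
    assert!(stream.video_data().is_none());
  }
}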

/// Represents metadata that is specific to a stream, e.g. fields that are only found in audio
/// streams or only in video streams. Storing this in an enum allows functions to accept the
/// generic `FfmpegStream` type regardless of its actual type (audio, video, ...).
#[derive(Debug, Clone, PartialEq)]
pub enum StreamTypeSpecificData {
  Audio(AudioStream),
  Video(VideoStream),
  Subtitle,
  Other,
}

/// Represents metadata that is specific to audio streams.
#[derive(Debug, Clone, PartialEq)]
pub struct AudioStream {
  /// The sample rate of the audio stream, e.g. 48000 (Hz)
  pub sample_rate: u32,
  /// The number of channels of the audio stream, e.g. `stereo`, `5.1`, or `7.1`
  pub channels: String,
}

/// Represents metadata that is specific to video streams.
#[derive(Debug, Clone, PartialEq)]
pub struct VideoStream {
  /// Corresponds to the stream `-pix_fmt` parameter, e.g. `rgb24`
  pub pix_fmt: String,
  /// Width in pixels
  pub width: u32,
  /// Height in pixels
  pub height: u32,
  /// Framerate in frames per second
  pub fps: f32,
}
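
// Hypothetical helper (not part of the crate): the expected size in bytes of
// one raw frame for a `VideoStream`, assuming a packed 8-bit format such as
// `rgb24` (3 bytes per pixel). Other pixel formats need different math.
#[allow(dead_code)]
fn rgb24_frame_size(stream: &VideoStream) -> usize {
  stream.width as usize * stream.height as usize * 3
}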

#[derive(Debug, Clone, PartialEq)]
pub struct FfmpegVersion {
  pub version: String,
  pub raw_log_message: String,
}

#[derive(Debug, Clone, PartialEq)]
pub struct FfmpegConfiguration {
  pub configuration: Vec<String>,
  pub raw_log_message: String,
}

#[derive(Debug, Clone, PartialEq)]
pub struct FfmpegProgress {
  /// Index of the current output frame
  pub frame: u32,
  /// Frames per second
  pub fps: f32,
  /// Quality factor (if applicable)
  pub q: f32,
  /// Current total size of the output in kilobytes
  pub size_kb: u32,
  /// The raw time string in a format like `00:03:29.04`
  pub time: String,
  /// Bitrate in kilo**bits** per second
  pub bitrate_kbps: f32,
  /// Processing speed as a ratio of the input duration
  ///
  /// - 1x is realtime
  /// - 2x means 2 seconds of input are processed in 1 second of wall clock time
  pub speed: f32,
  /// The line that this progress was parsed from
  pub raw_log_message: String,
}
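
// Hypothetical helpers (not part of the crate): convert the raw `time` string
// (e.g. `00:03:29.04`) into seconds, and combine it with a parsed
// `FfmpegDuration` to estimate completion. Both assume the `HH:MM:SS.ss`
// layout shown above and return `None` if the string does not match it.
#[allow(dead_code)]
fn progress_time_seconds(progress: &FfmpegProgress) -> Option<f64> {
  let mut parts = progress.time.split(':');
  let hours: f64 = parts.next()?.parse().ok()?;
  let minutes: f64 = parts.next()?.parse().ok()?;
  let seconds: f64 = parts.next()?.parse().ok()?;
  Some(hours * 3600.0 + minutes * 60.0 + seconds)
}

#[allow(dead_code)]
fn progress_percent(duration: &FfmpegDuration, progress: &FfmpegProgress) -> Option<f64> {
  let elapsed = progress_time_seconds(progress)?;
  Some((elapsed / duration.duration * 100.0).min(100.0))
}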

// #[derive(Clone, PartialEq)]
// pub struct OutputVideoFrame {
//     /// The width of this video frame in pixels
//     pub width: u32,
//     /// The height of this video frame in pixels
//     pub height: u32,
//     /// The pixel format of the video frame, corresponding to the chosen
//     /// `-pix_fmt` FFmpeg parameter.
//     pub pix_fmt: String,
//     /// The index of the FFmpeg output stream that emitted this frame.
//     /// In a typical case, there is only one output stream and this will be 0.
//     pub output_index: u32,
//     /// Raw image frame data. The layout of the pixels in memory depends on
//     /// `width`, `height`, and `pix_fmt`.
//     pub data: Vec<u8>,
//     /// Index of current frame, starting at 0 and monotonically increasing by 1
//     pub frame_num: u32,
//     /// Output frame timestamp in seconds
//     pub timestamp: f32,
// }

// impl std::fmt::Debug for OutputVideoFrame {
//     /// Omit the `data` field from the debug output
//     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
//         f.debug_struct("OutputVideoFrame")
//             .field("width", &self.width)
//             .field("height", &self.height)
//             .field("pix_fmt", &self.pix_fmt)
//             .field("output_index", &self.output_index)
//             .finish()
//     }
// }