// adder_codec_rs/transcoder/source/framed.rs

use crate::transcoder::source::video::SourceError;
use crate::transcoder::source::video::Video;
use crate::transcoder::source::video::{Source, VideoBuilder};
use adder_codec_core::Mode::FramePerfect;
use adder_codec_core::{DeltaT, Event, PixelMultiMode, PlaneSize, SourceCamera, TimeMode};

use crate::utils::viz::ShowFeatureMode;
use adder_codec_core::codec::{EncoderOptions, EncoderType};

use crate::utils::cv::handle_color;
#[cfg(feature = "feature-logging")]
use crate::utils::cv::{calculate_quality_metrics, QualityMetrics};

use std::io::Write;
use std::path::PathBuf;

#[cfg(feature = "feature-logging")]
use chrono::Local;
use video_rs_adder_dep::{self, Decoder, Frame, Locator, Options, Resize};
/// Attributes of a framed video -> ADΔER transcode
pub struct Framed<W: Write + 'static + std::marker::Send + std::marker::Sync> {
    // Decoder for the input video file (via the video-rs wrapper crate)
    cap: Decoder,
    // The most recently decoded (and color-handled) input frame
    pub(crate) input_frame: Frame,

    /// Index of the first frame to be read from the input video
    pub frame_idx_start: u32,

    /// FPS of the input video. Set automatically by `Framed::new()`
    pub source_fps: f32,

    /// Scale of the input video. Input frames are resized to this scale before transcoding.
    pub scale: f64,

    /// Whether the input video is color
    color_input: bool,

    // The underlying ADΔER transcode state and encoder
    pub(crate) video: Video<W>,
}
// SAFETY: NOTE(review): this manual impl asserts that `&Framed<W>` may be
// shared across threads, presumably because `Decoder` is not `Sync` on its
// own. All mutation visible here goes through `&mut self` methods, but the
// soundness of this claim depends on video-rs's internals — confirm against
// that crate's thread-safety guarantees before relying on it.
unsafe impl<W: Write + std::marker::Send + std::marker::Sync> Sync for Framed<W> {}
41
42impl<W: Write + 'static + std::marker::Send + std::marker::Sync> Framed<W> {
43    /// Create a new `Framed` source
44    pub fn new(
45        input_path: PathBuf,
46        color_input: bool,
47        scale: f64,
48    ) -> Result<Framed<W>, SourceError> {
49        let source = Locator::Path(input_path);
50        let mut cap = Decoder::new(&source)?;
51        let (width, height) = cap.size();
52        let width = ((width as f64) * scale) as u32;
53        let height = ((height as f64) * scale) as u32;
54
55        cap = Decoder::new_with_options_and_resize(
56            &source,
57            &Options::default(),
58            Resize::Fit(width, height),
59        )?;
60
61        // Calculate TPS based on ticks per frame and source FPS
62        let source_fps = cap.frame_rate();
63        let (width, height) = cap.size_out();
64
65        let plane = PlaneSize::new(width as u16, height as u16, if color_input { 3 } else { 1 })?;
66
67        let video = Video::new(plane, FramePerfect, None)?;
68
69        Ok(Framed {
70            cap,
71            input_frame: Frame::default((height as usize, width as usize, 3)), // Note that this will be limited to 8-bit precision (due to video-rs crate)
72            frame_idx_start: 0,
73            source_fps,
74            scale,
75            color_input,
76            video,
77        })
78    }
79
80    /// Set the start frame of the source
81    pub fn frame_start(mut self, frame_idx_start: u32) -> Result<Self, SourceError> {
82        let video_frame_count = self.cap.frame_count();
83        if frame_idx_start >= video_frame_count as u32 {
84            return Err(SourceError::StartOutOfBounds(frame_idx_start));
85        };
86        let ts_millis = (frame_idx_start as f32 / self.source_fps * 1000.0) as i64;
87        self.cap.reader.seek(ts_millis)?;
88
89        self.frame_idx_start = frame_idx_start;
90        Ok(self)
91    }
92
93    /// Automatically derive the ticks per second from the source FPS and `ref_time`
94    pub fn auto_time_parameters(
95        mut self,
96        ref_time: DeltaT,
97        delta_t_max: DeltaT,
98        time_mode: Option<TimeMode>,
99    ) -> Result<Self, SourceError> {
100        if delta_t_max % ref_time == 0 {
101            let tps = (ref_time as f32 * self.source_fps) as DeltaT;
102            self.video = self
103                .video
104                .time_parameters(tps, ref_time, delta_t_max, time_mode)?;
105        } else {
106            return Err(SourceError::BadParams(
107                "delta_t_max must be a multiple of ref_time".to_string(),
108            ));
109        }
110        Ok(self)
111    }
112
113    /// Get the number of ticks each frame is said to span
114    pub fn get_ref_time(&self) -> u32 {
115        self.video.state.params.ref_time
116    }
117
118    /// Get the previous input frame
119    pub fn get_last_input_frame(&self) -> &Frame {
120        &self.input_frame
121    }
122}
123
124impl<W: Write + 'static + std::marker::Send + std::marker::Sync> Source<W> for Framed<W> {
125    /// Get pixel-wise intensities directly from source frame, and integrate them with
126    /// `ref_time` (the number of ticks each frame is said to span)
127    fn consume(&mut self) -> Result<Vec<Vec<Event>>, SourceError> {
128        let (_, frame) = self.cap.decode()?;
129        self.input_frame = handle_color(frame, self.color_input)?;
130
131        let res = self.video.integrate_matrix(
132            self.input_frame.clone(),
133            self.video.state.params.ref_time as f32,
134        );
135        #[cfg(feature = "feature-logging")]
136        {
137            if let Some(handle) = &mut self.video.state.feature_log_handle {
138                // Calculate the quality metrics
139                let mut image_mat = self.video.state.running_intensities.clone();
140
141                #[rustfmt::skip]
142                    let metrics = calculate_quality_metrics(
143                    &self.input_frame,
144                    &image_mat,
145                    QualityMetrics {
146                        mse: Some(0.0),
147                        psnr: Some(0.0),
148                        ssim: None,
149                    });
150
151                let metrics = metrics.unwrap();
152                let bytes = serde_pickle::to_vec(&metrics, Default::default()).unwrap();
153                handle.write_all(&bytes).unwrap();
154            }
155        }
156        res
157    }
158
159    fn crf(&mut self, crf: u8) {
160        self.video.update_crf(crf);
161    }
162
163    fn get_video_mut(&mut self) -> &mut Video<W> {
164        &mut self.video
165    }
166
167    fn get_video_ref(&self) -> &Video<W> {
168        &self.video
169    }
170
171    fn get_video(self) -> Video<W> {
172        todo!()
173    }
174
175    fn get_input(&self) -> Option<&Frame> {
176        Some(self.get_last_input_frame())
177    }
178
179    fn get_running_input_bitrate(&self) -> f64 {
180        let video = self.get_video_ref();
181        video.get_tps() as f64 / video.get_ref_time() as f64
182            * video.state.plane.volume() as f64
183            * 8.0
184    }
185}
186
187impl<W: Write + 'static + std::marker::Send + std::marker::Sync> VideoBuilder<W> for Framed<W> {
188    fn crf(mut self, crf: u8) -> Self {
189        self.video.update_crf(crf);
190        self
191    }
192
193    fn quality_manual(
194        mut self,
195        c_thresh_baseline: u8,
196        c_thresh_max: u8,
197        delta_t_max_multiplier: u32,
198        c_increase_velocity: u8,
199        feature_c_radius_denom: f32,
200    ) -> Self {
201        self.video.update_quality_manual(
202            c_thresh_baseline,
203            c_thresh_max,
204            delta_t_max_multiplier,
205            c_increase_velocity,
206            feature_c_radius_denom,
207        );
208        self
209    }
210
211    fn chunk_rows(mut self, chunk_rows: usize) -> Self {
212        self.video = self.video.chunk_rows(chunk_rows);
213        self
214    }
215
216    fn time_parameters(
217        mut self,
218        tps: DeltaT,
219        ref_time: DeltaT,
220        delta_t_max: DeltaT,
221        time_mode: Option<TimeMode>,
222    ) -> Result<Self, SourceError> {
223        if delta_t_max % ref_time == 0 {
224            self.video = self
225                .video
226                .time_parameters(tps, ref_time, delta_t_max, time_mode)?;
227        } else {
228            eprintln!("delta_t_max must be a multiple of ref_time");
229        }
230        Ok(self)
231    }
232
233    fn write_out(
234        mut self,
235        source_camera: SourceCamera,
236        time_mode: TimeMode,
237        pixel_multi_mode: PixelMultiMode,
238        adu_interval: Option<usize>,
239        encoder_type: EncoderType,
240        encoder_options: EncoderOptions,
241        write: W,
242    ) -> Result<Box<Self>, SourceError> {
243        self.video = self.video.write_out(
244            Some(source_camera),
245            Some(time_mode),
246            Some(pixel_multi_mode),
247            adu_interval,
248            encoder_type,
249            encoder_options,
250            write,
251        )?;
252        Ok(Box::new(self))
253    }
254
255    fn detect_features(mut self, detect_features: bool, show_features: ShowFeatureMode) -> Self {
256        self.video = self.video.detect_features(detect_features, show_features);
257        self
258    }
259
260    #[cfg(feature = "feature-logging")]
261    fn log_path(mut self, name: String) -> Self {
262        let date_time = Local::now();
263        let formatted = format!("{}_{}.log", name, date_time.format("%d_%m_%Y_%H_%M_%S"));
264        let log_handle = std::fs::File::create(formatted).ok();
265        self.video.state.feature_log_handle = log_handle;
266
267        // Write the plane size to the log file
268        if let Some(handle) = &mut self.video.state.feature_log_handle {
269            writeln!(
270                handle,
271                "{}x{}x{}",
272                self.video.state.plane.w(),
273                self.video.state.plane.h(),
274                self.video.state.plane.c()
275            )
276            .unwrap();
277        }
278        self
279    }
280}