#![deny(bare_trait_objects)]
#![deny(clippy::as_ptr_cast_mut)]
#![deny(clippy::cast_ptr_alignment)]
#![deny(clippy::large_stack_arrays)]
#![deny(clippy::ptr_as_ptr)]
#![deny(clippy::transmute_ptr_to_ptr)]
#![deny(clippy::unwrap_used)]
#![warn(clippy::cloned_instead_of_copied)]
#![warn(clippy::inefficient_to_string)]
#![warn(clippy::invalid_upcast_comparisons)]
#![warn(clippy::iter_with_drain)]
#![warn(clippy::large_types_passed_by_value)]
#![warn(clippy::linkedlist)]
#![warn(clippy::mutex_integer)]
#![warn(clippy::naive_bytecount)]
#![warn(clippy::needless_bitwise_bool)]
#![warn(clippy::needless_collect)]
#![warn(clippy::needless_pass_by_value)]
#![warn(clippy::no_effect_underscore_binding)]
#![warn(clippy::or_fun_call)]
#![warn(clippy::stable_sort_primitive)]
#![warn(clippy::suboptimal_flops)]
#![warn(clippy::trivial_regex)]
#![warn(clippy::trivially_copy_pass_by_ref)]
#![warn(clippy::unnecessary_join)]
#![warn(clippy::unused_async)]
#![warn(clippy::zero_sized_map_values)]
#![deny(clippy::case_sensitive_file_extension_comparisons)]
#![deny(clippy::copy_iterator)]
#![deny(clippy::expl_impl_clone_on_copy)]
#![deny(clippy::float_cmp)]
#![warn(clippy::imprecise_flops)]
#![deny(clippy::manual_instant_elapsed)]
#![deny(clippy::match_same_arms)]
#![deny(clippy::mem_forget)]
#![warn(clippy::must_use_candidate)]
#![deny(clippy::path_buf_push_overwrite)]
#![deny(clippy::same_functions_in_if_condition)]
#![warn(clippy::suspicious_operation_groupings)]
#![deny(clippy::unchecked_duration_subtraction)]
#![deny(clippy::unicode_not_nfc)]
#![warn(clippy::borrow_as_ptr)]
#![warn(clippy::checked_conversions)]
#![warn(clippy::default_trait_access)]
#![warn(clippy::derive_partial_eq_without_eq)]
#![warn(clippy::explicit_deref_methods)]
#![warn(clippy::filter_map_next)]
#![warn(clippy::flat_map_option)]
#![warn(clippy::fn_params_excessive_bools)]
#![warn(clippy::from_iter_instead_of_collect)]
#![warn(clippy::if_not_else)]
#![warn(clippy::implicit_clone)]
#![warn(clippy::iter_not_returning_iterator)]
#![warn(clippy::iter_on_empty_collections)]
#![warn(clippy::macro_use_imports)]
#![warn(clippy::manual_clamp)]
#![warn(clippy::manual_let_else)]
#![warn(clippy::manual_ok_or)]
#![warn(clippy::manual_string_new)]
#![warn(clippy::map_flatten)]
#![warn(clippy::map_unwrap_or)]
#![warn(clippy::match_bool)]
#![warn(clippy::mut_mut)]
#![warn(clippy::needless_borrow)]
#![warn(clippy::needless_continue)]
#![warn(clippy::option_if_let_else)]
#![warn(clippy::range_minus_one)]
#![warn(clippy::range_plus_one)]
#![warn(clippy::redundant_else)]
#![warn(clippy::ref_binding_to_reference)]
#![warn(clippy::ref_option_ref)]
#![warn(clippy::semicolon_if_nothing_returned)]
#![warn(clippy::trait_duplication_in_bounds)]
#![warn(clippy::type_repetition_in_bounds)]
#![warn(clippy::unnested_or_patterns)]
#![warn(clippy::unused_peekable)]
#![warn(clippy::unused_rounding)]
#![warn(clippy::unused_self)]
#![warn(clippy::used_underscore_binding)]
#![warn(clippy::verbose_bit_mask)]
#![warn(clippy::verbose_file_reads)]
#![warn(clippy::doc_link_with_quotes)]
#![warn(clippy::doc_markdown)]
#![warn(clippy::missing_errors_doc)]
#![warn(clippy::missing_panics_doc)]
pub mod decoder;
#[cfg(feature = "ffmpeg")]
pub mod ffmpeg;
#[cfg(feature = "vapoursynth")]
pub mod vapoursynth;
mod y4m;
use std::{
collections::{BTreeMap, BTreeSet},
io::Read,
sync::Arc,
time::Instant,
};
pub use ::y4m::Decoder as Y4mDecoder;
use decoder::Decoder;
pub use rav1e::scenechange::SceneChangeDetector;
use rav1e::{
config::{CpuFeatureLevel, EncoderConfig},
prelude::{Pixel, Sequence},
};
/// Tunable options controlling how scene-change detection is performed.
#[derive(Debug, Clone, Copy)]
pub struct DetectionOptions {
/// Which analysis speed to use; `Fast` selects a faster (less thorough)
/// encoder speed preset than `Standard`.
pub analysis_speed: SceneDetectionSpeed,
/// If `true`, analysis looks ahead `lookahead_distance` frames to avoid
/// flagging brief flashes as scene changes; if `false`, only one frame of
/// lookahead is used.
pub detect_flashes: bool,
/// Minimum allowed distance between two scenecuts, in frames
/// (`None` = no minimum).
pub min_scenecut_distance: Option<usize>,
/// Maximum allowed distance between two scenecuts, in frames
/// (`None` = no maximum).
pub max_scenecut_distance: Option<usize>,
/// Number of frames to look ahead during analysis; must be at least 1.
pub lookahead_distance: usize,
}
impl Default for DetectionOptions {
    /// Default settings: standard-speed analysis, flash detection enabled,
    /// a 5-frame lookahead, and no scenecut distance constraints.
    fn default() -> Self {
        Self {
            analysis_speed: SceneDetectionSpeed::Standard,
            detect_flashes: true,
            min_scenecut_distance: None,
            max_scenecut_distance: None,
            lookahead_distance: 5,
        }
    }
}
/// The results of running scene-change detection over a video.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serialize", derive(serde::Serialize))]
pub struct DetectionResults {
/// Frame indices at which scene changes were detected; frame 0 is always
/// included.
pub scene_changes: Vec<usize>,
/// Total number of frames that were analyzed.
pub frame_count: usize,
/// Analysis throughput, in frames per second of wall-clock time.
pub speed: f64,
}
/// Builds a rav1e [`SceneChangeDetector`] configured from the decoder's video
/// parameters and the given detection options.
///
/// # Errors
///
/// Returns an error if the video details cannot be read from the decoder.
pub fn new_detector<R: Read, T: Pixel>(
    dec: &mut Decoder<R>,
    opts: DetectionOptions,
) -> anyhow::Result<SceneChangeDetector<T>> {
    let video_details = dec.get_video_details()?;

    // Fast analysis uses rav1e speed preset 10; Standard uses preset 8.
    let speed_preset = match opts.analysis_speed {
        SceneDetectionSpeed::Fast => 10,
        SceneDetectionSpeed::Standard => 8,
    };
    let mut config = EncoderConfig::with_speed_preset(speed_preset);

    // Translate the optional scenecut distance bounds into rav1e's
    // keyframe-interval settings (0 / u32::MAX when unconstrained).
    config.min_key_frame_interval = match opts.min_scenecut_distance {
        Some(val) => val as u64,
        None => 0,
    };
    config.max_key_frame_interval = match opts.max_scenecut_distance {
        Some(val) => val as u64,
        None => u64::from(u32::MAX),
    };

    // Mirror the source video's geometry and format into the encoder config.
    config.width = video_details.width;
    config.height = video_details.height;
    config.bit_depth = video_details.bit_depth;
    config.time_base = video_details.time_base;
    config.chroma_sampling = video_details.chroma_sampling;
    config.chroma_sample_position = video_details.chroma_sample_position;
    config.speed_settings.transform.tx_domain_distortion = true;

    let sequence = Arc::new(Sequence::new(&config));

    // With flash detection disabled, only a single frame of lookahead is used.
    let lookahead = if opts.detect_flashes {
        opts.lookahead_distance
    } else {
        1
    };

    Ok(SceneChangeDetector::new(
        config,
        CpuFeatureLevel::default(),
        lookahead,
        sequence,
    ))
}
/// Runs scene-change detection over the video supplied by `dec` and returns
/// the detected scene changes plus throughput statistics.
///
/// `frame_limit` caps how many frames are analyzed (`None` = all frames).
/// `progress_callback`, if provided, is invoked after each analyzed frame with
/// `(frames_processed, keyframes_found_so_far)`.
///
/// # Errors
///
/// Returns an error if the detector cannot be constructed or the decoder's
/// video details cannot be read.
///
/// # Panics
///
/// Panics if `opts.lookahead_distance` is zero.
#[allow(clippy::needless_pass_by_value)]
pub fn detect_scene_changes<R: Read, T: Pixel>(
dec: &mut Decoder<R>,
opts: DetectionOptions,
frame_limit: Option<usize>,
progress_callback: Option<&dyn Fn(usize, usize)>,
) -> anyhow::Result<DetectionResults> {
assert!(opts.lookahead_distance >= 1);
let mut detector = new_detector::<R, T>(dec, opts)?;
let video_details = dec.get_video_details()?;
// Sliding window of decoded frames, keyed by frame number.
let mut frame_queue = BTreeMap::new();
// Frame 0 is always a keyframe.
let mut keyframes = BTreeSet::new();
keyframes.insert(0);
let start_time = Instant::now();
let mut frameno = 0;
loop {
// Top up the queue so it extends `lookahead_distance` frames past the
// current frame (bounded by `frame_limit`), decoding as needed.
let mut next_input_frameno = frame_queue.keys().last().copied().map_or(0, |key| key + 1);
while next_input_frameno
< (frameno + opts.lookahead_distance + 1).min(frame_limit.unwrap_or(usize::MAX))
{
let frame = dec.read_video_frame(&video_details);
if let Ok(frame) = frame {
frame_queue.insert(next_input_frameno, Arc::new(frame));
next_input_frameno += 1;
} else {
// Decoder returned an error — treated as end of stream.
break;
}
}
// The analysis window: current frame plus up to lookahead+1 successors.
let frame_set = frame_queue
.values()
.take(opts.lookahead_distance + 2)
.collect::<Vec<_>>();
if frame_set.len() < 2 {
// Analysis needs the current frame plus at least one successor; with
// fewer frames remaining, we are done.
break;
}
// Frame 0 is unconditionally a keyframe; otherwise defer to the detector,
// passing the most recent keyframe for distance constraints.
if frameno == 0
|| detector.analyze_next_frame(
&frame_set,
frameno as u64,
*keyframes
.iter()
.last()
.expect("at least 1 keyframe should exist"),
)
{
keyframes.insert(frameno as u64);
};
// The previous frame can no longer appear in any analysis window.
if frameno > 0 {
frame_queue.remove(&(frameno - 1));
}
frameno += 1;
if let Some(progress_fn) = progress_callback {
progress_fn(frameno, keyframes.len());
}
if let Some(frame_limit) = frame_limit {
if frameno == frame_limit {
break;
}
}
}
Ok(DetectionResults {
scene_changes: keyframes.into_iter().map(|val| val as usize).collect(),
frame_count: frameno,
speed: frameno as f64 / start_time.elapsed().as_secs_f64(),
})
}
/// Which analysis speed to use for scene-change detection.
#[derive(Clone, Copy, Debug, PartialOrd, PartialEq, Eq)]
pub enum SceneDetectionSpeed {
/// Fastest analysis; selects a faster rav1e speed preset than `Standard`.
Fast,
/// Default, more thorough analysis.
Standard,
}