// oximedia_codec/rate_control/complexity.rs
//! Frame complexity estimation.
//!
//! This module provides methods for estimating frame complexity, which is
//! essential for accurate rate control. Complexity metrics include:
//!
//! - Spatial complexity (texture/detail)
//! - Temporal complexity (motion)
//! - Combined metrics for rate control decisions

#![allow(clippy::cast_lossless)]
#![allow(clippy::cast_precision_loss)]
#![allow(clippy::cast_possible_truncation)]
#![allow(clippy::cast_sign_loss)]
#![allow(clippy::manual_let_else)]
#![allow(clippy::used_underscore_binding)]
#![allow(clippy::needless_range_loop)]
#![allow(clippy::manual_clamp)]
#![forbid(unsafe_code)]
/// Frame complexity estimator.
///
/// Estimates spatial and temporal complexity of video frames to guide
/// rate control decisions. Spatial complexity comes from per-block pixel
/// variance; temporal complexity from frame-to-frame differences.
#[derive(Clone, Debug)]
pub struct ComplexityEstimator {
    /// Frame width in pixels.
    width: u32,
    /// Frame height in pixels.
    height: u32,
    /// Block size for analysis; `set_block_size` keeps it in [4, 64].
    block_size: u32,
    /// Running average of spatial complexity (EMA, starts at neutral 1.0).
    avg_spatial: f32,
    /// Running average of temporal complexity (EMA, starts at neutral 1.0).
    avg_temporal: f32,
    /// Running average of combined complexity (EMA, starts at neutral 1.0).
    avg_combined: f32,
    /// Weight for exponential moving average (0.1 on construction).
    ema_weight: f32,
    /// Number of frames analyzed since creation or last `reset`.
    frame_count: u64,
    /// Previous frame data for temporal analysis; `None` before the first frame.
    prev_frame_data: Option<FrameData>,
}
45
/// Stored frame data for temporal analysis.
///
/// Captured after every `ComplexityEstimator::estimate` call so the next
/// frame can be compared against it.
#[derive(Clone, Debug)]
struct FrameData {
    /// Per-block variance values (reserved for motion-compensated analysis).
    #[allow(dead_code)]
    block_variances: Vec<f32>,
    /// Per-block average values (reserved for motion-compensated analysis).
    #[allow(dead_code)]
    block_averages: Vec<f32>,
    /// Total frame variance; used by the simplified SAD proxy.
    total_variance: f32,
}
58
59impl ComplexityEstimator {
60    /// Create a new complexity estimator.
61    #[must_use]
62    pub fn new(width: u32, height: u32) -> Self {
63        Self {
64            width,
65            height,
66            block_size: 16,
67            avg_spatial: 1.0,
68            avg_temporal: 1.0,
69            avg_combined: 1.0,
70            ema_weight: 0.1,
71            frame_count: 0,
72            prev_frame_data: None,
73        }
74    }
75
76    /// Set the block size for analysis.
77    pub fn set_block_size(&mut self, size: u32) {
78        self.block_size = size.max(4).min(64);
79    }
80
81    /// Estimate complexity of a frame's luma plane.
82    #[must_use]
83    pub fn estimate(&mut self, luma: &[u8], stride: usize) -> ComplexityResult {
84        let spatial = self.estimate_spatial(luma, stride);
85        let temporal = self.estimate_temporal(luma, stride);
86
87        // Combined metric (geometric mean)
88        let combined = (spatial * temporal).sqrt();
89
90        // Update running averages
91        self.update_averages(spatial, temporal, combined);
92
93        // Store frame data for next frame's temporal analysis
94        self.store_frame_data(luma, stride);
95
96        self.frame_count += 1;
97
98        ComplexityResult {
99            spatial,
100            temporal,
101            combined,
102            normalized: combined / self.avg_combined,
103        }
104    }
105
106    /// Estimate spatial complexity using variance-based method.
107    fn estimate_spatial(&self, luma: &[u8], stride: usize) -> f32 {
108        let blocks_x = self.width / self.block_size;
109        let blocks_y = self.height / self.block_size;
110
111        if blocks_x == 0 || blocks_y == 0 {
112            return 1.0;
113        }
114
115        let mut total_variance = 0.0f64;
116        let mut block_count = 0u32;
117
118        for by in 0..blocks_y {
119            for bx in 0..blocks_x {
120                let variance = self.calculate_block_variance(luma, stride, bx, by);
121                total_variance += variance as f64;
122                block_count += 1;
123            }
124        }
125
126        if block_count == 0 {
127            return 1.0;
128        }
129
130        let avg_variance = (total_variance / block_count as f64) as f32;
131
132        // Normalize variance to a reasonable scale
133        // High variance = high complexity
134        (avg_variance / 100.0).sqrt().max(0.1)
135    }
136
137    /// Calculate variance of a single block.
138    fn calculate_block_variance(&self, luma: &[u8], stride: usize, bx: u32, by: u32) -> f32 {
139        let start_x = (bx * self.block_size) as usize;
140        let start_y = (by * self.block_size) as usize;
141        let block_size = self.block_size as usize;
142
143        let mut sum = 0u64;
144        let mut sum_sq = 0u64;
145        let mut count = 0u32;
146
147        for y in 0..block_size {
148            let row_start = (start_y + y) * stride + start_x;
149            if row_start + block_size > luma.len() {
150                continue;
151            }
152
153            for x in 0..block_size {
154                let pixel = luma[row_start + x] as u64;
155                sum += pixel;
156                sum_sq += pixel * pixel;
157                count += 1;
158            }
159        }
160
161        if count == 0 {
162            return 0.0;
163        }
164
165        let mean = sum as f32 / count as f32;
166        let mean_sq = sum_sq as f32 / count as f32;
167        let variance = mean_sq - mean * mean;
168
169        variance.max(0.0)
170    }
171
172    /// Estimate temporal complexity using SAD-based method.
173    fn estimate_temporal(&self, luma: &[u8], stride: usize) -> f32 {
174        let prev_data = match &self.prev_frame_data {
175            Some(data) => data,
176            None => return 1.0, // First frame, assume average complexity
177        };
178
179        // Calculate SAD between current and previous frame
180        let sad = self.calculate_frame_sad(luma, stride, prev_data);
181
182        // Normalize SAD to complexity metric
183        let pixels = self.width as f64 * self.height as f64;
184        if pixels == 0.0 {
185            return 1.0;
186        }
187
188        let normalized_sad = sad as f64 / pixels;
189
190        // Map to reasonable range
191        (normalized_sad as f32 / 10.0).sqrt().max(0.1)
192    }
193
194    /// Calculate SAD (Sum of Absolute Differences) against previous frame.
195    fn calculate_frame_sad(&self, luma: &[u8], stride: usize, _prev: &FrameData) -> u64 {
196        // Simplified: just calculate variance difference
197        // In a full implementation, this would do motion-compensated SAD
198        let current_variance = self.calculate_total_variance(luma, stride);
199        let prev_variance = _prev.total_variance;
200
201        ((current_variance - prev_variance).abs() * 1000.0) as u64
202    }
203
204    /// Calculate total frame variance.
205    fn calculate_total_variance(&self, luma: &[u8], stride: usize) -> f32 {
206        let total_pixels = (self.width * self.height) as usize;
207        if total_pixels == 0 || luma.len() < total_pixels {
208            return 0.0;
209        }
210
211        let mut sum = 0u64;
212        let mut sum_sq = 0u64;
213        let mut count = 0u64;
214
215        for y in 0..self.height as usize {
216            let row_start = y * stride;
217            let row_end = (row_start + self.width as usize).min(luma.len());
218
219            for x in row_start..row_end {
220                let pixel = luma[x] as u64;
221                sum += pixel;
222                sum_sq += pixel * pixel;
223                count += 1;
224            }
225        }
226
227        if count == 0 {
228            return 0.0;
229        }
230
231        let mean = sum as f32 / count as f32;
232        let mean_sq = sum_sq as f32 / count as f32;
233        (mean_sq - mean * mean).max(0.0)
234    }
235
236    /// Store frame data for temporal analysis.
237    fn store_frame_data(&mut self, luma: &[u8], stride: usize) {
238        let blocks_x = self.width / self.block_size;
239        let blocks_y = self.height / self.block_size;
240
241        let mut block_variances = Vec::with_capacity((blocks_x * blocks_y) as usize);
242        let mut block_averages = Vec::with_capacity((blocks_x * blocks_y) as usize);
243
244        for by in 0..blocks_y {
245            for bx in 0..blocks_x {
246                let (avg, var) = self.calculate_block_stats(luma, stride, bx, by);
247                block_variances.push(var);
248                block_averages.push(avg);
249            }
250        }
251
252        let total_variance = self.calculate_total_variance(luma, stride);
253
254        self.prev_frame_data = Some(FrameData {
255            block_variances,
256            block_averages,
257            total_variance,
258        });
259    }
260
261    /// Calculate block average and variance.
262    fn calculate_block_stats(&self, luma: &[u8], stride: usize, bx: u32, by: u32) -> (f32, f32) {
263        let start_x = (bx * self.block_size) as usize;
264        let start_y = (by * self.block_size) as usize;
265        let block_size = self.block_size as usize;
266
267        let mut sum = 0u64;
268        let mut sum_sq = 0u64;
269        let mut count = 0u32;
270
271        for y in 0..block_size {
272            let row_start = (start_y + y) * stride + start_x;
273            if row_start + block_size > luma.len() {
274                continue;
275            }
276
277            for x in 0..block_size {
278                let pixel = luma[row_start + x] as u64;
279                sum += pixel;
280                sum_sq += pixel * pixel;
281                count += 1;
282            }
283        }
284
285        if count == 0 {
286            return (128.0, 0.0);
287        }
288
289        let mean = sum as f32 / count as f32;
290        let mean_sq = sum_sq as f32 / count as f32;
291        let variance = (mean_sq - mean * mean).max(0.0);
292
293        (mean, variance)
294    }
295
296    /// Update running averages.
297    fn update_averages(&mut self, spatial: f32, temporal: f32, combined: f32) {
298        let w = self.ema_weight;
299        self.avg_spatial = self.avg_spatial * (1.0 - w) + spatial * w;
300        self.avg_temporal = self.avg_temporal * (1.0 - w) + temporal * w;
301        self.avg_combined = self.avg_combined * (1.0 - w) + combined * w;
302    }
303
304    /// Get average spatial complexity.
305    #[must_use]
306    pub fn avg_spatial(&self) -> f32 {
307        self.avg_spatial
308    }
309
310    /// Get average temporal complexity.
311    #[must_use]
312    pub fn avg_temporal(&self) -> f32 {
313        self.avg_temporal
314    }
315
316    /// Get average combined complexity.
317    #[must_use]
318    pub fn avg_combined(&self) -> f32 {
319        self.avg_combined
320    }
321
322    /// Get frame count.
323    #[must_use]
324    pub fn frame_count(&self) -> u64 {
325        self.frame_count
326    }
327
328    /// Reset the estimator state.
329    pub fn reset(&mut self) {
330        self.avg_spatial = 1.0;
331        self.avg_temporal = 1.0;
332        self.avg_combined = 1.0;
333        self.frame_count = 0;
334        self.prev_frame_data = None;
335    }
336}
337
impl Default for ComplexityEstimator {
    /// Default estimator sized for 1080p (1920x1080) frames.
    fn default() -> Self {
        Self::new(1920, 1080)
    }
}
343
/// Result of a single frame's complexity estimation.
#[derive(Clone, Copy, Debug, Default)]
pub struct ComplexityResult {
    /// Spatial complexity (texture/detail).
    pub spatial: f32,
    /// Temporal complexity (motion).
    pub temporal: f32,
    /// Combined complexity metric.
    pub combined: f32,
    /// Normalized complexity (relative to average).
    pub normalized: f32,
}

impl ComplexityResult {
    /// Build a result with every metric at the neutral value 1.0.
    #[must_use]
    pub fn default_complexity() -> Self {
        Self {
            spatial: 1.0,
            temporal: 1.0,
            combined: 1.0,
            normalized: 1.0,
        }
    }

    /// Whether this frame is markedly more complex than average.
    #[must_use]
    pub fn is_high_complexity(&self) -> bool {
        self.normalized > 1.5
    }

    /// Whether this frame is markedly less complex than average.
    #[must_use]
    pub fn is_low_complexity(&self) -> bool {
        self.normalized < 0.7
    }

    /// Suggested QP adjustment derived from relative complexity.
    ///
    /// Complex frames get a positive offset (more compression), simple
    /// frames a negative one (better quality); the log keeps the mapping
    /// symmetric around the average, bounded to +/-4.
    #[must_use]
    pub fn suggested_qp_offset(&self) -> f32 {
        (self.normalized.ln() * 2.0).clamp(-4.0, 4.0)
    }
}
390
/// Motion complexity analyzer.
///
/// Compares consecutive frames block-by-block (zero-motion-vector SAD)
/// to estimate how much of the frame is moving.
#[derive(Clone, Debug)]
pub struct MotionAnalyzer {
    /// Block size for motion analysis (16 on construction).
    block_size: u32,
    /// Search range for motion estimation (reserved for full motion search).
    #[allow(dead_code)]
    search_range: u32,
    /// Previous frame luma, repacked to a tight buffer whose row stride is
    /// `width`; `None` before the first analyzed frame.
    prev_luma: Option<Vec<u8>>,
    /// Frame width in pixels.
    width: u32,
    /// Frame height in pixels.
    height: u32,
}
406
407impl MotionAnalyzer {
408    /// Create a new motion analyzer.
409    #[must_use]
410    pub fn new(width: u32, height: u32) -> Self {
411        Self {
412            block_size: 16,
413            search_range: 16,
414            prev_luma: None,
415            width,
416            height,
417        }
418    }
419
420    /// Analyze motion in a frame.
421    pub fn analyze(&mut self, luma: &[u8], stride: usize) -> MotionResult {
422        let result = if let Some(ref prev) = self.prev_luma {
423            self.calculate_motion(prev, luma, stride)
424        } else {
425            MotionResult::default()
426        };
427
428        // Store current frame for next comparison
429        self.store_frame(luma, stride);
430
431        result
432    }
433
434    /// Calculate motion metrics between frames.
435    fn calculate_motion(&self, prev: &[u8], curr: &[u8], stride: usize) -> MotionResult {
436        let blocks_x = self.width / self.block_size;
437        let blocks_y = self.height / self.block_size;
438
439        if blocks_x == 0 || blocks_y == 0 {
440            return MotionResult::default();
441        }
442
443        let mut total_sad = 0u64;
444        let mut motion_blocks = 0u32;
445        let mut max_motion = 0f32;
446
447        for by in 0..blocks_y {
448            for bx in 0..blocks_x {
449                let (sad, motion) = self.analyze_block(prev, curr, stride, bx, by);
450                total_sad += sad;
451
452                if motion > 2.0 {
453                    motion_blocks += 1;
454                }
455                if motion > max_motion {
456                    max_motion = motion;
457                }
458            }
459        }
460
461        let total_blocks = blocks_x * blocks_y;
462        let avg_sad = total_sad as f32 / total_blocks as f32;
463        let motion_ratio = motion_blocks as f32 / total_blocks as f32;
464
465        MotionResult {
466            average_sad: avg_sad,
467            motion_ratio,
468            max_motion,
469            complexity: (avg_sad / 100.0).sqrt() * (1.0 + motion_ratio),
470        }
471    }
472
473    /// Analyze a single block for motion.
474    fn analyze_block(
475        &self,
476        prev: &[u8],
477        curr: &[u8],
478        stride: usize,
479        bx: u32,
480        by: u32,
481    ) -> (u64, f32) {
482        let start_x = (bx * self.block_size) as usize;
483        let start_y = (by * self.block_size) as usize;
484        let block_size = self.block_size as usize;
485
486        // Calculate SAD at (0,0) position (no motion)
487        let mut sad = 0u64;
488
489        for y in 0..block_size {
490            let curr_row = (start_y + y) * stride + start_x;
491            let prev_row = (start_y + y) * stride + start_x;
492
493            if curr_row + block_size > curr.len() || prev_row + block_size > prev.len() {
494                continue;
495            }
496
497            for x in 0..block_size {
498                let diff = (curr[curr_row + x] as i32 - prev[prev_row + x] as i32).unsigned_abs();
499                sad += diff as u64;
500            }
501        }
502
503        let pixels = (block_size * block_size) as f32;
504        let avg_diff = sad as f32 / pixels;
505
506        (sad, avg_diff)
507    }
508
509    /// Store frame for next comparison.
510    fn store_frame(&mut self, luma: &[u8], stride: usize) {
511        let height = self.height as usize;
512        let width = self.width as usize;
513
514        let mut stored = vec![0u8; width * height];
515
516        for y in 0..height {
517            let src_start = y * stride;
518            let dst_start = y * width;
519            let copy_len = width.min(luma.len().saturating_sub(src_start));
520
521            if copy_len > 0 {
522                stored[dst_start..dst_start + copy_len]
523                    .copy_from_slice(&luma[src_start..src_start + copy_len]);
524            }
525        }
526
527        self.prev_luma = Some(stored);
528    }
529
530    /// Reset the analyzer.
531    pub fn reset(&mut self) {
532        self.prev_luma = None;
533    }
534}
535
impl Default for MotionAnalyzer {
    /// Default analyzer sized for 1080p (1920x1080) frames.
    fn default() -> Self {
        Self::new(1920, 1080)
    }
}
541
/// Result of motion analysis for one frame.
#[derive(Clone, Copy, Debug, Default)]
pub struct MotionResult {
    /// Average SAD (Sum of Absolute Differences).
    pub average_sad: f32,
    /// Ratio of blocks with significant motion.
    pub motion_ratio: f32,
    /// Maximum motion detected in any block.
    pub max_motion: f32,
    /// Overall motion complexity metric.
    pub complexity: f32,
}

impl MotionResult {
    /// True for a heavy-motion frame: over half the blocks moved, or at
    /// least one block moved very strongly.
    #[must_use]
    pub fn is_high_motion(&self) -> bool {
        self.max_motion > 20.0 || self.motion_ratio > 0.5
    }

    /// True for an essentially static frame: under 10% of blocks moved
    /// and no block moved strongly.
    #[must_use]
    pub fn is_static(&self) -> bool {
        self.max_motion < 5.0 && self.motion_ratio < 0.1
    }
}
568
#[cfg(test)]
mod tests {
    use super::*;

    /// Frame filled with a single luma value.
    fn create_test_frame(width: u32, height: u32, value: u8) -> Vec<u8> {
        vec![value; (width * height) as usize]
    }

    /// Frame with a diagonal luma gradient.
    fn create_gradient_frame(width: u32, height: u32) -> Vec<u8> {
        (0..height)
            .flat_map(|y| (0..width).map(move |x| ((x + y) % 256) as u8))
            .collect()
    }

    #[test]
    fn test_complexity_estimator_creation() {
        assert_eq!(ComplexityEstimator::new(1920, 1080).frame_count(), 0);
    }

    #[test]
    fn test_uniform_frame_low_complexity() {
        let mut estimator = ComplexityEstimator::new(64, 64);

        // A flat frame has (near) zero variance, so spatial score is low.
        let result = estimator.estimate(&create_test_frame(64, 64, 128), 64);
        assert!(result.spatial < 0.5);
    }

    #[test]
    fn test_gradient_frame_higher_complexity() {
        let mut estimator = ComplexityEstimator::new(64, 64);

        // A gradient has measurable texture, so spatial score is nonzero.
        let result = estimator.estimate(&create_gradient_frame(64, 64), 64);
        assert!(result.spatial > 0.0);
    }

    #[test]
    fn test_temporal_complexity_static() {
        let mut estimator = ComplexityEstimator::new(64, 64);
        let frame = create_test_frame(64, 64, 128);

        // Prime with a first frame, then feed the identical frame again:
        // temporal complexity should not exceed the neutral value.
        let _ = estimator.estimate(&frame, 64);
        let result = estimator.estimate(&frame, 64);
        assert!(result.temporal <= 1.0);
    }

    #[test]
    fn test_complexity_result_methods() {
        let high = ComplexityResult { normalized: 2.0, ..ComplexityResult::default_complexity() };
        assert!(high.is_high_complexity());
        assert!(!high.is_low_complexity());

        let low = ComplexityResult { normalized: 0.5, ..ComplexityResult::default_complexity() };
        assert!(low.is_low_complexity());
        assert!(!low.is_high_complexity());
    }

    #[test]
    fn test_suggested_qp_offset() {
        // Above-average complexity should push QP up...
        let high = ComplexityResult { normalized: 2.0, ..ComplexityResult::default_complexity() };
        assert!(high.suggested_qp_offset() > 0.0);

        // ...and below-average complexity should pull it down.
        let low = ComplexityResult { normalized: 0.5, ..ComplexityResult::default_complexity() };
        assert!(low.suggested_qp_offset() < 0.0);
    }

    #[test]
    fn test_motion_analyzer_creation() {
        assert!(MotionAnalyzer::new(1920, 1080).prev_luma.is_none());
    }

    #[test]
    fn test_motion_analyzer_static() {
        let mut analyzer = MotionAnalyzer::new(64, 64);
        let frame = create_test_frame(64, 64, 128);

        // Prime with a first frame; the identical second frame has no motion.
        let _ = analyzer.analyze(&frame, 64);
        assert!(analyzer.analyze(&frame, 64).is_static());
    }

    #[test]
    fn test_motion_analyzer_reset() {
        let mut analyzer = MotionAnalyzer::new(64, 64);

        let _ = analyzer.analyze(&create_test_frame(64, 64, 128), 64);
        assert!(analyzer.prev_luma.is_some());

        analyzer.reset();
        assert!(analyzer.prev_luma.is_none());
    }

    #[test]
    fn test_estimator_reset() {
        let mut estimator = ComplexityEstimator::new(64, 64);

        let _ = estimator.estimate(&create_test_frame(64, 64, 128), 64);
        assert_eq!(estimator.frame_count(), 1);

        estimator.reset();
        assert_eq!(estimator.frame_count(), 0);
    }

    #[test]
    fn test_running_averages() {
        let mut estimator = ComplexityEstimator::new(64, 64);
        let frame = create_gradient_frame(64, 64);

        for _ in 0..10 {
            let _ = estimator.estimate(&frame, 64);
        }

        // After several frames the EMAs should have settled on positive values.
        assert!(estimator.avg_spatial() > 0.0);
        assert!(estimator.avg_combined() > 0.0);
    }
}