// agg_rust/rendering_buffer.rs
//! Rendering buffer — row-oriented access to pixel data.
//!
//! Port of `agg_rendering_buffer.h` — provides two strategies for accessing
//! rows in a rectangular pixel buffer:
//!
//! - [`RowAccessor`]: computes row pointers on demand (multiplication per access).
//!   Cheap to create; good default.
//! - [`RowPtrCache`]: pre-computes and caches row pointers in a `Vec`.
//!   Faster per-row access; requires allocation.
//!
//! Both support positive strides (top-down) and negative strides (bottom-up,
//! e.g. BMP format). The `RenderingBuffer` type alias defaults to `RowAccessor`.

/// Row data returned by `row()` — the horizontal span a row covers plus a
/// slice over the row's bytes.
#[derive(Debug, Clone, Copy)]
pub struct RowData<'a> {
    /// First covered x coordinate (inclusive).
    pub x1: i32,
    /// Last covered x coordinate (inclusive).
    pub x2: i32,
    /// The row's pixel bytes.
    pub ptr: &'a [u8],
}

// ============================================================================
// RowAccessor
// ============================================================================

/// Row accessor that computes row pointers via `base + y * stride`.
///
/// Port of C++ `agg::row_accessor<int8u>`. Does not own the pixel memory;
/// the attached buffer must outlive the accessor.
pub struct RowAccessor {
    // Raw pointer to the attached buffer (null when unattached).
    buf: *mut u8,
    // Pointer to logical row 0: equals `buf` for positive strides, or the
    // last physical row for negative (bottom-up) strides.
    start: *mut u8,
    width: u32,
    height: u32,
    // Signed row stride in bytes; negative means bottom-up layout.
    stride: i32,
}

37impl RowAccessor {
38    /// Create an empty (unattached) row accessor.
39    pub fn new() -> Self {
40        Self {
41            buf: std::ptr::null_mut(),
42            start: std::ptr::null_mut(),
43            width: 0,
44            height: 0,
45            stride: 0,
46        }
47    }
48
49    /// Create and attach to a buffer.
50    ///
51    /// # Safety
52    /// `buf` must point to a valid buffer of at least `height * stride.abs()` bytes.
53    /// The buffer must remain valid for the lifetime of this accessor.
54    pub unsafe fn new_with_buf(buf: *mut u8, width: u32, height: u32, stride: i32) -> Self {
55        let mut ra = Self::new();
56        ra.attach(buf, width, height, stride);
57        ra
58    }
59
60    /// Attach to a buffer.
61    ///
62    /// # Safety
63    /// Same requirements as `new_with_buf`.
64    pub unsafe fn attach(&mut self, buf: *mut u8, width: u32, height: u32, stride: i32) {
65        self.buf = buf;
66        self.start = buf;
67        self.width = width;
68        self.height = height;
69        self.stride = stride;
70        if stride < 0 {
71            self.start = buf.offset(-((height as i64 - 1) * stride as i64) as isize);
72        }
73    }
74
75    /// Raw buffer pointer.
76    pub fn buf(&self) -> *mut u8 {
77        self.buf
78    }
79
80    pub fn width(&self) -> u32 {
81        self.width
82    }
83
84    pub fn height(&self) -> u32 {
85        self.height
86    }
87
88    pub fn stride(&self) -> i32 {
89        self.stride
90    }
91
92    pub fn stride_abs(&self) -> u32 {
93        self.stride.unsigned_abs()
94    }
95
96    /// Get a raw mutable pointer to a row.
97    ///
98    /// # Safety
99    /// `y` must be in `[0, height)`.
100    #[inline]
101    pub unsafe fn row_ptr(&self, y: i32) -> *mut u8 {
102        self.start.offset((y as i64 * self.stride as i64) as isize)
103    }
104
105    /// Get a safe immutable slice for row `y`.
106    ///
107    /// Returns the full row of `stride_abs()` bytes.
108    pub fn row_slice(&self, y: u32) -> &[u8] {
109        assert!(
110            y < self.height,
111            "row {} out of bounds (height={})",
112            y,
113            self.height
114        );
115        unsafe {
116            let ptr = self.row_ptr(y as i32);
117            std::slice::from_raw_parts(ptr, self.stride_abs() as usize)
118        }
119    }
120
121    /// Get a safe mutable slice for row `y`.
122    pub fn row_slice_mut(&mut self, y: u32) -> &mut [u8] {
123        assert!(
124            y < self.height,
125            "row {} out of bounds (height={})",
126            y,
127            self.height
128        );
129        unsafe {
130            let ptr = self.row_ptr(y as i32);
131            std::slice::from_raw_parts_mut(ptr, self.stride_abs() as usize)
132        }
133    }
134
135    /// Get row data (x1, x2 range + pointer).
136    pub fn row(&self, y: u32) -> RowData<'_> {
137        RowData {
138            x1: 0,
139            x2: self.width as i32 - 1,
140            ptr: self.row_slice(y),
141        }
142    }
143
144    /// Copy pixel data from another buffer (min of both dimensions).
145    pub fn copy_from<T: RenderingBufferAccess>(&mut self, src: &T) {
146        let h = self.height.min(src.height());
147        let l = self.stride_abs().min(src.stride_abs()) as usize;
148        for y in 0..h {
149            unsafe {
150                let dst = self.row_ptr(y as i32);
151                let src_ptr = src.row_ptr_const(y as i32);
152                std::ptr::copy_nonoverlapping(src_ptr, dst, l);
153            }
154        }
155    }
156
157    /// Fill every byte in the buffer with `value`.
158    pub fn clear(&mut self, value: u8) {
159        let stride = self.stride_abs() as usize;
160        for y in 0..self.height {
161            let row = self.row_slice_mut(y);
162            for byte in row[..stride].iter_mut() {
163                *byte = value;
164            }
165        }
166    }
167}
168
169impl Default for RowAccessor {
170    fn default() -> Self {
171        Self::new()
172    }
173}
174
// ============================================================================
// RowPtrCache
// ============================================================================

/// Row pointer cache that pre-computes pointers for O(1) row access.
///
/// Port of C++ `agg::row_ptr_cache<int8u>`. Trades an allocation at attach
/// time for cheaper per-row lookups; does not own the pixel memory.
pub struct RowPtrCache {
    // Raw pointer to the attached buffer (null when unattached).
    buf: *mut u8,
    // One cached pointer per row; index 0 is the logical top row.
    rows: Vec<*mut u8>,
    width: u32,
    height: u32,
    // Signed row stride in bytes; negative means bottom-up layout.
    stride: i32,
}

190impl RowPtrCache {
191    /// Create an empty (unattached) row pointer cache.
192    pub fn new() -> Self {
193        Self {
194            buf: std::ptr::null_mut(),
195            rows: Vec::new(),
196            width: 0,
197            height: 0,
198            stride: 0,
199        }
200    }
201
202    /// Create and attach to a buffer.
203    ///
204    /// # Safety
205    /// `buf` must point to a valid buffer of at least `height * stride.abs()` bytes.
206    /// The buffer must remain valid for the lifetime of this cache.
207    pub unsafe fn new_with_buf(buf: *mut u8, width: u32, height: u32, stride: i32) -> Self {
208        let mut rpc = Self::new();
209        rpc.attach(buf, width, height, stride);
210        rpc
211    }
212
213    /// Attach to a buffer, building the row pointer cache.
214    ///
215    /// # Safety
216    /// Same requirements as `new_with_buf`.
217    pub unsafe fn attach(&mut self, buf: *mut u8, width: u32, height: u32, stride: i32) {
218        self.buf = buf;
219        self.width = width;
220        self.height = height;
221        self.stride = stride;
222
223        if (height as usize) > self.rows.len() {
224            self.rows.resize(height as usize, std::ptr::null_mut());
225        }
226
227        let mut row_ptr = buf;
228        if stride < 0 {
229            row_ptr = buf.offset(-((height as i64 - 1) * stride as i64) as isize);
230        }
231
232        for y in 0..height as usize {
233            self.rows[y] = row_ptr;
234            row_ptr = row_ptr.offset(stride as isize);
235        }
236    }
237
238    /// Raw buffer pointer.
239    pub fn buf(&self) -> *mut u8 {
240        self.buf
241    }
242
243    pub fn width(&self) -> u32 {
244        self.width
245    }
246
247    pub fn height(&self) -> u32 {
248        self.height
249    }
250
251    pub fn stride(&self) -> i32 {
252        self.stride
253    }
254
255    pub fn stride_abs(&self) -> u32 {
256        self.stride.unsigned_abs()
257    }
258
259    /// Get a raw mutable pointer to a row (O(1) via cached pointers).
260    ///
261    /// # Safety
262    /// `y` must be in `[0, height)`.
263    #[inline]
264    pub unsafe fn row_ptr(&self, y: i32) -> *mut u8 {
265        *self.rows.get_unchecked(y as usize)
266    }
267
268    /// Get a safe immutable slice for row `y`.
269    pub fn row_slice(&self, y: u32) -> &[u8] {
270        assert!(
271            y < self.height,
272            "row {} out of bounds (height={})",
273            y,
274            self.height
275        );
276        unsafe {
277            let ptr = self.row_ptr(y as i32);
278            std::slice::from_raw_parts(ptr, self.stride_abs() as usize)
279        }
280    }
281
282    /// Get a safe mutable slice for row `y`.
283    pub fn row_slice_mut(&mut self, y: u32) -> &mut [u8] {
284        assert!(
285            y < self.height,
286            "row {} out of bounds (height={})",
287            y,
288            self.height
289        );
290        unsafe {
291            let ptr = self.row_ptr(y as i32);
292            std::slice::from_raw_parts_mut(ptr, self.stride_abs() as usize)
293        }
294    }
295
296    /// Get row data (x1, x2 range + pointer).
297    pub fn row(&self, y: u32) -> RowData<'_> {
298        RowData {
299            x1: 0,
300            x2: self.width as i32 - 1,
301            ptr: self.row_slice(y),
302        }
303    }
304
305    /// Copy pixel data from another buffer.
306    pub fn copy_from<T: RenderingBufferAccess>(&mut self, src: &T) {
307        let h = self.height.min(src.height());
308        let l = self.stride_abs().min(src.stride_abs()) as usize;
309        for y in 0..h {
310            unsafe {
311                let dst = self.row_ptr(y as i32);
312                let src_ptr = src.row_ptr_const(y as i32);
313                std::ptr::copy_nonoverlapping(src_ptr, dst, l);
314            }
315        }
316    }
317
318    /// Fill every byte in the buffer with `value`.
319    pub fn clear(&mut self, value: u8) {
320        let stride = self.stride_abs() as usize;
321        for y in 0..self.height {
322            let row = self.row_slice_mut(y);
323            for byte in row[..stride].iter_mut() {
324                *byte = value;
325            }
326        }
327    }
328}
329
330impl Default for RowPtrCache {
331    fn default() -> Self {
332        Self::new()
333    }
334}
335
// ============================================================================
// Common trait for both buffer types
// ============================================================================

/// Common interface for rendering buffer access (used by `copy_from`).
pub trait RenderingBufferAccess {
    /// Width in pixels.
    fn width(&self) -> u32;
    /// Height in rows.
    fn height(&self) -> u32;
    /// Absolute stride (row length) in bytes.
    fn stride_abs(&self) -> u32;
    /// Raw const pointer to row `y`.
    ///
    /// # Safety
    /// `y` must be in `[0, height)`.
    unsafe fn row_ptr_const(&self, y: i32) -> *const u8;
}

350impl RenderingBufferAccess for RowAccessor {
351    fn width(&self) -> u32 {
352        self.width
353    }
354    fn height(&self) -> u32 {
355        self.height
356    }
357    fn stride_abs(&self) -> u32 {
358        self.stride_abs()
359    }
360    unsafe fn row_ptr_const(&self, y: i32) -> *const u8 {
361        self.row_ptr(y) as *const u8
362    }
363}
364
365impl RenderingBufferAccess for RowPtrCache {
366    fn width(&self) -> u32 {
367        self.width
368    }
369    fn height(&self) -> u32 {
370        self.height
371    }
372    fn stride_abs(&self) -> u32 {
373        self.stride_abs()
374    }
375    unsafe fn row_ptr_const(&self, y: i32) -> *const u8 {
376        self.row_ptr(y) as *const u8
377    }
378}
379
380/// Default rendering buffer type (matches C++ `typedef row_accessor<int8u> rendering_buffer`).
381pub type RenderingBuffer = RowAccessor;
382
// ============================================================================
// Tests
// ============================================================================

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_row_accessor_basic() {
        let mut data = vec![0u8; 40]; // 10 wide × 4 high, 1 byte/pixel
        let rb = unsafe { RowAccessor::new_with_buf(data.as_mut_ptr(), 10, 4, 10) };
        assert_eq!(rb.width(), 10);
        assert_eq!(rb.height(), 4);
        assert_eq!(rb.stride(), 10);
        assert_eq!(rb.stride_abs(), 10);
    }

    #[test]
    fn test_row_accessor_write_read() {
        let mut data = vec![0u8; 30]; // 10 wide × 3 high
        let mut rb = unsafe { RowAccessor::new_with_buf(data.as_mut_ptr(), 10, 3, 10) };

        // Write to row 1
        rb.row_slice_mut(1)[0] = 42;
        rb.row_slice_mut(1)[9] = 99;

        // Read back
        assert_eq!(rb.row_slice(1)[0], 42);
        assert_eq!(rb.row_slice(1)[9], 99);
        assert_eq!(rb.row_slice(0)[0], 0); // row 0 unchanged
    }

    #[test]
    fn test_row_accessor_negative_stride() {
        let mut data = vec![0u8; 30]; // 10 wide × 3 high, bottom-up
        // Fill with physical row indices
        data[0..10].fill(0);
        data[10..20].fill(1);
        data[20..30].fill(2);

        let rb = unsafe { RowAccessor::new_with_buf(data.as_mut_ptr(), 10, 3, -10) };

        // With negative stride, row 0 should point to the LAST 10 bytes
        assert_eq!(rb.row_slice(0)[0], 2);
        assert_eq!(rb.row_slice(1)[0], 1);
        assert_eq!(rb.row_slice(2)[0], 0);
    }

    #[test]
    fn test_row_accessor_clear() {
        let mut data = vec![0u8; 20];
        let mut rb = unsafe { RowAccessor::new_with_buf(data.as_mut_ptr(), 5, 4, 5) };
        rb.clear(0xFF);
        for byte in &data {
            assert_eq!(*byte, 0xFF);
        }
    }

    #[test]
    fn test_row_accessor_row_data() {
        let mut data = vec![0u8; 30];
        data[10] = 55; // row 1, pixel 0
        let rb = unsafe { RowAccessor::new_with_buf(data.as_mut_ptr(), 10, 3, 10) };
        let rd = rb.row(1);
        assert_eq!(rd.x1, 0);
        assert_eq!(rd.x2, 9);
        assert_eq!(rd.ptr[0], 55);
    }

    #[test]
    fn test_row_ptr_cache_basic() {
        let mut data = vec![0u8; 40];
        let rpc = unsafe { RowPtrCache::new_with_buf(data.as_mut_ptr(), 10, 4, 10) };
        assert_eq!(rpc.width(), 10);
        assert_eq!(rpc.height(), 4);
        assert_eq!(rpc.stride(), 10);
    }

    #[test]
    fn test_row_ptr_cache_write_read() {
        let mut data = vec![0u8; 30];
        let mut rpc = unsafe { RowPtrCache::new_with_buf(data.as_mut_ptr(), 10, 3, 10) };

        rpc.row_slice_mut(2)[5] = 77;
        assert_eq!(rpc.row_slice(2)[5], 77);
        assert_eq!(rpc.row_slice(0)[5], 0);
    }

    #[test]
    fn test_row_ptr_cache_negative_stride() {
        let mut data = vec![0u8; 30];
        data[0..10].fill(0);
        data[10..20].fill(1);
        data[20..30].fill(2);

        let rpc = unsafe { RowPtrCache::new_with_buf(data.as_mut_ptr(), 10, 3, -10) };

        assert_eq!(rpc.row_slice(0)[0], 2);
        assert_eq!(rpc.row_slice(1)[0], 1);
        assert_eq!(rpc.row_slice(2)[0], 0);
    }

    #[test]
    fn test_row_ptr_cache_clear() {
        let mut data = vec![0u8; 20];
        let mut rpc = unsafe { RowPtrCache::new_with_buf(data.as_mut_ptr(), 5, 4, 5) };
        rpc.clear(0xAA);
        for byte in &data {
            assert_eq!(*byte, 0xAA);
        }
    }

    #[test]
    fn test_copy_from_accessor_to_accessor() {
        let mut src_data = vec![0u8; 30];
        for (i, byte) in src_data.iter_mut().enumerate() {
            *byte = i as u8;
        }
        let src = unsafe { RowAccessor::new_with_buf(src_data.as_mut_ptr(), 10, 3, 10) };

        let mut dst_data = vec![0u8; 30];
        let mut dst = unsafe { RowAccessor::new_with_buf(dst_data.as_mut_ptr(), 10, 3, 10) };
        dst.copy_from(&src);

        assert_eq!(dst_data, src_data);
    }

    #[test]
    fn test_copy_from_cache_to_cache() {
        let mut src_data = vec![0u8; 30];
        for (i, byte) in src_data.iter_mut().enumerate() {
            *byte = (i * 2) as u8;
        }
        let src = unsafe { RowPtrCache::new_with_buf(src_data.as_mut_ptr(), 10, 3, 10) };

        let mut dst_data = vec![0u8; 30];
        let mut dst = unsafe { RowPtrCache::new_with_buf(dst_data.as_mut_ptr(), 10, 3, 10) };
        dst.copy_from(&src);

        assert_eq!(dst_data, src_data);
    }

    #[test]
    fn test_copy_from_different_sizes() {
        // Source is smaller than destination
        let mut src_data = vec![42u8; 10]; // 5×2
        let src = unsafe { RowAccessor::new_with_buf(src_data.as_mut_ptr(), 5, 2, 5) };

        let mut dst_data = vec![0u8; 30]; // 10×3
        let mut dst = unsafe { RowAccessor::new_with_buf(dst_data.as_mut_ptr(), 10, 3, 10) };
        dst.copy_from(&src);

        // Only the first 5 bytes of the first 2 rows should be copied
        assert_eq!(dst_data[0..5], [42, 42, 42, 42, 42]);
        assert_eq!(dst_data[5..10], [0, 0, 0, 0, 0]);
        assert_eq!(dst_data[10..15], [42, 42, 42, 42, 42]);
        assert_eq!(dst_data[15..20], [0, 0, 0, 0, 0]);
        assert_eq!(dst_data[20..30], [0; 10]); // row 2 untouched
    }

    #[test]
    fn test_row_accessor_default() {
        let rb = RowAccessor::new();
        assert_eq!(rb.width(), 0);
        assert_eq!(rb.height(), 0);
    }

    #[test]
    fn test_row_ptr_cache_default() {
        let rpc = RowPtrCache::new();
        assert_eq!(rpc.width(), 0);
        assert_eq!(rpc.height(), 0);
    }

    #[test]
    fn test_rendering_buffer_alias() {
        // Verify the type alias compiles
        let mut data = vec![0u8; 20];
        let _rb: RenderingBuffer =
            unsafe { RenderingBuffer::new_with_buf(data.as_mut_ptr(), 5, 4, 5) };
    }
}