/// A row of pixel data: the horizontal span `[x1, x2]` plus a slice over the
/// row's bytes.
#[derive(Debug, Clone, Copy)]
pub struct RowData<'a> {
    pub x1: i32,
    pub x2: i32,
    pub ptr: &'a [u8],
}

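/// A rendering buffer that addresses rows of an externally owned pixel
/// buffer, computing each row pointer from a base pointer and a stride.
/// The stride may be negative for bottom-up buffers.
///
/// A minimal usage sketch (the surrounding crate path is assumed):
///
/// ```ignore
/// let mut data = vec![0u8; 4 * 10]; // 4 rows, 10 bytes each
/// let mut rb = unsafe { RowAccessor::new_with_buf(data.as_mut_ptr(), 10, 4, 10) };
/// rb.row_slice_mut(2)[3] = 255;
/// assert_eq!(data[2 * 10 + 3], 255);
/// ```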
pub struct RowAccessor {
    buf: *mut u8,
    start: *mut u8,
    width: u32,
    height: u32,
    stride: i32,
}

impl RowAccessor {
    pub fn new() -> Self {
        Self {
            buf: std::ptr::null_mut(),
            start: std::ptr::null_mut(),
            width: 0,
            height: 0,
            stride: 0,
        }
    }

    /// # Safety
    ///
    /// `buf` must point to at least `height * stride.unsigned_abs()` valid
    /// bytes that outlive this accessor.
    pub unsafe fn new_with_buf(buf: *mut u8, width: u32, height: u32, stride: i32) -> Self {
        let mut ra = Self::new();
        ra.attach(buf, width, height, stride);
        ra
    }

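    /// Attaches an external buffer. For a negative `stride` (a bottom-up
    /// buffer), `start` is repointed at the last row in memory so that row 0
    /// remains the visually topmost row.
    ///
    /// A sketch of the bottom-up case (crate path assumed):
    ///
    /// ```ignore
    /// let mut data = vec![0u8; 30];
    /// let mut ra = RowAccessor::new();
    /// // With stride -10, row 0 begins at data[20] and row 2 at data[0].
    /// unsafe { ra.attach(data.as_mut_ptr(), 10, 3, -10) };
    /// ```
    ///
    /// # Safety
    ///
    /// `buf` must point to at least `height * stride.unsigned_abs()` valid
    /// bytes that outlive this accessor.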
    pub unsafe fn attach(&mut self, buf: *mut u8, width: u32, height: u32, stride: i32) {
        self.buf = buf;
        self.start = buf;
        self.width = width;
        self.height = height;
        self.stride = stride;
        if stride < 0 {
            self.start = buf.offset(-((height as i64 - 1) * stride as i64) as isize);
        }
    }

    pub fn buf(&self) -> *mut u8 {
        self.buf
    }

    pub fn width(&self) -> u32 {
        self.width
    }

    pub fn height(&self) -> u32 {
        self.height
    }

    pub fn stride(&self) -> i32 {
        self.stride
    }

    pub fn stride_abs(&self) -> u32 {
        self.stride.unsigned_abs()
    }

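    /// Returns a raw pointer to the start of row `y`.
    ///
    /// # Safety
    ///
    /// No bounds check is performed; `y` must be in `0..height`.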
    #[inline]
    pub unsafe fn row_ptr(&self, y: i32) -> *mut u8 {
        self.start.offset((y as i64 * self.stride as i64) as isize)
    }

    pub fn row_slice(&self, y: u32) -> &[u8] {
        assert!(
            y < self.height,
            "row {} out of bounds (height={})",
            y,
            self.height
        );
        unsafe {
            let ptr = self.row_ptr(y as i32);
            std::slice::from_raw_parts(ptr, self.stride_abs() as usize)
        }
    }

    pub fn row_slice_mut(&mut self, y: u32) -> &mut [u8] {
        assert!(
            y < self.height,
            "row {} out of bounds (height={})",
            y,
            self.height
        );
        unsafe {
            let ptr = self.row_ptr(y as i32);
            std::slice::from_raw_parts_mut(ptr, self.stride_abs() as usize)
        }
    }

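    /// Returns the full-width [`RowData`] for row `y`, spanning
    /// `x1 = 0 ..= x2 = width - 1`.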
    pub fn row(&self, y: u32) -> RowData<'_> {
        RowData {
            x1: 0,
            x2: self.width as i32 - 1,
            ptr: self.row_slice(y),
        }
    }

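    /// Copies row data from `src`, clipped to the smaller of the two heights
    /// and the smaller of the two absolute strides. The underlying buffers
    /// must not overlap, since the copy uses `ptr::copy_nonoverlapping`
    /// internally.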
    pub fn copy_from<T: RenderingBufferAccess>(&mut self, src: &T) {
        let h = self.height.min(src.height());
        let l = self.stride_abs().min(src.stride_abs()) as usize;
        for y in 0..h {
            unsafe {
                let dst = self.row_ptr(y as i32);
                let src_ptr = src.row_ptr_const(y as i32);
                std::ptr::copy_nonoverlapping(src_ptr, dst, l);
            }
        }
    }

    pub fn clear(&mut self, value: u8) {
        for y in 0..self.height {
            // row_slice_mut already yields exactly stride_abs() bytes.
            for byte in self.row_slice_mut(y).iter_mut() {
                *byte = value;
            }
        }
    }
}

impl Default for RowAccessor {
    fn default() -> Self {
        Self::new()
    }
}

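/// Like [`RowAccessor`], but precomputes and caches a pointer to every row
/// on `attach`, trading `height` pointers of memory for a cheaper per-row
/// lookup.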
pub struct RowPtrCache {
    buf: *mut u8,
    rows: Vec<*mut u8>,
    width: u32,
    height: u32,
    stride: i32,
}

impl RowPtrCache {
    pub fn new() -> Self {
        Self {
            buf: std::ptr::null_mut(),
            rows: Vec::new(),
            width: 0,
            height: 0,
            stride: 0,
        }
    }

    /// # Safety
    ///
    /// `buf` must point to at least `height * stride.unsigned_abs()` valid
    /// bytes that outlive this cache.
    pub unsafe fn new_with_buf(buf: *mut u8, width: u32, height: u32, stride: i32) -> Self {
        let mut rpc = Self::new();
        rpc.attach(buf, width, height, stride);
        rpc
    }

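    /// Attaches an external buffer and rebuilds the row-pointer table. The
    /// table only grows across repeated attaches; it is never shrunk.
    ///
    /// # Safety
    ///
    /// `buf` must point to at least `height * stride.unsigned_abs()` valid
    /// bytes that outlive this cache.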
    pub unsafe fn attach(&mut self, buf: *mut u8, width: u32, height: u32, stride: i32) {
        self.buf = buf;
        self.width = width;
        self.height = height;
        self.stride = stride;

        if (height as usize) > self.rows.len() {
            self.rows.resize(height as usize, std::ptr::null_mut());
        }

        let mut row_ptr = buf;
        if stride < 0 {
            row_ptr = buf.offset(-((height as i64 - 1) * stride as i64) as isize);
        }

        for y in 0..height as usize {
            self.rows[y] = row_ptr;
            row_ptr = row_ptr.offset(stride as isize);
        }
    }

    pub fn buf(&self) -> *mut u8 {
        self.buf
    }

    pub fn width(&self) -> u32 {
        self.width
    }

    pub fn height(&self) -> u32 {
        self.height
    }

    pub fn stride(&self) -> i32 {
        self.stride
    }

    pub fn stride_abs(&self) -> u32 {
        self.stride.unsigned_abs()
    }

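    /// Returns the cached raw pointer for row `y`.
    ///
    /// # Safety
    ///
    /// No bounds check is performed (`get_unchecked`); `y` must be in
    /// `0..height`.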
    #[inline]
    pub unsafe fn row_ptr(&self, y: i32) -> *mut u8 {
        *self.rows.get_unchecked(y as usize)
    }

    pub fn row_slice(&self, y: u32) -> &[u8] {
        assert!(
            y < self.height,
            "row {} out of bounds (height={})",
            y,
            self.height
        );
        unsafe {
            let ptr = self.row_ptr(y as i32);
            std::slice::from_raw_parts(ptr, self.stride_abs() as usize)
        }
    }

    pub fn row_slice_mut(&mut self, y: u32) -> &mut [u8] {
        assert!(
            y < self.height,
            "row {} out of bounds (height={})",
            y,
            self.height
        );
        unsafe {
            let ptr = self.row_ptr(y as i32);
            std::slice::from_raw_parts_mut(ptr, self.stride_abs() as usize)
        }
    }

    pub fn row(&self, y: u32) -> RowData<'_> {
        RowData {
            x1: 0,
            x2: self.width as i32 - 1,
            ptr: self.row_slice(y),
        }
    }

    pub fn copy_from<T: RenderingBufferAccess>(&mut self, src: &T) {
        let h = self.height.min(src.height());
        let l = self.stride_abs().min(src.stride_abs()) as usize;
        for y in 0..h {
            unsafe {
                let dst = self.row_ptr(y as i32);
                let src_ptr = src.row_ptr_const(y as i32);
                std::ptr::copy_nonoverlapping(src_ptr, dst, l);
            }
        }
    }

    pub fn clear(&mut self, value: u8) {
        for y in 0..self.height {
            // row_slice_mut already yields exactly stride_abs() bytes.
            for byte in self.row_slice_mut(y).iter_mut() {
                *byte = value;
            }
        }
    }
}

impl Default for RowPtrCache {
    fn default() -> Self {
        Self::new()
    }
}

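/// The read-side surface shared by [`RowAccessor`] and [`RowPtrCache`] so
/// that `copy_from` can accept either buffer type as its source.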
pub trait RenderingBufferAccess {
    fn width(&self) -> u32;
    fn height(&self) -> u32;
    fn stride_abs(&self) -> u32;
    /// # Safety
    ///
    /// `y` must be in `0..height()`.
    unsafe fn row_ptr_const(&self, y: i32) -> *const u8;
}

impl RenderingBufferAccess for RowAccessor {
    fn width(&self) -> u32 {
        self.width
    }
    fn height(&self) -> u32 {
        self.height
    }
    fn stride_abs(&self) -> u32 {
        self.stride_abs()
    }
    unsafe fn row_ptr_const(&self, y: i32) -> *const u8 {
        self.row_ptr(y) as *const u8
    }
}

impl RenderingBufferAccess for RowPtrCache {
    fn width(&self) -> u32 {
        self.width
    }
    fn height(&self) -> u32 {
        self.height
    }
    fn stride_abs(&self) -> u32 {
        self.stride_abs()
    }
    unsafe fn row_ptr_const(&self, y: i32) -> *const u8 {
        self.row_ptr(y) as *const u8
    }
}

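/// The default rendering buffer type; [`RowPtrCache`] offers the same API
/// when a cached row lookup is preferred.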
pub type RenderingBuffer = RowAccessor;

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_row_accessor_basic() {
        let mut data = vec![0u8; 40];
        let rb = unsafe { RowAccessor::new_with_buf(data.as_mut_ptr(), 10, 4, 10) };
        assert_eq!(rb.width(), 10);
        assert_eq!(rb.height(), 4);
        assert_eq!(rb.stride(), 10);
        assert_eq!(rb.stride_abs(), 10);
    }

    #[test]
    fn test_row_accessor_write_read() {
        let mut data = vec![0u8; 30];
        let mut rb = unsafe { RowAccessor::new_with_buf(data.as_mut_ptr(), 10, 3, 10) };

        rb.row_slice_mut(1)[0] = 42;
        rb.row_slice_mut(1)[9] = 99;

        assert_eq!(rb.row_slice(1)[0], 42);
        assert_eq!(rb.row_slice(1)[9], 99);
        assert_eq!(rb.row_slice(0)[0], 0);
    }

    #[test]
    fn test_row_accessor_negative_stride() {
        let mut data = vec![0u8; 30];
        data[0..10].fill(0);
        data[10..20].fill(1);
        data[20..30].fill(2);

        let rb = unsafe { RowAccessor::new_with_buf(data.as_mut_ptr(), 10, 3, -10) };

        assert_eq!(rb.row_slice(0)[0], 2);
        assert_eq!(rb.row_slice(1)[0], 1);
        assert_eq!(rb.row_slice(2)[0], 0);
    }

    #[test]
    fn test_row_accessor_clear() {
        let mut data = vec![0u8; 20];
        let mut rb = unsafe { RowAccessor::new_with_buf(data.as_mut_ptr(), 5, 4, 5) };
        rb.clear(0xFF);
        for byte in &data {
            assert_eq!(*byte, 0xFF);
        }
    }

    #[test]
    fn test_row_accessor_row_data() {
        let mut data = vec![0u8; 30];
        data[10] = 55;
        let rb = unsafe { RowAccessor::new_with_buf(data.as_mut_ptr(), 10, 3, 10) };
        let rd = rb.row(1);
        assert_eq!(rd.x1, 0);
        assert_eq!(rd.x2, 9);
        assert_eq!(rd.ptr[0], 55);
    }

    #[test]
    fn test_row_ptr_cache_basic() {
        let mut data = vec![0u8; 40];
        let rpc = unsafe { RowPtrCache::new_with_buf(data.as_mut_ptr(), 10, 4, 10) };
        assert_eq!(rpc.width(), 10);
        assert_eq!(rpc.height(), 4);
        assert_eq!(rpc.stride(), 10);
    }

    #[test]
    fn test_row_ptr_cache_write_read() {
        let mut data = vec![0u8; 30];
        let mut rpc = unsafe { RowPtrCache::new_with_buf(data.as_mut_ptr(), 10, 3, 10) };

        rpc.row_slice_mut(2)[5] = 77;
        assert_eq!(rpc.row_slice(2)[5], 77);
        assert_eq!(rpc.row_slice(0)[5], 0);
    }

    #[test]
    fn test_row_ptr_cache_negative_stride() {
        let mut data = vec![0u8; 30];
        data[0..10].fill(0);
        data[10..20].fill(1);
        data[20..30].fill(2);

        let rpc = unsafe { RowPtrCache::new_with_buf(data.as_mut_ptr(), 10, 3, -10) };

        assert_eq!(rpc.row_slice(0)[0], 2);
        assert_eq!(rpc.row_slice(1)[0], 1);
        assert_eq!(rpc.row_slice(2)[0], 0);
    }

    #[test]
    fn test_row_ptr_cache_clear() {
        let mut data = vec![0u8; 20];
        let mut rpc = unsafe { RowPtrCache::new_with_buf(data.as_mut_ptr(), 5, 4, 5) };
        rpc.clear(0xAA);
        for byte in &data {
            assert_eq!(*byte, 0xAA);
        }
    }

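    // A supplementary sketch: exercises re-attaching a RowPtrCache to a
    // second, smaller buffer, which reuses (and never shrinks) the cached
    // row-pointer table.
    #[test]
    fn test_row_ptr_cache_reattach() {
        let mut a = vec![1u8; 20];
        let mut b = vec![2u8; 10];
        let mut rpc = unsafe { RowPtrCache::new_with_buf(a.as_mut_ptr(), 5, 4, 5) };
        assert_eq!(rpc.row_slice(3)[0], 1);

        unsafe { rpc.attach(b.as_mut_ptr(), 5, 2, 5) };
        assert_eq!(rpc.height(), 2);
        assert_eq!(rpc.row_slice(1)[0], 2);
    }
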
    #[test]
    fn test_copy_from_accessor_to_accessor() {
        let mut src_data = vec![0u8; 30];
        for (i, byte) in src_data.iter_mut().enumerate() {
            *byte = i as u8;
        }
        let src = unsafe { RowAccessor::new_with_buf(src_data.as_mut_ptr(), 10, 3, 10) };

        let mut dst_data = vec![0u8; 30];
        let mut dst = unsafe { RowAccessor::new_with_buf(dst_data.as_mut_ptr(), 10, 3, 10) };
        dst.copy_from(&src);

        assert_eq!(dst_data, src_data);
    }

    #[test]
    fn test_copy_from_cache_to_cache() {
        let mut src_data = vec![0u8; 30];
        for (i, byte) in src_data.iter_mut().enumerate() {
            *byte = (i * 2) as u8;
        }
        let src = unsafe { RowPtrCache::new_with_buf(src_data.as_mut_ptr(), 10, 3, 10) };

        let mut dst_data = vec![0u8; 30];
        let mut dst = unsafe { RowPtrCache::new_with_buf(dst_data.as_mut_ptr(), 10, 3, 10) };
        dst.copy_from(&src);

        assert_eq!(dst_data, src_data);
    }

    #[test]
    fn test_copy_from_different_sizes() {
        let mut src_data = vec![42u8; 10];
        let src = unsafe { RowAccessor::new_with_buf(src_data.as_mut_ptr(), 5, 2, 5) };

        let mut dst_data = vec![0u8; 30];
        let mut dst = unsafe { RowAccessor::new_with_buf(dst_data.as_mut_ptr(), 10, 3, 10) };
        dst.copy_from(&src);

        assert_eq!(dst_data[0..5], [42, 42, 42, 42, 42]);
        assert_eq!(dst_data[5..10], [0, 0, 0, 0, 0]);
        assert_eq!(dst_data[10..15], [42, 42, 42, 42, 42]);
        assert_eq!(dst_data[15..20], [0, 0, 0, 0, 0]);
        assert_eq!(dst_data[20..30], [0; 10]);
    }

    #[test]
    fn test_row_accessor_default() {
        let rb = RowAccessor::new();
        assert_eq!(rb.width(), 0);
        assert_eq!(rb.height(), 0);
    }

    #[test]
    fn test_row_ptr_cache_default() {
        let rpc = RowPtrCache::new();
        assert_eq!(rpc.width(), 0);
        assert_eq!(rpc.height(), 0);
    }

    #[test]
    fn test_rendering_buffer_alias() {
        let mut data = vec![0u8; 20];
        let _rb: RenderingBuffer =
            unsafe { RenderingBuffer::new_with_buf(data.as_mut_ptr(), 5, 4, 5) };
    }
}