1use std::fmt::Debug;
7
8use crate::{
9 error::Result,
10 util::{CACHE_LINE_BYTE_SIZE, MemoryGuard, MemoryTracker},
11};
12
13use super::{Rect, internal::RawImageBuffer};
14
/// An owned, heap-allocated raw image: a `RawImageBuffer` plus a logical
/// offset into it, cache-line padding beyond the logical size, and optional
/// memory accounting through a `MemoryTracker`.
pub struct OwnedRawImage {
    // Backing allocation; its byte size includes `padding` on top of the
    // logical size reported by `byte_size()`.
    pub(super) data: RawImageBuffer,
    // (x, y) byte offset of the logical image inside `data`.
    pub(super) offset: (usize, usize),
    // Extra (horizontal bytes, rows) allocated beyond the logical size; the
    // constructors grow the horizontal part so each row is a whole number of
    // cache lines.
    padding: (usize, usize),
    // When set, `tracked_bytes` are released on drop and re-reserved by
    // `try_clone`.
    tracker: Option<MemoryTracker>,
    // Number of bytes accounted to `tracker` for this image.
    tracked_bytes: u64,
}
26
27impl OwnedRawImage {
28 pub fn new(byte_size: (usize, usize)) -> Result<Self> {
29 Self::new_zeroed_with_padding(byte_size, (0, 0), (0, 0))
30 }
31
32 pub fn new_uninit(byte_size: (usize, usize)) -> Result<Self> {
38 let mut padding = (0usize, 0usize);
39 if !(padding.0 + byte_size.0).is_multiple_of(CACHE_LINE_BYTE_SIZE) {
40 padding.0 += CACHE_LINE_BYTE_SIZE - (padding.0 + byte_size.0) % CACHE_LINE_BYTE_SIZE;
41 }
42 Ok(Self {
43 data: RawImageBuffer::try_allocate(
44 (byte_size.0 + padding.0, byte_size.1 + padding.1),
45 true,
46 )?,
47 offset: (0, 0),
48 padding,
49 tracker: None,
50 tracked_bytes: 0,
51 })
52 }
53
54 pub fn new_zeroed_with_padding(
55 byte_size: (usize, usize),
56 offset: (usize, usize),
57 mut padding: (usize, usize),
58 ) -> Result<Self> {
59 if !(padding.0 + byte_size.0).is_multiple_of(CACHE_LINE_BYTE_SIZE) {
62 padding.0 += CACHE_LINE_BYTE_SIZE - (padding.0 + byte_size.0) % CACHE_LINE_BYTE_SIZE;
63 }
64 Ok(Self {
65 data: RawImageBuffer::try_allocate(
66 (byte_size.0 + padding.0, byte_size.1 + padding.1),
67 false,
68 )?,
69 offset,
70 padding,
71 tracker: None,
72 tracked_bytes: 0,
73 })
74 }
75
76 #[inline]
77 pub fn get_rect_including_padding_mut(&mut self, rect: Rect) -> RawImageRectMut<'_> {
78 let (bpr, nr, bbr) = self.data.dimensions();
79 let storage = self.data.data_slice_mut();
80 sub_rect_mut(storage, bpr, nr, bbr, rect)
81 }
82
83 #[inline]
84 pub fn get_rect_including_padding(&self, rect: Rect) -> RawImageRect<'_> {
85 let (bpr, nr, bbr) = self.data.dimensions();
86 let storage = self.data.data_slice();
87 sub_rect(storage, bpr, nr, bbr, rect)
88 }
89
90 #[inline]
91 fn shift_rect(&self, rect: Rect) -> Rect {
92 if cfg!(debug_assertions) {
93 rect.check_within(self.byte_size());
95 }
96 Rect {
97 origin: (rect.origin.0 + self.offset.0, rect.origin.1 + self.offset.1),
98 size: rect.size,
99 }
100 }
101
102 #[inline]
103 pub fn get_rect_mut(&mut self, rect: Rect) -> RawImageRectMut<'_> {
104 self.get_rect_including_padding_mut(self.shift_rect(rect))
105 }
106
107 #[inline]
108 pub fn get_rect(&self, rect: Rect) -> RawImageRect<'_> {
109 self.get_rect_including_padding(self.shift_rect(rect))
110 }
111
112 #[inline(always)]
113 pub fn row_mut(&mut self, row: usize) -> &mut [u8] {
114 let offset = self.offset;
115 let end = offset.0 + self.byte_size().0;
116 let row = self.data.row_mut(row + offset.1);
117 &mut row[offset.0..end]
118 }
119
120 #[inline(always)]
121 pub fn row(&self, row: usize) -> &[u8] {
122 let offset = self.offset;
123 let end = offset.0 + self.byte_size().0;
124 let row = self.data.row(row + offset.1);
125 &row[offset.0..end]
126 }
127
128 #[inline]
129 pub fn byte_size(&self) -> (usize, usize) {
130 let size = self.data.byte_size();
131 (size.0 - self.padding.0, size.1 - self.padding.1)
132 }
133
134 #[inline]
135 pub fn byte_offset(&self) -> (usize, usize) {
136 self.offset
137 }
138
139 #[inline]
140 pub fn byte_padding(&self) -> (usize, usize) {
141 self.padding
142 }
143
144 #[cfg(feature = "threads")]
148 pub fn prefault_parallel(&mut self) {
149 use rayon::prelude::*;
150 const PAGE_SIZE: usize = 4096;
151 let data = self.data.data_slice_mut();
152 if data.is_empty() {
153 return;
154 }
155 data.par_chunks_mut(PAGE_SIZE).for_each(|chunk| {
156 chunk[0] = 0;
157 });
158 }
159
160 pub(super) fn set_tracker(&mut self, tracker: MemoryTracker, bytes: u64) {
163 self.tracker = Some(tracker);
164 self.tracked_bytes = bytes;
165 }
166
167 pub fn try_clone(&self) -> Result<OwnedRawImage> {
168 if let Some(tracker) = &self.tracker {
170 tracker.try_allocate(self.tracked_bytes)?;
171 }
172 let guard: Option<MemoryGuard> = self
174 .tracker
175 .as_ref()
176 .map(|t| MemoryGuard::new(t.clone(), self.tracked_bytes));
177
178 let clone = Self {
179 data: self.data.try_clone()?,
180 offset: self.offset,
181 padding: self.padding,
182 tracker: self.tracker.clone(),
183 tracked_bytes: self.tracked_bytes,
184 };
185
186 if let Some(g) = guard {
188 g.disarm();
189 }
190
191 Ok(clone)
192 }
193}
194
195impl Drop for OwnedRawImage {
196 fn drop(&mut self) {
197 if let Some(tracker) = &self.tracker {
198 tracker.release(self.tracked_bytes);
199 }
200 self.data.deallocate();
202 }
203}
204
205#[inline]
207fn sub_rect<'a>(
208 storage: &'a [u8],
209 bpr: usize,
210 nr: usize,
211 bbr: usize,
212 rect: Rect,
213) -> RawImageRect<'a> {
214 if rect.size.0 == 0 || rect.size.1 == 0 {
215 return RawImageRect {
216 storage: &[],
217 bytes_per_row: 0,
218 num_rows: 0,
219 bytes_between_rows: 0,
220 };
221 }
222 assert!(rect.origin.1 + rect.size.1 <= nr);
223 assert!(rect.origin.0 + rect.size.0 <= bpr);
224 let new_start = rect.origin.1 * bbr + rect.origin.0;
225 let data_span = (rect.size.1 - 1) * bbr + rect.size.0;
226 assert!(new_start + data_span <= storage.len());
227 RawImageRect {
228 storage: &storage[new_start..new_start + data_span],
229 bytes_per_row: rect.size.0,
230 num_rows: rect.size.1,
231 bytes_between_rows: bbr,
232 }
233}
234
235#[inline]
237fn sub_rect_mut<'a>(
238 storage: &'a mut [u8],
239 bpr: usize,
240 nr: usize,
241 bbr: usize,
242 rect: Rect,
243) -> RawImageRectMut<'a> {
244 if rect.size.0 == 0 || rect.size.1 == 0 {
245 return RawImageRectMut {
246 storage: &mut [],
247 bytes_per_row: 0,
248 num_rows: 0,
249 bytes_between_rows: 0,
250 };
251 }
252 assert!(rect.origin.1 + rect.size.1 <= nr);
253 assert!(rect.origin.0 + rect.size.0 <= bpr);
254 let new_start = rect.origin.1 * bbr + rect.origin.0;
255 let data_span = (rect.size.1 - 1) * bbr + rect.size.0;
256 assert!(new_start + data_span <= storage.len());
257 RawImageRectMut {
258 storage: &mut storage[new_start..new_start + data_span],
259 bytes_per_row: rect.size.0,
260 num_rows: rect.size.1,
261 bytes_between_rows: bbr,
262 }
263}
264
/// An immutable 2-D byte view: `num_rows` rows of `bytes_per_row` bytes
/// each, with consecutive rows `bytes_between_rows` bytes apart in
/// `storage`.
#[derive(Clone, Copy)]
pub struct RawImageRect<'a> {
    // Underlying bytes, trimmed to exactly span the view (last row ends the
    // slice).
    pub(super) storage: &'a [u8],
    pub(super) bytes_per_row: usize,
    pub(super) num_rows: usize,
    // Row stride; may exceed `bytes_per_row` when the view is a sub-rect.
    pub(super) bytes_between_rows: usize,
}
274
275impl<'a> RawImageRect<'a> {
276 #[inline(always)]
277 pub fn row(&self, row: usize) -> &'a [u8] {
278 assert!(row < self.num_rows);
279 let start = row * self.bytes_between_rows;
280 &self.storage[start..start + self.bytes_per_row]
281 }
282
283 #[inline]
284 pub fn rect(&self, rect: Rect) -> RawImageRect<'a> {
285 sub_rect(
286 self.storage,
287 self.bytes_per_row,
288 self.num_rows,
289 self.bytes_between_rows,
290 rect,
291 )
292 }
293
294 #[inline]
295 pub fn byte_size(&self) -> (usize, usize) {
296 (self.bytes_per_row, self.num_rows)
297 }
298
299 #[inline]
300 pub(super) fn is_aligned(&self, align: usize) -> bool {
301 if self.num_rows == 0 {
302 return true;
303 }
304 self.bytes_per_row.is_multiple_of(align)
305 && self.bytes_between_rows.is_multiple_of(align)
306 && (self.storage.as_ptr() as usize).is_multiple_of(align)
307 }
308}
309
/// A mutable 2-D byte view: `num_rows` rows of `bytes_per_row` bytes each,
/// with consecutive rows `bytes_between_rows` bytes apart in `storage`.
pub struct RawImageRectMut<'a> {
    // Underlying bytes, trimmed to exactly span the view (last row ends the
    // slice).
    pub(super) storage: &'a mut [u8],
    pub(super) bytes_per_row: usize,
    pub(super) num_rows: usize,
    // Row stride; may exceed `bytes_per_row` when the view is a sub-rect.
    pub(super) bytes_between_rows: usize,
}
317
318impl<'a> RawImageRectMut<'a> {
319 #[inline(always)]
320 pub fn row(&mut self, row: usize) -> &mut [u8] {
321 assert!(row < self.num_rows);
322 let start = row * self.bytes_between_rows;
323 &mut self.storage[start..start + self.bytes_per_row]
324 }
325
326 #[inline]
327 pub fn rect_mut(&mut self, rect: Rect) -> RawImageRectMut<'_> {
328 sub_rect_mut(
329 self.storage,
330 self.bytes_per_row,
331 self.num_rows,
332 self.bytes_between_rows,
333 rect,
334 )
335 }
336
337 pub fn as_rect(&self) -> RawImageRect<'_> {
338 RawImageRect {
339 storage: self.storage,
340 bytes_per_row: self.bytes_per_row,
341 num_rows: self.num_rows,
342 bytes_between_rows: self.bytes_between_rows,
343 }
344 }
345
346 #[inline]
347 pub fn byte_size(&self) -> (usize, usize) {
348 (self.bytes_per_row, self.num_rows)
349 }
350
351 #[inline]
352 pub(super) fn is_aligned(&self, align: usize) -> bool {
353 if self.num_rows == 0 {
354 return true;
355 }
356 self.bytes_per_row.is_multiple_of(align)
357 && self.bytes_between_rows.is_multiple_of(align)
358 && (self.storage.as_ptr() as usize).is_multiple_of(align)
359 }
360
361 pub(super) fn from_slice(
363 buf: &'a mut [u8],
364 num_rows: usize,
365 bytes_per_row: usize,
366 bytes_between_rows: usize,
367 ) -> Self {
368 RawImageBuffer::check_vals(num_rows, bytes_per_row, bytes_between_rows);
369 let expected_len = if num_rows == 0 {
370 0
371 } else {
372 (num_rows - 1) * bytes_between_rows + bytes_per_row
373 };
374 assert!(
375 buf.len() >= expected_len,
376 "buffer too small: {} < {}",
377 buf.len(),
378 expected_len
379 );
380 RawImageRectMut {
381 storage: if expected_len == 0 {
382 &mut []
383 } else {
384 &mut buf[..expected_len]
385 },
386 bytes_per_row,
387 num_rows,
388 bytes_between_rows,
389 }
390 }
391}
392
393impl Debug for OwnedRawImage {
394 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
395 write!(f, "raw {}x{}", self.byte_size().0, self.byte_size().1)
396 }
397}
398
399impl Debug for RawImageRect<'_> {
400 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
401 write!(f, "raw rect {}x{}", self.byte_size().0, self.byte_size().1)
402 }
403}
404
405impl Debug for RawImageRectMut<'_> {
406 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
407 write!(
408 f,
409 "raw mutrect {}x{}",
410 self.byte_size().0,
411 self.byte_size().1
412 )
413 }
414}