use {
    crate::{memory::Memory, Size},
    hal::{device::Device as _, Backend},
    std::{iter, ops::Range, ptr::NonNull, slice},
};

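/// Deferred flush of a written segment of non-coherent memory,
/// performed when the owning `Writer` is dropped.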
#[derive(Debug)]
struct Flush<'a, B: Backend> {
    device: &'a B::Device,
    memory: &'a B::Memory,
    segment: hal::memory::Segment,
}

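/// Writer into a mapped memory range, created by `MappedRange::write`.
/// For non-coherent memory the written segment is flushed when the writer is dropped.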
#[derive(Debug)]
pub struct Writer<'a, 'b, T, B: Backend> {
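    /// Mapped memory slice to write into.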
    pub slice: &'a mut [T],
    flush: Option<Flush<'b, B>>,
}

impl<T, B: Backend> Writer<'_, '_, T, B> {
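    /// Dispose of this writer without flushing.
    ///
    /// Returns the raw pointer to the mapped slice and, for non-coherent memory,
    /// the segment that the caller is now responsible for flushing.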
    pub fn forget(mut self) -> (*mut T, Option<hal::memory::Segment>) {
        (
            self.slice.as_mut_ptr(),
            self.flush.take().map(|f| f.segment),
        )
    }
}

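// Flush the written segment on drop so the device sees the writes;
// `flush` is `None` for coherent memory, making this a no-op.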
impl<'a, 'b, T, B: Backend> Drop for Writer<'a, 'b, T, B> {
    fn drop(&mut self) {
        if let Some(f) = self.flush.take() {
            unsafe {
                f.device
                    .flush_mapped_memory_ranges(iter::once((f.memory, f.segment)))
                    .expect("Should flush successfully")
            };
        }
    }
}

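/// Range of host-visible memory mapped into the host address space.
/// Provides methods to read and write the mapped bytes.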
#[derive(Debug)]
pub struct MappedRange<'a, B: Backend> {
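    /// Memory object that this range is mapped from.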
    memory: &'a Memory<B>,

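    /// Pointer to the start of `mapping_range` in host address space.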
    ptr: NonNull<u8>,

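    /// Range of the memory object that is actually mapped; for non-coherent
    /// memory its bounds are aligned to `non_coherent_atom_size`.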
    mapping_range: Range<Size>,

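    /// Range requested by the caller; always a sub-range of `mapping_range`.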
    requested_range: Range<Size>,
}

impl<'a, B: Backend> MappedRange<'a, B> {
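    /// Construct a mapped range from a raw mapping pointer and its ranges.
    ///
    /// # Safety
    ///
    /// `ptr` must point to `memory` mapped at `mapping_range`, and the ranges
    /// must uphold the invariants checked by the debug assertions below.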
    pub(crate) unsafe fn from_raw(
        memory: &'a Memory<B>,
        ptr: *mut u8,
        mapping_range: Range<Size>,
        requested_range: Range<Size>,
    ) -> Self {
        debug_assert!(
            mapping_range.start < mapping_range.end,
            "Memory mapping region must have valid size"
        );

        debug_assert!(
            requested_range.start < requested_range.end,
            "Memory mapping region must have valid size"
        );

        match memory.non_coherent_atom_size {
            Some(atom) => {
                debug_assert_eq!(
                    (
                        mapping_range.start % atom.get(),
                        mapping_range.end % atom.get()
                    ),
                    (0, 0),
                    "Bounds of non-coherent memory mapping ranges must be multiples of `Limits::non_coherent_atom_size`",
                );
                debug_assert!(
                    crate::is_sub_range(&requested_range, &mapping_range),
                    "Requested {:?} must be sub-range of mapping {:?}",
                    requested_range,
                    mapping_range,
                );
            }
            None => {
                debug_assert_eq!(mapping_range, requested_range);
            }
        };

        MappedRange {
            ptr: NonNull::new_unchecked(ptr),
            mapping_range,
            requested_range,
            memory,
        }
    }

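    /// Pointer to the start of the requested range.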
    pub fn ptr(&self) -> NonNull<u8> {
        let offset = (self.requested_range.start - self.mapping_range.start) as isize;
        unsafe { NonNull::new_unchecked(self.ptr.as_ptr().offset(offset)) }
    }

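    /// Range of memory covered by this mapping, as requested by the caller.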
    pub fn range(&self) -> Range<Size> {
        self.requested_range.clone()
    }

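    /// Returns `true` if the mapped memory is host-coherent and needs
    /// no explicit flushes or invalidations.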
    pub fn is_coherent(&self) -> bool {
        self.memory.non_coherent_atom_size.is_none()
    }

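    /// Fetch a readable slice of the sub-range described by `segment`,
    /// invalidating it first if the memory is non-coherent.
    ///
    /// # Safety
    ///
    /// The caller must ensure the device does not write to this region while
    /// the borrow is alive, and that the mapped bytes are valid values of `T`.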
    pub unsafe fn read<'b, T>(
        &'b mut self,
        device: &B::Device,
        segment: hal::memory::Segment,
    ) -> Result<&'b [T], hal::device::MapError>
    where
        'a: 'b,
        T: Copy,
    {
        let sub_range = crate::segment_to_sub_range(segment, &self.requested_range)?;

        if let Some(atom) = self.memory.non_coherent_atom_size {
            let aligned_range = crate::align_range(&sub_range, atom);
            let segment = hal::memory::Segment {
                offset: aligned_range.start,
                size: Some(aligned_range.end - aligned_range.start),
            };
            device.invalidate_mapped_memory_ranges(iter::once((self.memory.raw(), segment)))?;
        }

        let ptr = self
            .ptr
            .as_ptr()
            .offset((sub_range.start - self.mapping_range.start) as isize);
        let size = (sub_range.end - sub_range.start) as usize;

        let (_pre, slice, _post) = slice::from_raw_parts(ptr, size).align_to();
        Ok(slice)
    }

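    /// Fetch a writer for the sub-range described by `segment`. For non-coherent
    /// memory the written segment is flushed when the returned `Writer` is dropped.
    ///
    /// # Safety
    ///
    /// The caller must ensure the device does not access this region while the
    /// borrow is alive, and that `T` is a plain-data type compatible with the
    /// mapped bytes.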
    pub unsafe fn write<'b, T: 'b>(
        &'b mut self,
        device: &'b B::Device,
        segment: hal::memory::Segment,
    ) -> Result<Writer<'a, 'b, T, B>, hal::device::MapError>
    where
        'a: 'b,
        T: Copy,
    {
        let sub_range = crate::segment_to_sub_range(segment, &self.requested_range)?;
        let ptr = self
            .ptr
            .as_ptr()
            .offset((sub_range.start - self.mapping_range.start) as isize);
        let size = (sub_range.end - sub_range.start) as usize;

        let (_pre, slice, _post) = slice::from_raw_parts_mut(ptr, size).align_to_mut();
        let memory = self.memory.raw();
        let flush = self.memory.non_coherent_atom_size.map(|atom| Flush {
            device,
            memory,
            segment: {
                let range = crate::align_range(&sub_range, atom);
                hal::memory::Segment {
                    offset: range.start,
                    size: Some(range.end - range.start),
                }
            },
        });
        Ok(Writer { slice, flush })
    }
}